Adds a new writer class, FitsWriter, plus the Metadata and ParameterConf classes, which are used to read the XML file that configures the header of the FITS files.
Daniel Valdez -
r351:a731a256fc18
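The FitsWriter, Metadata and ParameterConf classes are not part of this hunk; the sketch below only illustrates the idea behind the commit, using pyfits (which JRODataIO.py already imports) and a hypothetical XML layout. It is an assumption for illustration, not the actual implementation.

import numpy
import pyfits
from xml.etree import ElementTree

def write_fits_with_xml_header(data, xmlfile, fitsfile):
    # Hypothetical XML layout: <header><parameter name="TELESCOP" value="JRO"/>...</header>
    hdu = pyfits.PrimaryHDU(data)
    for param in ElementTree.parse(xmlfile).getroot().findall('parameter'):
        hdu.header.update(param.get('name'), param.get('value'))
    pyfits.HDUList([hdu]).writeto(fitsfile)

# write_fits_with_xml_header(numpy.zeros((100, 64)), 'fits_header.xml', 'd2012045000.fits')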
@@ -1,2737 +1,2920
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JRODataIO.py 169 2012-11-19 21:57:03Z murco $
5 5 '''
6 6
7 7 import os, sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import time, datetime
13 13 try:
14 14 import pyfits
15 15 except:
16 16 print "pyfits module could not be imported; install it to save files in FITS format"
17 17
18 18 from jrodata import *
19 19 from jroheaderIO import *
20 20 from jroprocessing import *
21 21
22 22 LOCALTIME = True #-18000
23 23
24 24 def isNumber(str):
25 25 """
26 26 Checks whether the characters that make up a string can be converted to a number.
27 27
28 28 Exceptions:
29 29 If a given string cannot be converted to a number
30 30 Input:
31 31 str, string to be analyzed to determine whether or not it can be converted to a number
32 32
33 33 Return:
34 34 True : if the string is numeric
35 35 False : if the string is not numeric
36 36 """
37 37 try:
38 38 float( str )
39 39 return True
40 40 except:
41 41 return False
42 42
43 43 def isThisFileinRange(filename, startUTSeconds, endUTSeconds):
44 44 """
45 45 Determines whether a data file falls within the specified date range.
46 46
47 47 Inputs:
48 48 filename : full name of the data file in Jicamarca format (.r)
49 49
50 50 startUTSeconds : start date of the selected range, given in seconds
51 51 counted from 01/01/1970.
52 52 endUTSeconds : end date of the selected range, given in seconds
53 53 counted from 01/01/1970.
54 54
55 55 Return:
56 56 Boolean : returns True if the data file contains data within the specified
57 57 date range, otherwise returns False.
58 58
59 59 Exceptions:
60 60 If the file does not exist or cannot be opened
61 61 If the header cannot be read.
62 62
63 63 """
64 64 basicHeaderObj = BasicHeader(LOCALTIME)
65 65
66 66 try:
67 67 fp = open(filename,'rb')
68 68 except:
69 69 raise IOError, "The file %s can't be opened" %(filename)
70 70
71 71 sts = basicHeaderObj.read(fp)
72 72 fp.close()
73 73
74 74 if not(sts):
75 75 print "Skipping the file %s because it does not have a valid header" %(filename)
76 76 return 0
77 77
78 78 if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
79 79 return 0
80 80
81 81 return 1
82 82
83 83 def isFileinThisTime(filename, startTime, endTime):
84 84 """
85 85 Returns the datetime of the data file if it falls within the specified time range.
86 86
87 87 Inputs:
88 88 filename : full name of the data file in Jicamarca format (.r)
89 89
90 90 startTime : start time of the selected range, as a datetime.time object
91 91
92 92 endTime : end time of the selected range, as a datetime.time object
93 93
94 94 Return:
95 95 datetime : the datetime of the file's first block if it falls within the specified
96 96 time range, otherwise None.
97 97
98 98 Exceptions:
99 99 If the file does not exist or cannot be opened
100 100 If the header cannot be read.
101 101
102 102 """
103 103
104 104
105 105 try:
106 106 fp = open(filename,'rb')
107 107 except:
108 108 raise IOError, "The file %s can't be opened" %(filename)
109 109
110 110 basicHeaderObj = BasicHeader(LOCALTIME)
111 111 sts = basicHeaderObj.read(fp)
112 112 fp.close()
113 113
114 114 if not(sts):
115 115 print "Skipping the file %s because it does not have a valid header" %(filename)
116 116 return None
117 117
118 118 thisDatetime = basicHeaderObj.datatime
119 119 thisTime = basicHeaderObj.datatime.time()
120 120
121 121 if not ((startTime <= thisTime) and (endTime > thisTime)):
122 122 return None
123 123
124 124 return thisDatetime
125 125
126 126 def getlastFileFromPath(path, ext):
127 127 """
128 128 Filters the file list built from "path", keeping only the files that match the "PYYYYDDDSSS.ext" format,
129 129 and returns the last file of the list that remains after filtering.
130 130
131 131 Input:
132 132 path : folder containing the data files
133 133 ext : extension of the files contained in the folder
134 134
135 135 Return:
136 136 The last file of the given folder, without the path.
137 137 """
138 138 validFilelist = []
139 139 fileList = os.listdir(path)
140 140
141 141 # 0 1234 567 89A BCDE
142 142 # H YYYY DDD SSS .ext
143 143
144 144 for file in fileList:
145 145 try:
146 146 year = int(file[1:5])
147 147 doy = int(file[5:8])
148 148
149 149
150 150 except:
151 151 continue
152 152
153 153 if (os.path.splitext(file)[-1].lower() != ext.lower()):
154 154 continue
155 155
156 156 validFilelist.append(file)
157 157
158 158 if validFilelist:
159 159 validFilelist = sorted( validFilelist, key=str.lower )
160 160 return validFilelist[-1]
161 161
162 162 return None
163 163
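# Illustrative note (not part of the original code): the Jicamarca file-name convention
# xYYYYDDDSSS.ext used above can be unpacked as, e.g., for the hypothetical name "D2012045003.r":
#     int("D2012045003.r"[1:5])  -> 2012 (year)
#     int("D2012045003.r"[5:8])  -> 45   (day of year)
#     int("D2012045003.r"[8:11]) -> 3    (set number)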
164 164 def checkForRealPath(path, foldercounter, year, doy, set, ext):
165 165 """
166 166 Because Linux is case sensitive, checkForRealPath finds the correct name of a path.
167 167 It tries several combinations of upper- and lower-case names to determine
168 168 the exact path of a given file.
169 169
170 170 Example :
171 171 the correct file name is .../.../D2009307/P2009307367.ext
172 172
173 173 The function then tries the following combinations
174 174 .../.../y2009307367.ext
175 175 .../.../Y2009307367.ext
176 176 .../.../x2009307/y2009307367.ext
177 177 .../.../x2009307/Y2009307367.ext
178 178 .../.../X2009307/y2009307367.ext
179 179 .../.../X2009307/Y2009307367.ext
180 180 the last letter combination being, in this case, identical to the file searched for
181 181
182 182 Return:
183 183 If it finds the right combination, it returns the full path and the file name;
184 184 otherwise it returns None as the path and the last upper-case name combination
185 185 as the filename
186 186 """
187 187 fullfilename = None
188 188 find_flag = False
189 189 filename = None
190 190
191 191 prefixDirList = [None,'d','D']
192 192 if ext.lower() == ".r": #voltage
193 193 prefixFileList = ['d','D']
194 194 elif ext.lower() == ".pdata": #spectra
195 195 prefixFileList = ['p','P']
196 196 else:
197 197 return None, filename
198 198
199 199 #sweep over the possible combinations
200 200 for prefixDir in prefixDirList:
201 201 thispath = path
202 202 if prefixDir != None:
203 203 #build the directory name xYYYYDDD (x=d or x=D)
204 204 if foldercounter == 0:
205 205 thispath = os.path.join(path, "%s%04d%03d" % ( prefixDir, year, doy ))
206 206 else:
207 207 thispath = os.path.join(path, "%s%04d%03d_%02d" % ( prefixDir, year, doy , foldercounter))
208 208 for prefixFile in prefixFileList: #sweep over the two possible combinations of "D"
209 209 filename = "%s%04d%03d%03d%s" % ( prefixFile, year, doy, set, ext ) #build the file name xYYYYDDDSSS.ext
210 210 fullfilename = os.path.join( thispath, filename ) #build the full path
211 211
212 212 if os.path.exists( fullfilename ): #check that it exists
213 213 find_flag = True
214 214 break
215 215 if find_flag:
216 216 break
217 217
218 218 if not(find_flag):
219 219 return None, filename
220 220
221 221 return fullfilename, filename
222 222
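# Usage sketch (hypothetical paths, not part of the original code):
#     fullfilename, filename = checkForRealPath("/data/EW_Drift", 0, 2012, 45, 3, ".r")
#     if fullfilename is None:
#         print "file %s was not found" % filename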
223 223 def isDoyFolder(folder):
224 224 try:
225 225 year = int(folder[1:5])
226 226 except:
227 227 return 0
228 228
229 229 try:
230 230 doy = int(folder[5:8])
231 231 except:
232 232 return 0
233 233
234 234 return 1
235 235
236 236 class JRODataIO:
237 237
238 238 c = 3E8
239 239
240 240 isConfig = False
241 241
242 242 basicHeaderObj = BasicHeader(LOCALTIME)
243 243
244 244 systemHeaderObj = SystemHeader()
245 245
246 246 radarControllerHeaderObj = RadarControllerHeader()
247 247
248 248 processingHeaderObj = ProcessingHeader()
249 249
250 250 online = 0
251 251
252 252 dtype = None
253 253
254 254 pathList = []
255 255
256 256 filenameList = []
257 257
258 258 filename = None
259 259
260 260 ext = None
261 261
262 262 flagIsNewFile = 1
263 263
264 264 flagTimeBlock = 0
265 265
266 266 flagIsNewBlock = 0
267 267
268 268 fp = None
269 269
270 270 firstHeaderSize = 0
271 271
272 272 basicHeaderSize = 24
273 273
274 274 versionFile = 1103
275 275
276 276 fileSize = None
277 277
278 278 ippSeconds = None
279 279
280 280 fileSizeByHeader = None
281 281
282 282 fileIndex = None
283 283
284 284 profileIndex = None
285 285
286 286 blockIndex = None
287 287
288 288 nTotalBlocks = None
289 289
290 290 maxTimeStep = 30
291 291
292 292 lastUTTime = None
293 293
294 294 datablock = None
295 295
296 296 dataOut = None
297 297
298 298 blocksize = None
299 299
300 300 def __init__(self):
301 301
302 302 raise ValueError, "Not implemented"
303 303
304 304 def run(self):
305 305
306 306 raise ValueError, "Not implemented"
307 307
308 308 def getOutput(self):
309 309
310 310 return self.dataOut
311 311
312 312 class JRODataReader(JRODataIO, ProcessingUnit):
313 313
314 314 nReadBlocks = 0
315 315
316 316 delay = 10 #number of seconds to wait for a new file
317 317
318 318 nTries = 3 #number of tries
319 319
320 320 nFiles = 3 #number of files to search
321 321
322 322 path = None
323 323
324 324 foldercounter = 0
325 325
326 326 flagNoMoreFiles = 0
327 327
328 328 datetimeList = []
329 329
330 330 __isFirstTimeOnline = 1
331 331
332 332 __printInfo = True
333 333
334 334 profileIndex = None
335 335
336 336 def __init__(self):
337 337
338 338 """
339 339
340 340 """
341 341
342 342 raise ValueError, "This method has not been implemented"
343 343
344 344
345 345 def createObjByDefault(self):
346 346 """
347 347
348 348 """
349 349 raise ValueError, "This method has not been implemented"
350 350
351 351 def getBlockDimension(self):
352 352
353 353 raise ValueError, "Not implemented"
354 354
355 355 def __searchFilesOffLine(self,
356 356 path,
357 357 startDate,
358 358 endDate,
359 359 startTime=datetime.time(0,0,0),
360 360 endTime=datetime.time(23,59,59),
361 361 set=None,
362 362 expLabel='',
363 363 ext='.r',
364 364 walk=True):
365 365
366 366 pathList = []
367 367
368 368 if not walk:
369 369 pathList.append(path)
370 370
371 371 else:
372 372 dirList = []
373 373 for thisPath in os.listdir(path):
374 374 if not os.path.isdir(os.path.join(path,thisPath)):
375 375 continue
376 376 if not isDoyFolder(thisPath):
377 377 continue
378 378
379 379 dirList.append(thisPath)
380 380
381 381 if not(dirList):
382 382 return None, None
383 383
384 384 thisDate = startDate
385 385
386 386 while(thisDate <= endDate):
387 387 year = thisDate.timetuple().tm_year
388 388 doy = thisDate.timetuple().tm_yday
389 389
390 390 matchlist = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy) + '*')
391 391 if len(matchlist) == 0:
392 392 thisDate += datetime.timedelta(1)
393 393 continue
394 394 for match in matchlist:
395 395 pathList.append(os.path.join(path,match,expLabel))
396 396
397 397 thisDate += datetime.timedelta(1)
398 398
399 399 if pathList == []:
400 400 print "No folders were found for the date range: %s-%s" %(startDate, endDate)
401 401 return None, None
402 402
403 403 print "%d folder(s) was(were) found for the date range: %s - %s" %(len(pathList), startDate, endDate)
404 404
405 405 filenameList = []
406 406 datetimeList = []
407 407
408 408 for i in range(len(pathList)):
409 409
410 410 thisPath = pathList[i]
411 411
412 412 fileList = glob.glob1(thisPath, "*%s" %ext)
413 413 fileList.sort()
414 414
415 415 for file in fileList:
416 416
417 417 filename = os.path.join(thisPath,file)
418 418 thisDatetime = isFileinThisTime(filename, startTime, endTime)
419 419
420 420 if not(thisDatetime):
421 421 continue
422 422
423 423 filenameList.append(filename)
424 424 datetimeList.append(thisDatetime)
425 425
426 426 if not(filenameList):
427 427 print "No files were found for the time range %s - %s" %(startTime, endTime)
428 428 return None, None
429 429
430 430 print "%d file(s) was(were) found for the time range: %s - %s" %(len(filenameList), startTime, endTime)
431 431 print
432 432
433 433 for i in range(len(filenameList)):
434 434 print "%s -> [%s]" %(filenameList[i], datetimeList[i].ctime())
435 435
436 436 self.filenameList = filenameList
437 437 self.datetimeList = datetimeList
438 438
439 439 return pathList, filenameList
440 440
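# Illustrative check of the doy-folder pattern used in __searchFilesOffLine above:
# '?' matches the single prefix letter (d/D) and '*' matches the optional "_NN" suffix.
#     fnmatch.filter(['d2012045', 'D2012045_01', 'd2012046'], '?' + '%4.4d%3.3d' % (2012, 45) + '*')
#     -> ['d2012045', 'D2012045_01']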
441 441 def __searchFilesOnLine(self, path, expLabel = "", ext = None, walk=True):
442 442
443 443 """
444 444 Looks for the last file in the last folder (whether or not it is determined by startDateTime) and
445 445 returns the file found, along with other data.
446 446
447 447 Input:
448 448 path : folder containing the data files
449 449
450 450 expLabel : name of the sub-experiment (subfolder)
451 451
452 452 ext : extension of the files
453 453
454 454 walk : if enabled, the search is carried out inside the doy subdirectories (doypath)
455 455
456 456 Return:
457 457 directory : the directory where the file was found
458 458 filename : the last file of the given folder
459 459 year : the year
460 460 doy : the day of the year
461 461 set : the set number of the file
462 462
463 463
464 464 """
465 465 dirList = []
466 466
467 467 if not walk:
468 468 fullpath = path
469 469
470 470 else:
471 471 #Keep only the directories
472 472 for thisPath in os.listdir(path):
473 473 if not os.path.isdir(os.path.join(path,thisPath)):
474 474 continue
475 475 if not isDoyFolder(thisPath):
476 476 continue
477 477
478 478 dirList.append(thisPath)
479 479
480 480 if not(dirList):
481 481 return None, None, None, None, None
482 482
483 483 dirList = sorted( dirList, key=str.lower )
484 484
485 485 doypath = dirList[-1]
486 486 foldercounter = int(doypath.split('_')[1]) if len(doypath.split('_'))>1 else 0
487 487 fullpath = os.path.join(path, doypath, expLabel)
488 488
489 489
490 490 print "%s folder was found: " %(fullpath )
491 491
492 492 filename = getlastFileFromPath(fullpath, ext)
493 493
494 494 if not(filename):
495 495 return None, None, None, None, None
496 496
497 497 print "%s file was found" %(filename)
498 498
499 499 if not(self.__verifyFile(os.path.join(fullpath, filename))):
500 500 return None, None, None, None, None
501 501
502 502 year = int( filename[1:5] )
503 503 doy = int( filename[5:8] )
504 504 set = int( filename[8:11] )
505 505
506 506 return fullpath, foldercounter, filename, year, doy, set
507 507
508 508 def __setNextFileOffline(self):
509 509
510 510 idFile = self.fileIndex
511 511
512 512 while (True):
513 513 idFile += 1
514 514 if not(idFile < len(self.filenameList)):
515 515 self.flagNoMoreFiles = 1
516 516 print "No more Files"
517 517 return 0
518 518
519 519 filename = self.filenameList[idFile]
520 520
521 521 if not(self.__verifyFile(filename)):
522 522 continue
523 523
524 524 fileSize = os.path.getsize(filename)
525 525 fp = open(filename,'rb')
526 526 break
527 527
528 528 self.flagIsNewFile = 1
529 529 self.fileIndex = idFile
530 530 self.filename = filename
531 531 self.fileSize = fileSize
532 532 self.fp = fp
533 533
534 534 print "Setting the file: %s"%self.filename
535 535
536 536 return 1
537 537
538 538 def __setNextFileOnline(self):
539 539 """
540 540 Looks, within a specific folder, for the next file that has enough data to be read; if
541 541 it does not find a valid file it waits for a given time and then searches the next n
542 542 possible files.
543 543
544 544 Affected:
545 545 self.flagIsNewFile
546 546 self.filename
547 547 self.fileSize
548 548 self.fp
549 549 self.set
550 550 self.flagNoMoreFiles
551 551
552 552 Return:
553 553 0 : if, after searching for the next valid file, none could be found
554 554 1 : if the file was opened successfully and is ready to be read
555 555
556 556 Exceptions:
557 557 If a given file cannot be opened
558 558 """
559 559 nFiles = 0
560 560 fileOk_flag = False
561 561 firstTime_flag = True
562 562
563 563 self.set += 1
564 564
565 565 if self.set > 999:
566 566 self.set = 0
567 567 self.foldercounter += 1
568 568
569 569 #look for the first available file
570 570 fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
571 571 if fullfilename:
572 572 if self.__verifyFile(fullfilename, False):
573 573 fileOk_flag = True
574 574
575 575 #if no file is found, wait and search again
576 576 if not(fileOk_flag):
577 577 for nFiles in range(self.nFiles+1): #search the next self.nFiles+1 possible files
578 578
579 579 if firstTime_flag: #on the first pass, loop self.nTries times
580 580 tries = self.nTries
581 581 else:
582 582 tries = 1 #on later passes, try only once
583 583
584 584 for nTries in range( tries ):
585 585 if firstTime_flag:
586 586 print "\tWaiting %0.2f sec for the file \"%s\" , try %03d ..." % ( self.delay, filename, nTries+1 )
587 587 time.sleep( self.delay )
588 588 else:
589 589 print "\tSearching next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext)
590 590
591 591 fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
592 592 if fullfilename:
593 593 if self.__verifyFile(fullfilename):
594 594 fileOk_flag = True
595 595 break
596 596
597 597 if fileOk_flag:
598 598 break
599 599
600 600 firstTime_flag = False
601 601
602 602 print "\tSkipping the file \"%s\" because it does not exist" % filename
603 603 self.set += 1
604 604
605 605 if nFiles == (self.nFiles-1): #if the file was not found, move on and search in the next folder
606 606 self.set = 0
607 607 self.doy += 1
608 608 self.foldercounter = 0
609 609
610 610 if fileOk_flag:
611 611 self.fileSize = os.path.getsize( fullfilename )
612 612 self.filename = fullfilename
613 613 self.flagIsNewFile = 1
614 614 if self.fp != None: self.fp.close()
615 615 self.fp = open(fullfilename, 'rb')
616 616 self.flagNoMoreFiles = 0
617 617 print 'Setting the file: %s' % fullfilename
618 618 else:
619 619 self.fileSize = 0
620 620 self.filename = None
621 621 self.flagIsNewFile = 0
622 622 self.fp = None
623 623 self.flagNoMoreFiles = 1
624 624 print 'No more Files'
625 625
626 626 return fileOk_flag
627 627
628 628
629 629 def setNextFile(self):
630 630 if self.fp != None:
631 631 self.fp.close()
632 632
633 633 if self.online:
634 634 newFile = self.__setNextFileOnline()
635 635 else:
636 636 newFile = self.__setNextFileOffline()
637 637
638 638 if not(newFile):
639 639 return 0
640 640
641 641 self.__readFirstHeader()
642 642 self.nReadBlocks = 0
643 643 return 1
644 644
645 645 def __waitNewBlock(self):
646 646 """
647 647 Returns 1 if a new data block was found, 0 otherwise.
648 648
649 649 If the reading mode is offline it always returns 0.
650 650 """
651 651 if not self.online:
652 652 return 0
653 653
654 654 if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
655 655 return 0
656 656
657 657 currentPointer = self.fp.tell()
658 658
659 659 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
660 660
661 661 for nTries in range( self.nTries ):
662 662
663 663 self.fp.close()
664 664 self.fp = open( self.filename, 'rb' )
665 665 self.fp.seek( currentPointer )
666 666
667 667 self.fileSize = os.path.getsize( self.filename )
668 668 currentSize = self.fileSize - currentPointer
669 669
670 670 if ( currentSize >= neededSize ):
671 671 self.__rdBasicHeader()
672 672 return 1
673 673
674 674 print "\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
675 675 time.sleep( self.delay )
676 676
677 677
678 678 return 0
679 679
680 680 def __jumpToLastBlock(self):
681 681
682 682 if not(self.__isFirstTimeOnline):
683 683 return
684 684
685 685 csize = self.fileSize - self.fp.tell()
686 686
687 687 #skip the first data block
688 688 if csize > self.processingHeaderObj.blockSize:
689 689 self.fp.seek(self.fp.tell() + self.processingHeaderObj.blockSize)
690 690 else:
691 691 return
692 692
693 693 csize = self.fileSize - self.fp.tell()
694 694 neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
695 695 factor = int(csize/neededsize)
696 696 if factor > 0:
697 697 self.fp.seek(self.fp.tell() + factor*neededsize)
698 698
699 699 self.flagIsNewFile = 0
700 700 self.__isFirstTimeOnline = 0
701 701
702 702
703 703 def __setNewBlock(self):
704 704
705 705 if self.fp == None:
706 706 return 0
707 707
708 708 if self.online:
709 709 self.__jumpToLastBlock()
710 710
711 711 if self.flagIsNewFile:
712 712 return 1
713 713
714 714 self.lastUTTime = self.basicHeaderObj.utc
715 715 currentSize = self.fileSize - self.fp.tell()
716 716 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
717 717
718 718 if (currentSize >= neededSize):
719 719 self.__rdBasicHeader()
720 720 return 1
721 721
722 722 if self.__waitNewBlock():
723 723 return 1
724 724
725 725 if not(self.setNextFile()):
726 726 return 0
727 727
728 728 deltaTime = self.basicHeaderObj.utc - self.lastUTTime #
729 729
730 730 self.flagTimeBlock = 0
731 731
732 732 if deltaTime > self.maxTimeStep:
733 733 self.flagTimeBlock = 1
734 734
735 735 return 1
736 736
737 737
738 738 def readNextBlock(self):
739 739 if not(self.__setNewBlock()):
740 740 return 0
741 741
742 742 if not(self.readBlock()):
743 743 return 0
744 744
745 745 return 1
746 746
747 747 def __rdProcessingHeader(self, fp=None):
748 748 if fp == None:
749 749 fp = self.fp
750 750
751 751 self.processingHeaderObj.read(fp)
752 752
753 753 def __rdRadarControllerHeader(self, fp=None):
754 754 if fp == None:
755 755 fp = self.fp
756 756
757 757 self.radarControllerHeaderObj.read(fp)
758 758
759 759 def __rdSystemHeader(self, fp=None):
760 760 if fp == None:
761 761 fp = self.fp
762 762
763 763 self.systemHeaderObj.read(fp)
764 764
765 765 def __rdBasicHeader(self, fp=None):
766 766 if fp == None:
767 767 fp = self.fp
768 768
769 769 self.basicHeaderObj.read(fp)
770 770
771 771
772 772 def __readFirstHeader(self):
773 773 self.__rdBasicHeader()
774 774 self.__rdSystemHeader()
775 775 self.__rdRadarControllerHeader()
776 776 self.__rdProcessingHeader()
777 777
778 778 self.firstHeaderSize = self.basicHeaderObj.size
779 779
780 780 datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
781 781 if datatype == 0:
782 782 datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
783 783 elif datatype == 1:
784 784 datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')])
785 785 elif datatype == 2:
786 786 datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')])
787 787 elif datatype == 3:
788 788 datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')])
789 789 elif datatype == 4:
790 790 datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')])
791 791 elif datatype == 5:
792 792 datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')])
793 793 else:
794 794 raise ValueError, 'Data type was not defined'
795 795
796 796 self.dtype = datatype_str
797 797 self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
798 798 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1)
799 799 # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
800 800 # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
801 801 self.getBlockDimension()
802 802
803 803
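# Worked example (illustrative) of the conversion done in __readFirstHeader above, assuming
# the radar controller IPP is given in km:
#     ippSeconds = 2 * 1000 * ipp / c = 2 * 1000 * 300 / 3e8 = 0.002 s for a 300 km IPP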
804 804 def __verifyFile(self, filename, msgFlag=True):
805 805 msg = None
806 806 try:
807 807 fp = open(filename, 'rb')
808 808 currentPosition = fp.tell()
809 809 except:
810 810 if msgFlag:
811 811 print "The file %s can't be opened" % (filename)
812 812 return False
813 813
814 814 neededSize = self.processingHeaderObj.blockSize + self.firstHeaderSize
815 815
816 816 if neededSize == 0:
817 817 basicHeaderObj = BasicHeader(LOCALTIME)
818 818 systemHeaderObj = SystemHeader()
819 819 radarControllerHeaderObj = RadarControllerHeader()
820 820 processingHeaderObj = ProcessingHeader()
821 821
822 822 try:
823 823 if not( basicHeaderObj.read(fp) ): raise IOError
824 824 if not( systemHeaderObj.read(fp) ): raise IOError
825 825 if not( radarControllerHeaderObj.read(fp) ): raise IOError
826 826 if not( processingHeaderObj.read(fp) ): raise IOError
827 827 data_type = int(numpy.log2((processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
828 828
829 829 neededSize = processingHeaderObj.blockSize + basicHeaderObj.size
830 830
831 831 except:
832 832 if msgFlag:
833 833 print "\tThe file %s is empty or does not have enough data" % filename
834 834
835 835 fp.close()
836 836 return False
837 837 else:
838 838 msg = "\tSkipping the file %s because it does not have enough data" %filename
839 839
840 840 fp.close()
841 841 fileSize = os.path.getsize(filename)
842 842 currentSize = fileSize - currentPosition
843 843 if currentSize < neededSize:
844 844 if msgFlag and (msg != None):
845 845 print msg #print"\tSkipping the file %s due to it hasn't enough data" %filename
846 846 return False
847 847
848 848 return True
849 849
850 850 def setup(self,
851 851 path=None,
852 852 startDate=None,
853 853 endDate=None,
854 854 startTime=datetime.time(0,0,0),
855 855 endTime=datetime.time(23,59,59),
856 856 set=0,
857 857 expLabel = "",
858 858 ext = None,
859 859 online = False,
860 860 delay = 60,
861 861 walk = True):
862 862
863 863 if path == None:
864 864 raise ValueError, "The path is not valid"
865 865
866 866 if ext == None:
867 867 ext = self.ext
868 868
869 869 if online:
870 870 print "Searching files in online mode..."
871 871
872 872 for nTries in range( self.nTries ):
873 873 fullpath, foldercounter, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext, walk=walk)
874 874
875 875 if fullpath:
876 876 break
877 877
878 878 print '\tWaiting %0.2f sec for a valid file in %s: try %02d ...' % (self.delay, path, nTries+1)
879 879 time.sleep( self.delay )
880 880
881 881 if not(fullpath):
882 882 print "There are no valid files in %s" % path
883 883 return None
884 884
885 885 self.year = year
886 886 self.doy = doy
887 887 self.set = set - 1
888 888 self.path = path
889 889 self.foldercounter = foldercounter
890 890
891 891 else:
892 892 print "Searching files in offline mode ..."
893 893 pathList, filenameList = self.__searchFilesOffLine(path, startDate=startDate, endDate=endDate,
894 894 startTime=startTime, endTime=endTime,
895 895 set=set, expLabel=expLabel, ext=ext,
896 896 walk=walk)
897 897
898 898 if not(pathList):
899 899 print "No *%s files in the folder %s \nfor the range: %s - %s"%(ext, path,
900 900 datetime.datetime.combine(startDate,startTime).ctime(),
901 901 datetime.datetime.combine(endDate,endTime).ctime())
902 902
903 903 sys.exit(-1)
904 904
905 905
906 906 self.fileIndex = -1
907 907 self.pathList = pathList
908 908 self.filenameList = filenameList
909 909
910 910 self.online = online
911 911 self.delay = delay
912 912 ext = ext.lower()
913 913 self.ext = ext
914 914
915 915 if not(self.setNextFile()):
916 916 if (startDate!=None) and (endDate!=None):
917 917 print "No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
918 918 elif startDate != None:
919 919 print "No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
920 920 else:
921 921 print "No files"
922 922
923 923 sys.exit(-1)
924 924
925 925 # self.updateDataHeader()
926 926
927 927 return self.dataOut
928 928
929 929 def getBasicHeader(self):
930 930
931 931 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000. + self.profileIndex * self.ippSeconds
932 932
933 933 self.dataOut.flagTimeBlock = self.flagTimeBlock
934 934
935 935 self.dataOut.timeZone = self.basicHeaderObj.timeZone
936 936
937 937 self.dataOut.dstFlag = self.basicHeaderObj.dstFlag
938 938
939 939 self.dataOut.errorCount = self.basicHeaderObj.errorCount
940 940
941 941 self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime
942 942
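# Worked example (illustrative) of the profile timestamp built in getBasicHeader above:
#     utctime = utc + miliSecond/1000. + profileIndex * ippSeconds
#             = 1000000000 + 250/1000. + 10 * 0.002 = 1000000000.27 s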
943 943 def getFirstHeader(self):
944 944
945 945 raise ValueError, "This method has not been implemented"
946 946
947 947 def getData(self):
948 948
949 949 raise ValueError, "This method has not been implemented"
950 950
951 951 def hasNotDataInBuffer(self):
952 952
953 953 raise ValueError, "This method has not been implemented"
954 954
955 955 def readBlock(self):
956 956
957 957 raise ValueError, "This method has not been implemented"
958 958
959 959 def isEndProcess(self):
960 960
961 961 return self.flagNoMoreFiles
962 962
963 963 def printReadBlocks(self):
964 964
965 965 print "Number of read blocks per file %04d" %self.nReadBlocks
966 966
967 967 def printTotalBlocks(self):
968 968
969 969 print "Number of read blocks %04d" %self.nTotalBlocks
970 970
971 971 def printNumberOfBlock(self):
972 972
973 973 if self.flagIsNewBlock:
974 974 print "Block No. %04d, Total blocks %04d -> %s" %(self.basicHeaderObj.dataBlock, self.nTotalBlocks, self.dataOut.datatime.ctime())
975 975
976 976 def printInfo(self):
977 977
978 978 if self.__printInfo == False:
979 979 return
980 980
981 981 self.basicHeaderObj.printInfo()
982 982 self.systemHeaderObj.printInfo()
983 983 self.radarControllerHeaderObj.printInfo()
984 984 self.processingHeaderObj.printInfo()
985 985
986 986 self.__printInfo = False
987 987
988 988
989 989 def run(self, **kwargs):
990 990
991 991 if not(self.isConfig):
992 992
993 993 # self.dataOut = dataOut
994 994 self.setup(**kwargs)
995 995 self.isConfig = True
996 996
997 997 self.getData()
998 998
999 999 class JRODataWriter(JRODataIO, Operation):
1000 1000
1001 1001 """
1002 1002 This class writes data to processed files (.r or .pdata). The data is always
1003 1003 written in blocks.
1004 1004 """
1005 1005
1006 1006 blockIndex = 0
1007 1007
1008 1008 path = None
1009 1009
1010 1010 setFile = None
1011 1011
1012 1012 profilesPerBlock = None
1013 1013
1014 1014 blocksPerFile = None
1015 1015
1016 1016 nWriteBlocks = 0
1017 1017
1018 1018 def __init__(self, dataOut=None):
1019 1019 raise ValueError, "Not implemented"
1020 1020
1021 1021
1022 1022 def hasAllDataInBuffer(self):
1023 1023 raise ValueError, "Not implemented"
1024 1024
1025 1025
1026 1026 def setBlockDimension(self):
1027 1027 raise ValueError, "Not implemented"
1028 1028
1029 1029
1030 1030 def writeBlock(self):
1031 1031 raise ValueError, "Not implemented"
1032 1032
1033 1033
1034 1034 def putData(self):
1035 1035 raise ValueError, "Not implemented"
1036 1036
1037 1037
1038 1038 def setBasicHeader(self):
1039 1039
1040 1040 self.basicHeaderObj.size = self.basicHeaderSize #bytes
1041 1041 self.basicHeaderObj.version = self.versionFile
1042 1042 self.basicHeaderObj.dataBlock = self.nTotalBlocks
1043 1043
1044 1044 utc = numpy.floor(self.dataOut.utctime)
1045 1045 milisecond = (self.dataOut.utctime - utc)* 1000.0
1046 1046
1047 1047 self.basicHeaderObj.utc = utc
1048 1048 self.basicHeaderObj.miliSecond = milisecond
1049 1049 self.basicHeaderObj.timeZone = self.dataOut.timeZone
1050 1050 self.basicHeaderObj.dstFlag = self.dataOut.dstFlag
1051 1051 self.basicHeaderObj.errorCount = self.dataOut.errorCount
1052 1052
1053 1053 def setFirstHeader(self):
1054 1054 """
1055 1055 Gets a copy of the First Header
1056 1056
1057 1057 Affected:
1058 1058
1059 1059 self.basicHeaderObj
1060 1060 self.systemHeaderObj
1061 1061 self.radarControllerHeaderObj
1062 1062 self.processingHeaderObj
1063 1063
1064 1064 Return:
1065 1065 None
1066 1066 """
1067 1067
1068 1068 raise ValueError, "Not implemented"
1069 1069
1070 1070 def __writeFirstHeader(self):
1071 1071 """
1072 1072 Writes the first header of the file, that is, the Basic Header and the Long Header (SystemHeader, RadarControllerHeader, ProcessingHeader)
1073 1073
1074 1074 Affected:
1075 1075 __dataType
1076 1076
1077 1077 Return:
1078 1078 None
1079 1079 """
1080 1080
1081 1081 # COMPUTE PARAMETERS
1082 1082
1083 1083 sizeLongHeader = self.systemHeaderObj.size + self.radarControllerHeaderObj.size + self.processingHeaderObj.size
1084 1084 self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader
1085 1085
1086 1086 self.basicHeaderObj.write(self.fp)
1087 1087 self.systemHeaderObj.write(self.fp)
1088 1088 self.radarControllerHeaderObj.write(self.fp)
1089 1089 self.processingHeaderObj.write(self.fp)
1090 1090
1091 1091 self.dtype = self.dataOut.dtype
1092 1092
1093 1093 def __setNewBlock(self):
1094 1094 """
1095 1095 If it is a new file, writes the First Header; otherwise writes only the Basic Header
1096 1096
1097 1097 Return:
1098 1098 0 : if nothing could be written
1099 1099 1 : if the Basic Header or the First Header was written
1100 1100 """
1101 1101 if self.fp == None:
1102 1102 self.setNextFile()
1103 1103
1104 1104 if self.flagIsNewFile:
1105 1105 return 1
1106 1106
1107 1107 if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
1108 1108 self.basicHeaderObj.write(self.fp)
1109 1109 return 1
1110 1110
1111 1111 if not( self.setNextFile() ):
1112 1112 return 0
1113 1113
1114 1114 return 1
1115 1115
1116 1116
1117 1117 def writeNextBlock(self):
1118 1118 """
1119 1119 Selects the next data block and writes it to a file
1120 1120
1121 1121 Return:
1122 1122 0 : if the data block could not be written
1123 1123 1 : if the data block was written
1124 1124 """
1125 1125 if not( self.__setNewBlock() ):
1126 1126 return 0
1127 1127
1128 1128 self.writeBlock()
1129 1129
1130 1130 return 1
1131 1131
1132 1132 def setNextFile(self):
1133 1133 """
1134 1134 Determines the next file to be written
1135 1135
1136 1136 Affected:
1137 1137 self.filename
1138 1138 self.subfolder
1139 1139 self.fp
1140 1140 self.setFile
1141 1141 self.flagIsNewFile
1142 1142
1143 1143 Return:
1144 1144 0 : if the file cannot be written
1145 1145 1 : if the file is ready to be written
1146 1146 """
1147 1147 ext = self.ext
1148 1148 path = self.path
1149 1149
1150 1150 if self.fp != None:
1151 1151 self.fp.close()
1152 1152
1153 1153 timeTuple = time.localtime( self.dataOut.utctime)
1154 1154 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
1155 1155
1156 1156 fullpath = os.path.join( path, subfolder )
1157 1157 if not( os.path.exists(fullpath) ):
1158 1158 os.mkdir(fullpath)
1159 1159 self.setFile = -1 #initialize the set counter
1160 1160 else:
1161 1161 filesList = os.listdir( fullpath )
1162 1162 if len( filesList ) > 0:
1163 1163 filesList = sorted( filesList, key=str.lower )
1164 1164 filen = filesList[-1]
1165 1165 # the filename must have the following format
1166 1166 # 0 1234 567 89A BCDE (hex)
1167 1167 # x YYYY DDD SSS .ext
1168 1168 if isNumber( filen[8:11] ):
1169 1169 self.setFile = int( filen[8:11] ) #initialize the set counter with the set of the last file
1170 1170 else:
1171 1171 self.setFile = -1
1172 1172 else:
1173 1173 self.setFile = -1 #initialize the set counter
1174 1174
1175 1175 setFile = self.setFile
1176 1176 setFile += 1
1177 1177
1178 1178 file = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
1179 1179 timeTuple.tm_year,
1180 1180 timeTuple.tm_yday,
1181 1181 setFile,
1182 1182 ext )
1183 1183
1184 1184 filename = os.path.join( path, subfolder, file )
1185 1185
1186 1186 fp = open( filename,'wb' )
1187 1187
1188 1188 self.blockIndex = 0
1189 1189
1190 1190 #save attributes
1191 1191 self.filename = filename
1192 1192 self.subfolder = subfolder
1193 1193 self.fp = fp
1194 1194 self.setFile = setFile
1195 1195 self.flagIsNewFile = 1
1196 1196
1197 1197 self.setFirstHeader()
1198 1198
1199 1199 print 'Writing the file: %s'%self.filename
1200 1200
1201 1201 self.__writeFirstHeader()
1202 1202
1203 1203 return 1
1204 1204
1205 1205 def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=None, set=0, ext=None):
1206 1206 """
1207 1207 Sets the format in which the data will be saved and writes the First Header
1208 1208
1209 1209 Inputs:
1210 1210 path : destination path where the new files will be written
1211 1211 format : format in which a file will be saved
1212 1212 set : the set number of the file
1213 1213
1214 1214 Return:
1215 1215 0 : if the setup was not successful
1216 1216 1 : if the setup was successful
1217 1217 """
1218 1218
1219 1219 if ext == None:
1220 1220 ext = self.ext
1221 1221
1222 1222 ext = ext.lower()
1223 1223
1224 1224 self.ext = ext
1225 1225
1226 1226 self.path = path
1227 1227
1228 1228 self.setFile = set - 1
1229 1229
1230 1230 self.blocksPerFile = blocksPerFile
1231 1231
1232 1232 self.profilesPerBlock = profilesPerBlock
1233 1233
1234 1234 self.dataOut = dataOut
1235 1235
1236 1236 if not(self.setNextFile()):
1237 1237 print "There is no next file"
1238 1238 return 0
1239 1239
1240 1240 self.setBlockDimension()
1241 1241
1242 1242 return 1
1243 1243
1244 1244 def run(self, dataOut, **kwargs):
1245 1245
1246 1246 if not(self.isConfig):
1247 1247
1248 1248 self.setup(dataOut, **kwargs)
1249 1249 self.isConfig = True
1250 1250
1251 1251 self.putData()
1252 1252
1253 1253 class VoltageReader(JRODataReader):
1254 1254 """
1255 1255 This class reads voltage data from files in rawdata format (.r). The data is
1256 1256 always read in blocks. The data read (a 3-dimensional array:
1257 1257 profiles*heights*channels) is stored in the "buffer" variable.
1258 1258
1259 1259 profiles * heights * channels
1260 1260
1261 1261 This class contains instances (objects) of the BasicHeader, SystemHeader,
1262 1262 RadarControllerHeader and Voltage classes. The first three are used to store data
1263 1263 header information (metadata), and the fourth (Voltage) to obtain and store a profile of
1264 1264 data from the "buffer" each time the "getData" method is called.
1265 1265
1266 1266 Example:
1267 1267
1268 1268 dpath = "/home/myuser/data"
1269 1269
1270 1270 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
1271 1271
1272 1272 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
1273 1273
1274 1274 readerObj = VoltageReader()
1275 1275
1276 1276 readerObj.setup(dpath, startTime, endTime)
1277 1277
1278 1278 while(True):
1279 1279
1280 1280 #to get one profile
1281 1281 profile = readerObj.getData()
1282 1282
1283 1283 #print the profile
1284 1284 print profile
1285 1285
1286 1286 #If you want to see all datablock
1287 1287 print readerObj.datablock
1288 1288
1289 1289 if readerObj.flagNoMoreFiles:
1290 1290 break
1291 1291
1292 1292 """
1293 1293
1294 1294 ext = ".r"
1295 1295
1296 1296 optchar = "D"
1297 1297 dataOut = None
1298 1298
1299 1299
1300 1300 def __init__(self):
1301 1301 """
1302 1302 Initializer of the VoltageReader class for reading voltage data.
1303 1303
1304 1304 Input:
1305 1305 dataOut : object of the Voltage class. This object will be used to
1306 1306 store a data profile every time a request is made
1307 1307 (getData). The profile will be obtained from the data buffer;
1308 1308 if the buffer is empty, a new block of data will be
1309 1309 read.
1310 1310 If this parameter is not passed, one is created internally.
1311 1311
1312 1312 Affected variables:
1313 1313 self.dataOut
1314 1314
1315 1315 Return:
1316 1316 None
1317 1317 """
1318 1318
1319 1319 self.isConfig = False
1320 1320
1321 1321 self.datablock = None
1322 1322
1323 1323 self.utc = 0
1324 1324
1325 1325 self.ext = ".r"
1326 1326
1327 1327 self.optchar = "D"
1328 1328
1329 1329 self.basicHeaderObj = BasicHeader(LOCALTIME)
1330 1330
1331 1331 self.systemHeaderObj = SystemHeader()
1332 1332
1333 1333 self.radarControllerHeaderObj = RadarControllerHeader()
1334 1334
1335 1335 self.processingHeaderObj = ProcessingHeader()
1336 1336
1337 1337 self.online = 0
1338 1338
1339 1339 self.fp = None
1340 1340
1341 1341 self.idFile = None
1342 1342
1343 1343 self.dtype = None
1344 1344
1345 1345 self.fileSizeByHeader = None
1346 1346
1347 1347 self.filenameList = []
1348 1348
1349 1349 self.filename = None
1350 1350
1351 1351 self.fileSize = None
1352 1352
1353 1353 self.firstHeaderSize = 0
1354 1354
1355 1355 self.basicHeaderSize = 24
1356 1356
1357 1357 self.pathList = []
1358 1358
1359 1359 self.filenameList = []
1360 1360
1361 1361 self.lastUTTime = 0
1362 1362
1363 1363 self.maxTimeStep = 30
1364 1364
1365 1365 self.flagNoMoreFiles = 0
1366 1366
1367 1367 self.set = 0
1368 1368
1369 1369 self.path = None
1370 1370
1371 1371 self.profileIndex = 2**32-1
1372 1372
1373 1373 self.delay = 3 #seconds
1374 1374
1375 1375 self.nTries = 3 #number of tries
1376 1376
1377 1377 self.nFiles = 3 #number of files to search
1378 1378
1379 1379 self.nReadBlocks = 0
1380 1380
1381 1381 self.flagIsNewFile = 1
1382 1382
1383 1383 self.__isFirstTimeOnline = 1
1384 1384
1385 1385 self.ippSeconds = 0
1386 1386
1387 1387 self.flagTimeBlock = 0
1388 1388
1389 1389 self.flagIsNewBlock = 0
1390 1390
1391 1391 self.nTotalBlocks = 0
1392 1392
1393 1393 self.blocksize = 0
1394 1394
1395 1395 self.dataOut = self.createObjByDefault()
1396 1396
1397 1397 def createObjByDefault(self):
1398 1398
1399 1399 dataObj = Voltage()
1400 1400
1401 1401 return dataObj
1402 1402
1403 1403 def __hasNotDataInBuffer(self):
1404 1404 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
1405 1405 return 1
1406 1406 return 0
1407 1407
1408 1408
1409 1409 def getBlockDimension(self):
1410 1410 """
1411 1411 Gets the number of points to read per data block
1412 1412
1413 1413 Affected:
1414 1414 self.blocksize
1415 1415
1416 1416 Return:
1417 1417 None
1418 1418 """
1419 1419 pts2read = self.processingHeaderObj.profilesPerBlock * self.processingHeaderObj.nHeights * self.systemHeaderObj.nChannels
1420 1420 self.blocksize = pts2read
1421 1421
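# Worked example (illustrative) of the block size computed above:
#     pts2read = profilesPerBlock * nHeights * nChannels = 128 * 200 * 2 = 51200
# complex samples read per call to readBlock().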
1422 1422
1423 1423 def readBlock(self):
1424 1424 """
1425 1425 readBlock reads the data block from the current position of the file pointer
1426 1426 (self.fp) and updates all the parameters related to the data block
1427 1427 (metadata + data). The data read is stored in the buffer and the buffer counter
1428 1428 is set to 0
1429 1429
1430 1430 Inputs:
1431 1431 None
1432 1432
1433 1433 Return:
1434 1434 None
1435 1435
1436 1436 Affected:
1437 1437 self.profileIndex
1438 1438 self.datablock
1439 1439 self.flagIsNewFile
1440 1440 self.flagIsNewBlock
1441 1441 self.nTotalBlocks
1442 1442
1443 1443 Exceptions:
1444 1444 If a block read is not a valid block
1445 1445 """
1446 1446
1447 1447 junk = numpy.fromfile( self.fp, self.dtype, self.blocksize )
1448 1448
1449 1449 try:
1450 1450 junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
1451 1451 except:
1452 1452 print "The block read (%3d) does not have enough data" %self.nReadBlocks
1453 1453 return 0
1454 1454
1455 1455 junk = numpy.transpose(junk, (2,0,1))
1456 1456 self.datablock = junk['real'] + junk['imag']*1j
1457 1457
1458 1458 self.profileIndex = 0
1459 1459
1460 1460 self.flagIsNewFile = 0
1461 1461 self.flagIsNewBlock = 1
1462 1462
1463 1463 self.nTotalBlocks += 1
1464 1464 self.nReadBlocks += 1
1465 1465
1466 1466 return 1
1467 1467
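# Minimal sketch (not part of the original code) of the real/imag -> complex conversion
# performed in readBlock above, assuming the '<i2' structured dtype selected in __readFirstHeader:
#     raw = numpy.zeros(4, dtype=numpy.dtype([('real', '<i2'), ('imag', '<i2')]))
#     volts = raw['real'] + raw['imag'] * 1j       # complex voltage samples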
1468 1468 def getFirstHeader(self):
1469 1469
1470 1470 self.dataOut.dtype = self.dtype
1471 1471
1472 1472 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
1473 1473
1474 1474 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
1475 1475
1476 1476 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
1477 1477
1478 1478 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
1479 1479
1480 1480 self.dataOut.ippSeconds = self.ippSeconds
1481 1481
1482 1482 self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt
1483 1483
1484 1484 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
1485 1485
1486 1486 self.dataOut.flagShiftFFT = False
1487 1487
1488 1488 if self.radarControllerHeaderObj.code != None:
1489 1489
1490 1490 self.dataOut.nCode = self.radarControllerHeaderObj.nCode
1491 1491
1492 1492 self.dataOut.nBaud = self.radarControllerHeaderObj.nBaud
1493 1493
1494 1494 self.dataOut.code = self.radarControllerHeaderObj.code
1495 1495
1496 1496 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
1497 1497
1498 1498 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
1499 1499
1500 1500 self.dataOut.flagDecodeData = False #assume the data has not been decoded
1501 1501
1502 1502 self.dataOut.flagDeflipData = False #assume the data has not been deflipped
1503 1503
1504 1504 self.dataOut.flagShiftFFT = False
1505 1505
1506 1506 def getData(self):
1507 1507 """
1508 1508 getData gets one unit of data from the read buffer and copies it to the "Voltage" class
1509 1509 with all the parameters associated with it (metadata). When there is no data left in the
1510 1510 read buffer, a new read of the data blocks is needed, using "readNextBlock"
1511 1511
1512 1512 It also increments the buffer counter by 1.
1513 1513
1514 1514 Return:
1515 1515 data : returns one profile of voltages (heights * channels) copied from the
1516 1516 buffer. If there are no more files to read, it returns None.
1517 1517
1518 1518 Affected:
1519 1519 self.dataOut
1520 1520 self.profileIndex
1521 1521 self.flagTimeBlock
1522 1522 self.flagIsNewBlock
1523 1523 """
1528 1528
1529 1529 if self.flagNoMoreFiles:
1530 1530 self.dataOut.flagNoData = True
1531 1531 print 'Process finished'
1532 1532 return 0
1533 1533
1534 1534 self.flagTimeBlock = 0
1535 1535 self.flagIsNewBlock = 0
1536 1536
1537 1537 if self.__hasNotDataInBuffer():
1538 1538
1539 1539 if not( self.readNextBlock() ):
1540 1540 return 0
1541 1541
1542 1542 self.getFirstHeader()
1543 1543
1544 1544 if self.datablock == None:
1545 1545 self.dataOut.flagNoData = True
1546 1546 return 0
1547 1547
1548 1548 self.dataOut.data = self.datablock[:,self.profileIndex,:]
1549 1549
1550 1550 self.dataOut.flagNoData = False
1551 1551
1552 1552 self.getBasicHeader()
1553 1553
1554 1554 self.profileIndex += 1
1555 1555
1556 1556 return self.dataOut.data
1557 1557
1558 1558
1559 1559 class VoltageWriter(JRODataWriter):
1560 1560 """
1561 1561 This class writes voltage data to processed files (.r). The data is always
1562 1562 written in blocks.
1563 1563 """
1564 1564
1565 1565 ext = ".r"
1566 1566
1567 1567 optchar = "D"
1568 1568
1569 1569 shapeBuffer = None
1570 1570
1571 1571
1572 1572 def __init__(self):
1573 1573 """
1574 1574 Initializer of the VoltageWriter class for writing voltage data.
1575 1575
1576 1576 Affected:
1577 1577 self.dataOut
1578 1578
1579 1579 Return: None
1580 1580 """
1581 1581
1582 1582 self.nTotalBlocks = 0
1583 1583
1584 1584 self.profileIndex = 0
1585 1585
1586 1586 self.isConfig = False
1587 1587
1588 1588 self.fp = None
1589 1589
1590 1590 self.flagIsNewFile = 1
1591 1591
1592 1592 self.nTotalBlocks = 0
1593 1593
1594 1594 self.flagIsNewBlock = 0
1595 1595
1596 1596 self.setFile = None
1597 1597
1598 1598 self.dtype = None
1599 1599
1600 1600 self.path = None
1601 1601
1602 1602 self.filename = None
1603 1603
1604 1604 self.basicHeaderObj = BasicHeader(LOCALTIME)
1605 1605
1606 1606 self.systemHeaderObj = SystemHeader()
1607 1607
1608 1608 self.radarControllerHeaderObj = RadarControllerHeader()
1609 1609
1610 1610 self.processingHeaderObj = ProcessingHeader()
1611 1611
1612 1612 def hasAllDataInBuffer(self):
1613 1613 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
1614 1614 return 1
1615 1615 return 0
1616 1616
1617 1617
1618 1618 def setBlockDimension(self):
1619 1619 """
1620 1620 Gets the dimensional shapes of the data sub-blocks that make up a block
1621 1621
1622 1622 Affected:
1623 1623 self.shapeBuffer
1624 1624 self.datablock
1625 1625
1626 1626
1627 1627 Return: None
1628 1628 """
1629 1629 self.shapeBuffer = (self.processingHeaderObj.profilesPerBlock,
1630 1630 self.processingHeaderObj.nHeights,
1631 1631 self.systemHeaderObj.nChannels)
1632 1632
1633 1633 self.datablock = numpy.zeros((self.systemHeaderObj.nChannels,
1634 1634 self.processingHeaderObj.profilesPerBlock,
1635 1635 self.processingHeaderObj.nHeights),
1636 1636 dtype=numpy.dtype('complex64'))
1637 1637
1638 1638
1639 1639 def writeBlock(self):
1640 1640 """
1641 1641 Writes the buffer to the designated file
1642 1642
1643 1643 Affected:
1644 1644 self.profileIndex
1645 1645 self.flagIsNewFile
1646 1646 self.flagIsNewBlock
1647 1647 self.nTotalBlocks
1648 1648 self.blockIndex
1649 1649
1650 1650 Return: None
1651 1651 """
1652 1652 data = numpy.zeros( self.shapeBuffer, self.dtype )
1653 1653
1654 1654 junk = numpy.transpose(self.datablock, (1,2,0))
1655 1655
1656 1656 data['real'] = junk.real
1657 1657 data['imag'] = junk.imag
1658 1658
1659 1659 data = data.reshape( (-1) )
1660 1660
1661 1661 data.tofile( self.fp )
1662 1662
1663 1663 self.datablock.fill(0)
1664 1664
1665 1665 self.profileIndex = 0
1666 1666 self.flagIsNewFile = 0
1667 1667 self.flagIsNewBlock = 1
1668 1668
1669 1669 self.blockIndex += 1
1670 1670 self.nTotalBlocks += 1
1671 1671
1672 1672 def putData(self):
1673 1673 """
1674 1674 Fills a data block and then writes it to a file
1675 1675
1676 1676 Affected:
1677 1677 self.flagIsNewBlock
1678 1678 self.profileIndex
1679 1679
1680 1680 Return:
1681 1681 0 : if there is no data or no more files can be written
1682 1682 1 : if the data of a block was written to a file
1683 1683 """
1684 1684 if self.dataOut.flagNoData:
1685 1685 return 0
1686 1686
1687 1687 self.flagIsNewBlock = 0
1688 1688
1689 1689 if self.dataOut.flagTimeBlock:
1690 1690
1691 1691 self.datablock.fill(0)
1692 1692 self.profileIndex = 0
1693 1693 self.setNextFile()
1694 1694
1695 1695 if self.profileIndex == 0:
1696 1696 self.setBasicHeader()
1697 1697
1698 1698 self.datablock[:,self.profileIndex,:] = self.dataOut.data
1699 1699
1700 1700 self.profileIndex += 1
1701 1701
1702 1702 if self.hasAllDataInBuffer():
1703 1703 #if self.flagIsNewFile:
1704 1704 self.writeNextBlock()
1705 1705 # self.setFirstHeader()
1706 1706
1707 1707 return 1
1708 1708
1709 1709 def __getProcessFlags(self):
1710 1710
1711 1711 processFlags = 0
1712 1712
1713 1713 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
1714 1714 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
1715 1715 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
1716 1716 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
1717 1717 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
1718 1718 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
1719 1719
1720 1720 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
1721 1721
1722 1722
1723 1723
1724 1724 datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
1725 1725 PROCFLAG.DATATYPE_SHORT,
1726 1726 PROCFLAG.DATATYPE_LONG,
1727 1727 PROCFLAG.DATATYPE_INT64,
1728 1728 PROCFLAG.DATATYPE_FLOAT,
1729 1729 PROCFLAG.DATATYPE_DOUBLE]
1730 1730
1731 1731
1732 1732 for index in range(len(dtypeList)):
1733 1733 if self.dataOut.dtype == dtypeList[index]:
1734 1734 dtypeValue = datatypeValueList[index]
1735 1735 break
1736 1736
1737 1737 processFlags += dtypeValue
1738 1738
1739 1739 if self.dataOut.flagDecodeData:
1740 1740 processFlags += PROCFLAG.DECODE_DATA
1741 1741
1742 1742 if self.dataOut.flagDeflipData:
1743 1743 processFlags += PROCFLAG.DEFLIP_DATA
1744 1744
1745 1745 if self.dataOut.code != None:
1746 1746 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
1747 1747
1748 1748 if self.dataOut.nCohInt > 1:
1749 1749 processFlags += PROCFLAG.COHERENT_INTEGRATION
1750 1750
1751 1751 return processFlags
1752 1752
1753 1753
1754 1754 def __getBlockSize(self):
1755 1755 '''
1756 1756 This method determines the number of bytes of a Voltage-type data block
1757 1757 '''
1758 1758
1759 1759 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
1760 1760 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
1761 1761 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
1762 1762 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
1763 1763 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
1764 1764 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
1765 1765
1766 1766 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
1767 1767 datatypeValueList = [1,2,4,8,4,8]
1768 1768 for index in range(len(dtypeList)):
1769 1769 if self.dataOut.dtype == dtypeList[index]:
1770 1770 datatypeValue = datatypeValueList[index]
1771 1771 break
1772 1772
1773 1773 blocksize = int(self.dataOut.nHeights * self.dataOut.nChannels * self.dataOut.nProfiles * datatypeValue * 2)
1774 1774
1775 1775 return blocksize
1776 1776
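# Worked example (illustrative) of __getBlockSize above, for short (2-byte) samples:
#     blocksize = nHeights * nChannels * nProfiles * datatypeValue * 2
#               = 200 * 2 * 128 * 2 * 2 = 204800 bytes per block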
1777 1777 def setFirstHeader(self):
1778 1778
1779 1779 """
1780 1780 Gets a copy of the First Header
1781 1781
1782 1782 Affected:
1783 1783 self.systemHeaderObj
1784 1784 self.radarControllerHeaderObj
1785 1785 self.dtype
1786 1786
1787 1787 Return:
1788 1788 None
1789 1789 """
1790 1790
1791 1791 self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
1792 1792 self.systemHeaderObj.nChannels = self.dataOut.nChannels
1793 1793 self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()
1794 1794
1795 1795 self.setBasicHeader()
1796 1796
1797 1797 processingHeaderSize = 40 # bytes
1798 1798 self.processingHeaderObj.dtype = 0 # Voltage
1799 1799 self.processingHeaderObj.blockSize = self.__getBlockSize()
1800 1800 self.processingHeaderObj.profilesPerBlock = self.profilesPerBlock
1801 1801 self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
1802 1802 self.processingHeaderObj.nWindows = 1 #could be 1 or self.dataOut.processingHeaderObj.nWindows
1803 1803 self.processingHeaderObj.processFlags = self.__getProcessFlags()
1804 1804 self.processingHeaderObj.nCohInt = self.dataOut.nCohInt
1805 1805 self.processingHeaderObj.nIncohInt = 1 # when the source data is of Voltage type
1806 1806 self.processingHeaderObj.totalSpectra = 0 # when the source data is of Voltage type
1807 1807
1808 1808 if self.dataOut.code != None:
1809 1809 self.processingHeaderObj.code = self.dataOut.code
1810 1810 self.processingHeaderObj.nCode = self.dataOut.nCode
1811 1811 self.processingHeaderObj.nBaud = self.dataOut.nBaud
1812 1812 codesize = int(8 + 4 * self.dataOut.nCode * self.dataOut.nBaud)
1813 1813 processingHeaderSize += codesize
1814 1814
1815 1815 if self.processingHeaderObj.nWindows != 0:
1816 1816 self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
1817 1817 self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
1818 1818 self.processingHeaderObj.nHeights = self.dataOut.nHeights
1819 1819 self.processingHeaderObj.samplesWin = self.dataOut.nHeights
1820 1820 processingHeaderSize += 12
1821 1821
1822 1822 self.processingHeaderObj.size = processingHeaderSize
1823 1823
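# Worked example (illustrative) of the processing header size computed in setFirstHeader above,
# assuming one code with nCode=1 and nBaud=16 and a single window:
#     processingHeaderSize = 40 + (8 + 4 * 1 * 16) + 12 = 124 bytes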
1824 1824 class SpectraReader(JRODataReader):
1825 1825 """
1826 1826 This class reads spectra data from processed files (.pdata). The data is always
1827 1827 read in blocks. The data read (a 3-dimensional array) is stored in three
1828 1828 buffers, for the Self Spectra, the Cross Spectra and the DC Channels.
1829 1829
1830 1830 equalChannelPairs * heights * profiles (Self Spectra)
1831 1831 differentChannelPairs * heights * profiles (Cross Spectra)
1832 1832 channels * heights (DC Channels)
1833 1833
1834 1834 This class contains instances (objects) of the BasicHeader, SystemHeader,
1835 1835 RadarControllerHeader and Spectra classes. The first three are used to store data
1836 1836 header information (metadata), and the fourth (Spectra) to obtain and store a block of
1837 1837 data from the "buffer" each time the "getData" method is called.
1838 1838
1839 1839 Example:
1840 1840 dpath = "/home/myuser/data"
1841 1841
1842 1842 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
1843 1843
1844 1844 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
1845 1845
1846 1846 readerObj = SpectraReader()
1847 1847
1848 1848 readerObj.setup(dpath, startTime, endTime)
1849 1849
1850 1850 while(True):
1851 1851
1852 1852 readerObj.getData()
1853 1853
1854 1854 print readerObj.data_spc
1855 1855
1856 1856 print readerObj.data_cspc
1857 1857
1858 1858 print readerObj.data_dc
1859 1859
1860 1860 if readerObj.flagNoMoreFiles:
1861 1861 break
1862 1862
1863 1863 """
1864 1864
1865 1865 pts2read_SelfSpectra = 0
1866 1866
1867 1867 pts2read_CrossSpectra = 0
1868 1868
1869 1869 pts2read_DCchannels = 0
1870 1870
1871 1871 ext = ".pdata"
1872 1872
1873 1873 optchar = "P"
1874 1874
1875 1875 dataOut = None
1876 1876
1877 1877 nRdChannels = None
1878 1878
1879 1879 nRdPairs = None
1880 1880
1881 1881 rdPairList = []
1882 1882
1883 1883 def __init__(self):
1884 1884 """
1885 1885 Initializer of the SpectraReader class for reading spectra data.
1886 1886 
1887 1887 Inputs:
1888 1888 dataOut : Object of the Spectra class. This object will be used to
1889 1889 store a data profile each time one is requested
1890 1890 (getData). The profile will be taken from the data buffer;
1891 1891 if the buffer is empty, a new block of data is read from
1892 1892 the file.
1893 1893 If this parameter is not given, one is created internally.
1894 1894
1895 1895 Affected:
1896 1896 self.dataOut
1897 1897
1898 1898 Return : None
1899 1899 """
1900 1900
1901 1901 self.isConfig = False
1902 1902
1903 1903 self.pts2read_SelfSpectra = 0
1904 1904
1905 1905 self.pts2read_CrossSpectra = 0
1906 1906
1907 1907 self.pts2read_DCchannels = 0
1908 1908
1909 1909 self.datablock = None
1910 1910
1911 1911 self.utc = None
1912 1912
1913 1913 self.ext = ".pdata"
1914 1914
1915 1915 self.optchar = "P"
1916 1916
1917 1917 self.basicHeaderObj = BasicHeader(LOCALTIME)
1918 1918
1919 1919 self.systemHeaderObj = SystemHeader()
1920 1920
1921 1921 self.radarControllerHeaderObj = RadarControllerHeader()
1922 1922
1923 1923 self.processingHeaderObj = ProcessingHeader()
1924 1924
1925 1925 self.online = 0
1926 1926
1927 1927 self.fp = None
1928 1928
1929 1929 self.idFile = None
1930 1930
1931 1931 self.dtype = None
1932 1932
1933 1933 self.fileSizeByHeader = None
1934 1934
1935 1935 self.filenameList = []
1936 1936
1937 1937 self.filename = None
1938 1938
1939 1939 self.fileSize = None
1940 1940
1941 1941 self.firstHeaderSize = 0
1942 1942
1943 1943 self.basicHeaderSize = 24
1944 1944
1945 1945 self.pathList = []
1946 1946
1947 1947 self.lastUTTime = 0
1948 1948
1949 1949 self.maxTimeStep = 30
1950 1950
1951 1951 self.flagNoMoreFiles = 0
1952 1952
1953 1953 self.set = 0
1954 1954
1955 1955 self.path = None
1956 1956
1957 1957 self.delay = 60 #seconds
1958 1958
1959 1959 self.nTries = 3 #number of retries
1960 1960
1961 1961 self.nFiles = 3 #number of files for searching
1962 1962
1963 1963 self.nReadBlocks = 0
1964 1964
1965 1965 self.flagIsNewFile = 1
1966 1966
1967 1967 self.__isFirstTimeOnline = 1
1968 1968
1969 1969 self.ippSeconds = 0
1970 1970
1971 1971 self.flagTimeBlock = 0
1972 1972
1973 1973 self.flagIsNewBlock = 0
1974 1974
1975 1975 self.nTotalBlocks = 0
1976 1976
1977 1977 self.blocksize = 0
1978 1978
1979 1979 self.dataOut = self.createObjByDefault()
1980 1980
1981 1981 self.profileIndex = 1 #Always
1982 1982
1983 1983
1984 1984 def createObjByDefault(self):
1985 1985
1986 1986 dataObj = Spectra()
1987 1987
1988 1988 return dataObj
1989 1989
1990 1990 def __hasNotDataInBuffer(self):
1991 1991 return 1
1992 1992
1993 1993
1994 1994 def getBlockDimension(self):
1995 1995 """
1996 1996 Gets the number of points to read for each data block
1997 1997
1998 1998 Affected:
1999 1999 self.nRdChannels
2000 2000 self.nRdPairs
2001 2001 self.pts2read_SelfSpectra
2002 2002 self.pts2read_CrossSpectra
2003 2003 self.pts2read_DCchannels
2004 2004 self.blocksize
2005 2005 self.dataOut.nChannels
2006 2006 self.dataOut.nPairs
2007 2007
2008 2008 Return:
2009 2009 None
2010 2010 """
2011 2011 self.nRdChannels = 0
2012 2012 self.nRdPairs = 0
2013 2013 self.rdPairList = []
2014 2014
2015 2015 for i in range(0, self.processingHeaderObj.totalSpectra*2, 2):
2016 2016 if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i+1]:
2017 2017 self.nRdChannels = self.nRdChannels + 1 #pair of equal channels
2018 2018 else:
2019 2019 self.nRdPairs = self.nRdPairs + 1 #pair of different channels
2020 2020 self.rdPairList.append((self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1]))
2021 2021
2022 2022 pts2read = self.processingHeaderObj.nHeights * self.processingHeaderObj.profilesPerBlock
2023 2023
2024 2024 self.pts2read_SelfSpectra = int(self.nRdChannels * pts2read)
2025 2025 self.blocksize = self.pts2read_SelfSpectra
2026 2026
2027 2027 if self.processingHeaderObj.flag_cspc:
2028 2028 self.pts2read_CrossSpectra = int(self.nRdPairs * pts2read)
2029 2029 self.blocksize += self.pts2read_CrossSpectra
2030 2030
2031 2031 if self.processingHeaderObj.flag_dc:
2032 2032 self.pts2read_DCchannels = int(self.systemHeaderObj.nChannels * self.processingHeaderObj.nHeights)
2033 2033 self.blocksize += self.pts2read_DCchannels
2034 2034
2035 2035 # self.blocksize = self.pts2read_SelfSpectra + self.pts2read_CrossSpectra + self.pts2read_DCchannels
2036 2036
2037 2037
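# Illustration (assumed example values): for a header with totalSpectra = 4 and
# spectraComb = [0,0, 1,1, 2,2, 0,1], the loop above yields nRdChannels = 3
# (pairs 0-0, 1-1 and 2-2), nRdPairs = 1 and rdPairList = [(0, 1)], so with
# flag_cspc set the block holds 3*pts2read self-spectra points plus 1*pts2read
# cross-spectra points, plus the DC channels when flag_dc is set.
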
2038 2038 def readBlock(self):
2039 2039 """
2040 2040 Reads the data block from the current position of the file pointer
2041 2041 (self.fp) and updates all the parameters related to the data block
2042 2042 (metadata + data). The data read is stored in the buffer and the buffer
2043 2043 counter is reset to 0.
2044 2044
2045 2045 Return: None
2046 2046
2047 2047 Affected variables:
2048 2048
2049 2049 self.flagIsNewFile
2050 2050 self.flagIsNewBlock
2051 2051 self.nTotalBlocks
2052 2052 self.data_spc
2053 2053 self.data_cspc
2054 2054 self.data_dc
2055 2055
2056 2056 Exceptions:
2057 2057 If a block read is not a valid block
2058 2058 """
2059 2059 blockOk_flag = False
2060 2060 fpointer = self.fp.tell()
2061 2061
2062 2062 spc = numpy.fromfile( self.fp, self.dtype[0], self.pts2read_SelfSpectra )
2063 2063 spc = spc.reshape( (self.nRdChannels, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #reshape into a 3D array
2064 2064
2065 2065 if self.processingHeaderObj.flag_cspc:
2066 2066 cspc = numpy.fromfile( self.fp, self.dtype, self.pts2read_CrossSpectra )
2067 2067 cspc = cspc.reshape( (self.nRdPairs, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #reshape into a 3D array
2068 2068
2069 2069 if self.processingHeaderObj.flag_dc:
2070 2070 dc = numpy.fromfile( self.fp, self.dtype, self.pts2read_DCchannels ) #int(self.processingHeaderObj.nHeights*self.systemHeaderObj.nChannels) )
2071 2071 dc = dc.reshape( (self.systemHeaderObj.nChannels, self.processingHeaderObj.nHeights) ) #reshape into a 2D array
2072 2072
2073 2073
2074 2074 if not(self.processingHeaderObj.shif_fft):
2075 2075 #shift to the right along axis 2 by a given number of positions
2076 2076 shift = int(self.processingHeaderObj.profilesPerBlock/2)
2077 2077 spc = numpy.roll( spc, shift , axis=2 )
2078 2078
2079 2079 if self.processingHeaderObj.flag_cspc:
2080 2080 #shift to the right along axis 2 by a given number of positions
2081 2081 cspc = numpy.roll( cspc, shift, axis=2 )
2082 2082
2083 2083 # self.processingHeaderObj.shif_fft = True
2084 2084
2085 2085 spc = numpy.transpose( spc, (0,2,1) )
2086 2086 self.data_spc = spc
2087 2087
2088 2088 if self.processingHeaderObj.flag_cspc:
2089 2089 cspc = numpy.transpose( cspc, (0,2,1) )
2090 2090 self.data_cspc = cspc['real'] + cspc['imag']*1j
2091 2091 else:
2092 2092 self.data_cspc = None
2093 2093
2094 2094 if self.processingHeaderObj.flag_dc:
2095 2095 self.data_dc = dc['real'] + dc['imag']*1j
2096 2096 else:
2097 2097 self.data_dc = None
2098 2098
2099 2099 self.flagIsNewFile = 0
2100 2100 self.flagIsNewBlock = 1
2101 2101
2102 2102 self.nTotalBlocks += 1
2103 2103 self.nReadBlocks += 1
2104 2104
2105 2105 return 1
2106 2106
2107 2107 def getFirstHeader(self):
2108 2108
2109 2109 self.dataOut.dtype = self.dtype
2110 2110
2111 2111 self.dataOut.nPairs = self.nRdPairs
2112 2112
2113 2113 self.dataOut.pairsList = self.rdPairList
2114 2114
2115 2115 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
2116 2116
2117 2117 self.dataOut.nFFTPoints = self.processingHeaderObj.profilesPerBlock
2118 2118
2119 2119 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
2120 2120
2121 2121 self.dataOut.nIncohInt = self.processingHeaderObj.nIncohInt
2122 2122
2123 2123 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
2124 2124
2125 2125 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
2126 2126
2127 2127 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
2128 2128
2129 2129 self.dataOut.ippSeconds = self.ippSeconds
2130 2130
2131 2131 self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt * self.processingHeaderObj.nIncohInt * self.dataOut.nFFTPoints
2132 2132
2133 2133 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
2134 2134
2135 2135 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
2136 2136
2137 2137 self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft
2138 2138
2139 2139 self.dataOut.flagDecodeData = False #assume the data has not been decoded
2140 2140 
2141 2141 self.dataOut.flagDeflipData = True #assume the data does not need to be deflipped
2142 2142
2143 2143 if self.processingHeaderObj.code is not None:
2144 2144
2145 2145 self.dataOut.nCode = self.processingHeaderObj.nCode
2146 2146
2147 2147 self.dataOut.nBaud = self.processingHeaderObj.nBaud
2148 2148
2149 2149 self.dataOut.code = self.processingHeaderObj.code
2150 2150
2151 2151 self.dataOut.flagDecodeData = True
2152 2152
2153 2153 def getData(self):
2154 2154 """
2155 2155 Copies the read buffer to the "Spectra" class,
2156 2156 with all its associated parameters (metadata). When there is no data in the read
2157 2157 buffer, a new read of the data blocks is needed using "readNextBlock"
2158 2158 
2159 2159 Return:
2160 2160 0 : If there are no more files available
2161 2161 1 : If the buffer was copied successfully
2162 2162
2163 2163 Affected:
2164 2164 self.dataOut
2165 2165
2166 2166 self.flagTimeBlock
2167 2167 self.flagIsNewBlock
2168 2168 """
2169 2169
2170 2170 if self.flagNoMoreFiles:
2171 2171 self.dataOut.flagNoData = True
2172 2172 print 'Process finished'
2173 2173 return 0
2174 2174
2175 2175 self.flagTimeBlock = 0
2176 2176 self.flagIsNewBlock = 0
2177 2177
2178 2178 if self.__hasNotDataInBuffer():
2179 2179
2180 2180 if not( self.readNextBlock() ):
2181 2181 self.dataOut.flagNoData = True
2182 2182 return 0
2183 2183
2184 2184 #data is a 3-dimensional numpy array (profiles, heights and channels)
2185 2185
2186 2186 if self.data_dc is None:
2187 2187 self.dataOut.flagNoData = True
2188 2188 return 0
2189 2189
2190 2190 self.getBasicHeader()
2191 2191
2192 2192 self.getFirstHeader()
2193 2193
2194 2194 self.dataOut.data_spc = self.data_spc
2195 2195
2196 2196 self.dataOut.data_cspc = self.data_cspc
2197 2197
2198 2198 self.dataOut.data_dc = self.data_dc
2199 2199
2200 2200 self.dataOut.flagNoData = False
2201 2201
2202 2202 return self.dataOut.data_spc
2203 2203
2204 2204
2205 2205 class SpectraWriter(JRODataWriter):
2206 2206
2207 2207 """
2208 2208 This class writes spectra data to processed files (.pdata). Data are always
2209 2209 written by blocks.
2210 2210 """
2211 2211
2212 2212 ext = ".pdata"
2213 2213
2214 2214 optchar = "P"
2215 2215
2216 2216 shape_spc_Buffer = None
2217 2217
2218 2218 shape_cspc_Buffer = None
2219 2219
2220 2220 shape_dc_Buffer = None
2221 2221
2222 2222 data_spc = None
2223 2223
2224 2224 data_cspc = None
2225 2225
2226 2226 data_dc = None
2227 2227
2228 2228 # dataOut = None
2229 2229
2230 2230 def __init__(self):
2231 2231 """
2232 2232 Initializer of the SpectraWriter class for writing spectra data.
2233 2233
2234 2234 Affected:
2235 2235 self.dataOut
2236 2236 self.basicHeaderObj
2237 2237 self.systemHeaderObj
2238 2238 self.radarControllerHeaderObj
2239 2239 self.processingHeaderObj
2240 2240
2241 2241 Return: None
2242 2242 """
2243 2243
2244 2244 self.isConfig = False
2245 2245
2246 2246 self.nTotalBlocks = 0
2247 2247
2248 2248 self.data_spc = None
2249 2249
2250 2250 self.data_cspc = None
2251 2251
2252 2252 self.data_dc = None
2253 2253
2254 2254 self.fp = None
2255 2255
2256 2256 self.flagIsNewFile = 1
2257 2257
2258 2258 self.nTotalBlocks = 0
2259 2259
2260 2260 self.flagIsNewBlock = 0
2261 2261
2262 2262 self.setFile = None
2263 2263
2264 2264 self.dtype = None
2265 2265
2266 2266 self.path = None
2267 2267
2268 2268 self.noMoreFiles = 0
2269 2269
2270 2270 self.filename = None
2271 2271
2272 2272 self.basicHeaderObj = BasicHeader(LOCALTIME)
2273 2273
2274 2274 self.systemHeaderObj = SystemHeader()
2275 2275
2276 2276 self.radarControllerHeaderObj = RadarControllerHeader()
2277 2277
2278 2278 self.processingHeaderObj = ProcessingHeader()
2279 2279
2280 2280
2281 2281 def hasAllDataInBuffer(self):
2282 2282 return 1
2283 2283
2284 2284
2285 2285 def setBlockDimension(self):
2286 2286 """
2287 2287 Gets the dimensional shapes of the data sub-blocks that make up a block
2288 2288
2289 2289 Affected:
2290 2290 self.shape_spc_Buffer
2291 2291 self.shape_cspc_Buffer
2292 2292 self.shape_dc_Buffer
2293 2293
2294 2294 Return: None
2295 2295 """
2296 2296 self.shape_spc_Buffer = (self.dataOut.nChannels,
2297 2297 self.processingHeaderObj.nHeights,
2298 2298 self.processingHeaderObj.profilesPerBlock)
2299 2299
2300 2300 self.shape_cspc_Buffer = (self.dataOut.nPairs,
2301 2301 self.processingHeaderObj.nHeights,
2302 2302 self.processingHeaderObj.profilesPerBlock)
2303 2303
2304 2304 self.shape_dc_Buffer = (self.dataOut.nChannels,
2305 2305 self.processingHeaderObj.nHeights)
2306 2306
2307 2307
2308 2308 def writeBlock(self):
2309 2309 """
2310 2310 Writes the buffer to the designated file
2311 2311
2312 2312 Affected:
2313 2313 self.data_spc
2314 2314 self.data_cspc
2315 2315 self.data_dc
2316 2316 self.flagIsNewFile
2317 2317 self.flagIsNewBlock
2318 2318 self.nTotalBlocks
2319 2319 self.nWriteBlocks
2320 2320
2321 2321 Return: None
2322 2322 """
2323 2323
2324 2324 spc = numpy.transpose( self.data_spc, (0,2,1) )
2325 2325 if not( self.processingHeaderObj.shif_fft ):
2326 2326 spc = numpy.roll( spc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #shift to the right along axis 2 by a given number of positions
2327 2327 data = spc.reshape((-1))
2328 2328 data = data.astype(self.dtype[0])
2329 2329 data.tofile(self.fp)
2330 2330
2331 2331 if self.data_cspc is not None:
2332 2332 data = numpy.zeros( self.shape_cspc_Buffer, self.dtype )
2333 2333 cspc = numpy.transpose( self.data_cspc, (0,2,1) )
2334 2334 if not( self.processingHeaderObj.shif_fft ):
2335 2335 cspc = numpy.roll( cspc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #shift to the right along axis 2 by a given number of positions
2336 2336 data['real'] = cspc.real
2337 2337 data['imag'] = cspc.imag
2338 2338 data = data.reshape((-1))
2339 2339 data.tofile(self.fp)
2340 2340
2341 2341 if self.data_dc is not None:
2342 2342 data = numpy.zeros( self.shape_dc_Buffer, self.dtype )
2343 2343 dc = self.data_dc
2344 2344 data['real'] = dc.real
2345 2345 data['imag'] = dc.imag
2346 2346 data = data.reshape((-1))
2347 2347 data.tofile(self.fp)
2348 2348
2349 2349 self.data_spc.fill(0)
2350 2350 self.data_dc.fill(0)
2351 2351 if self.data_cspc is not None:
2352 2352 self.data_cspc.fill(0)
2353 2353
2354 2354 self.flagIsNewFile = 0
2355 2355 self.flagIsNewBlock = 1
2356 2356 self.nTotalBlocks += 1
2357 2357 self.nWriteBlocks += 1
2358 2358 self.blockIndex += 1
2359 2359
2360 2360
2361 2361 def putData(self):
2362 2362 """
2363 2363 Sets a block of data and then writes it to a file
2364 2364 
2365 2365 Affected:
2366 2366 self.data_spc
2367 2367 self.data_cspc
2368 2368 self.data_dc
2369 2369 
2370 2370 Return:
2371 2371 0 : If there is no data or no more files can be written
2372 2372 1 : If a block of data was written to a file
2373 2373 """
2374 2374
2375 2375 if self.dataOut.flagNoData:
2376 2376 return 0
2377 2377
2378 2378 self.flagIsNewBlock = 0
2379 2379
2380 2380 if self.dataOut.flagTimeBlock:
2381 2381 self.data_spc.fill(0)
2382 2382 self.data_cspc.fill(0)
2383 2383 self.data_dc.fill(0)
2384 2384 self.setNextFile()
2385 2385
2386 2386 if self.flagIsNewFile == 0:
2387 2387 self.setBasicHeader()
2388 2388
2389 2389 self.data_spc = self.dataOut.data_spc.copy()
2390 2390 self.data_cspc = self.dataOut.data_cspc.copy()
2391 2391 self.data_dc = self.dataOut.data_dc.copy()
2392 2392
2393 2393 # #self.processingHeaderObj.dataBlocksPerFile)
2394 2394 if self.hasAllDataInBuffer():
2395 2395 # self.setFirstHeader()
2396 2396 self.writeNextBlock()
2397 2397
2398 2398 return 1
2399 2399
2400 2400
2401 2401 def __getProcessFlags(self):
2402 2402
2403 2403 processFlags = 0
2404 2404
2405 2405 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
2406 2406 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
2407 2407 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
2408 2408 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
2409 2409 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
2410 2410 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
2411 2411
2412 2412 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
2413 2413
2414 2414
2415 2415
2416 2416 datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
2417 2417 PROCFLAG.DATATYPE_SHORT,
2418 2418 PROCFLAG.DATATYPE_LONG,
2419 2419 PROCFLAG.DATATYPE_INT64,
2420 2420 PROCFLAG.DATATYPE_FLOAT,
2421 2421 PROCFLAG.DATATYPE_DOUBLE]
2422 2422
2423 2423
2424 2424 for index in range(len(dtypeList)):
2425 2425 if self.dataOut.dtype == dtypeList[index]:
2426 2426 dtypeValue = datatypeValueList[index]
2427 2427 break
2428 2428
2429 2429 processFlags += dtypeValue
2430 2430
2431 2431 if self.dataOut.flagDecodeData:
2432 2432 processFlags += PROCFLAG.DECODE_DATA
2433 2433
2434 2434 if self.dataOut.flagDeflipData:
2435 2435 processFlags += PROCFLAG.DEFLIP_DATA
2436 2436
2437 2437 if self.dataOut.code is not None:
2438 2438 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
2439 2439
2440 2440 if self.dataOut.nIncohInt > 1:
2441 2441 processFlags += PROCFLAG.INCOHERENT_INTEGRATION
2442 2442
2443 2443 if self.dataOut.data_dc is not None:
2444 2444 processFlags += PROCFLAG.SAVE_CHANNELS_DC
2445 2445
2446 2446 return processFlags
2447 2447
2448 2448
2449 2449 def __getBlockSize(self):
2450 2450 '''
2451 2451 This method determines the number of bytes for a Spectra-type data block
2452 2452 '''
2453 2453
2454 2454 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
2455 2455 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
2456 2456 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
2457 2457 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
2458 2458 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
2459 2459 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
2460 2460
2461 2461 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
2462 2462 datatypeValueList = [1,2,4,8,4,8]
2463 2463 for index in range(len(dtypeList)):
2464 2464 if self.dataOut.dtype == dtypeList[index]:
2465 2465 datatypeValue = datatypeValueList[index]
2466 2466 break
2467 2467
2468 2468
2469 2469 pts2write = self.dataOut.nHeights * self.dataOut.nFFTPoints
2470 2470
2471 2471 pts2write_SelfSpectra = int(self.dataOut.nChannels * pts2write)
2472 2472 blocksize = (pts2write_SelfSpectra*datatypeValue)
2473 2473
2474 2474 if self.dataOut.data_cspc is not None:
2475 2475 pts2write_CrossSpectra = int(self.dataOut.nPairs * pts2write)
2476 2476 blocksize += (pts2write_CrossSpectra*datatypeValue*2)
2477 2477
2478 2478 if self.dataOut.data_dc is not None:
2479 2479 pts2write_DCchannels = int(self.dataOut.nChannels * self.dataOut.nHeights)
2480 2480 blocksize += (pts2write_DCchannels*datatypeValue*2)
2481 2481
2482 2482 blocksize = blocksize #* datatypeValue * 2 #FIX THIS
2483 2483
2484 2484 return blocksize
2485 2485
2486 2486 def setFirstHeader(self):
2487 2487
2488 2488 """
2489 2489 Gets a copy of the First Header
2490 2490
2491 2491 Affected:
2492 2492 self.systemHeaderObj
2493 2493 self.radarControllerHeaderObj
2494 2494 self.dtype
2495 2495
2496 2496 Return:
2497 2497 None
2498 2498 """
2499 2499
2500 2500 self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
2501 2501 self.systemHeaderObj.nChannels = self.dataOut.nChannels
2502 2502 self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()
2503 2503
2504 2504 self.setBasicHeader()
2505 2505
2506 2506 processingHeaderSize = 40 # bytes
2507 2507 self.processingHeaderObj.dtype = 1 # Spectra
2508 2508 self.processingHeaderObj.blockSize = self.__getBlockSize()
2509 2509 self.processingHeaderObj.profilesPerBlock = self.dataOut.nFFTPoints
2510 2510 self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
2511 2511 self.processingHeaderObj.nWindows = 1 #could be 1 or self.dataOut.processingHeaderObj.nWindows
2512 2512 self.processingHeaderObj.processFlags = self.__getProcessFlags()
2513 2513 self.processingHeaderObj.nCohInt = self.dataOut.nCohInt # Required to determine the value of timeInterval
2514 2514 self.processingHeaderObj.nIncohInt = self.dataOut.nIncohInt
2515 2515 self.processingHeaderObj.totalSpectra = self.dataOut.nPairs + self.dataOut.nChannels
2516 2516 self.processingHeaderObj.shif_fft = self.dataOut.flagShiftFFT
2517 2517
2518 2518 if self.processingHeaderObj.totalSpectra > 0:
2519 2519 channelList = []
2520 2520 for channel in range(self.dataOut.nChannels):
2521 2521 channelList.append(channel)
2522 2522 channelList.append(channel)
2523 2523
2524 2524 pairsList = []
2525 2525 for pair in self.dataOut.pairsList:
2526 2526 pairsList.append(pair[0])
2527 2527 pairsList.append(pair[1])
2528 2528 spectraComb = channelList + pairsList
2529 2529 spectraComb = numpy.array(spectraComb,dtype="u1")
2530 2530 self.processingHeaderObj.spectraComb = spectraComb
2531 2531 sizeOfSpcComb = len(spectraComb)
2532 2532 processingHeaderSize += sizeOfSpcComb
2533 2533
2534 2534 # The processing header should not have information about code
2535 2535 # if self.dataOut.code != None:
2536 2536 # self.processingHeaderObj.code = self.dataOut.code
2537 2537 # self.processingHeaderObj.nCode = self.dataOut.nCode
2538 2538 # self.processingHeaderObj.nBaud = self.dataOut.nBaud
2539 2539 # nCodeSize = 4 # bytes
2540 2540 # nBaudSize = 4 # bytes
2541 2541 # codeSize = 4 # bytes
2542 2542 # sizeOfCode = int(nCodeSize + nBaudSize + codeSize * self.dataOut.nCode * self.dataOut.nBaud)
2543 2543 # processingHeaderSize += sizeOfCode
2544 2544
2545 2545 if self.processingHeaderObj.nWindows != 0:
2546 2546 self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
2547 2547 self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
2548 2548 self.processingHeaderObj.nHeights = self.dataOut.nHeights
2549 2549 self.processingHeaderObj.samplesWin = self.dataOut.nHeights
2550 2550 sizeOfFirstHeight = 4
2551 2551 sizeOfdeltaHeight = 4
2552 2552 sizeOfnHeights = 4
2553 2553 sizeOfWindows = (sizeOfFirstHeight + sizeOfdeltaHeight + sizeOfnHeights)*self.processingHeaderObj.nWindows
2554 2554 processingHeaderSize += sizeOfWindows
2555 2555
2556 2556 self.processingHeaderObj.size = processingHeaderSize
2557 2557
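# Illustration (assumed example values): with nChannels = 3 and
# pairsList = [(0, 1), (0, 2)], the loops above build channelList = [0, 0, 1, 1, 2, 2]
# and pairsList = [0, 1, 0, 2], so spectraComb = [0, 0, 1, 1, 2, 2, 0, 1, 0, 2]
# (dtype "u1"), totalSpectra = nPairs + nChannels = 5, and the spectra
# combination adds 10 bytes to the processing header size.
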
2558 2558 class SpectraHeisWriter(Operation):
2559 2559 # set = None
2560 2560 setFile = None
2561 2561 idblock = None
2562 2562 doypath = None
2563 2563 subfolder = None
2564 2564
2565 2565 def __init__(self):
2566 2566 self.wrObj = FITS()
2567 2567 # self.dataOut = dataOut
2568 2568 self.nTotalBlocks=0
2569 2569 # self.set = None
2570 2570 self.setFile = None
2571 2571 self.idblock = 0
2572 2572 self.wrpath = None
2573 2573 self.doypath = None
2574 2574 self.subfolder = None
2575 2575 self.isConfig = False
2576 2576
2577 2577 def isNumber(self, str):
2578 2578 """
2579 2579 Checks whether the characters that make up a string can be converted to a number.
2580 2580 
2581 2581 Exceptions:
2582 2582 If a given string cannot be converted to a number
2583 2583 Input:
2584 2584 str, string to analyze to determine whether it can be converted to a number or not
2585 2585 
2586 2586 Return:
2587 2587 True : if the string is numeric
2588 2588 False : if it is not a numeric string
2589 2589 """
2590 2590 try:
2591 2591 float( str )
2592 2592 return True
2593 2593 except:
2594 2594 return False
2595 2595
2596 2596 def setup(self, dataOut, wrpath):
2597 2597
2598 2598 if not(os.path.exists(wrpath)):
2599 2599 os.mkdir(wrpath)
2600 2600
2601 2601 self.wrpath = wrpath
2602 2602 # self.setFile = 0
2603 2603 self.dataOut = dataOut
2604 2604
2605 2605 def putData(self):
2606 2606 name= time.localtime( self.dataOut.utctime)
2607 2607 ext=".fits"
2608 2608
2609 2609 if self.doypath is None:
2610 2610 self.subfolder = 'F%4.4d%3.3d_%d' % (name.tm_year,name.tm_yday,time.mktime(datetime.datetime.now().timetuple()))
2611 2611 self.doypath = os.path.join( self.wrpath, self.subfolder )
2612 2612 os.mkdir(self.doypath)
2613 2613
2614 2614 if self.setFile is None:
2615 2615 # self.set = self.dataOut.set
2616 2616 self.setFile = 0
2617 2617 # if self.set != self.dataOut.set:
2618 2618 ## self.set = self.dataOut.set
2619 2619 # self.setFile = 0
2620 2620
2621 2621 #make the filename
2622 2622 file = 'D%4.4d%3.3d_%3.3d%s' % (name.tm_year,name.tm_yday,self.setFile,ext)
2623 2623
2624 2624 filename = os.path.join(self.wrpath,self.subfolder, file)
2625 2625
2626 2626 idblock = numpy.array([self.idblock],dtype="int64")
2627 2627 header=self.wrObj.cFImage(idblock=idblock,
2628 2628 year=time.gmtime(self.dataOut.utctime).tm_year,
2629 2629 month=time.gmtime(self.dataOut.utctime).tm_mon,
2630 2630 day=time.gmtime(self.dataOut.utctime).tm_mday,
2631 2631 hour=time.gmtime(self.dataOut.utctime).tm_hour,
2632 2632 minute=time.gmtime(self.dataOut.utctime).tm_min,
2633 2633 second=time.gmtime(self.dataOut.utctime).tm_sec)
2634 2634
2635 2635 c=3E8
2636 2636 deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
2637 2637 freq=numpy.arange(-1*self.dataOut.nHeights/2.,self.dataOut.nHeights/2.)*(c/(2*deltaHeight*1000))
2638 2638
2639 2639 colList = []
2640 2640
2641 2641 colFreq=self.wrObj.setColF(name="freq", format=str(self.dataOut.nFFTPoints)+'E', array=freq)
2642 2642
2643 2643 colList.append(colFreq)
2644 2644
2645 2645 nchannel=self.dataOut.nChannels
2646 2646
2647 2647 for i in range(nchannel):
2648 2648 col = self.wrObj.writeData(name="PCh"+str(i+1),
2649 2649 format=str(self.dataOut.nFFTPoints)+'E',
2650 2650 data=10*numpy.log10(self.dataOut.data_spc[i,:]))
2651 2651
2652 2652 colList.append(col)
2653 2653
2654 2654 data=self.wrObj.Ctable(colList=colList)
2655 2655
2656 2656 self.wrObj.CFile(header,data)
2657 2657
2658 2658 self.wrObj.wFile(filename)
2659 2659
2660 2660 #update the setFile
2661 2661 self.setFile += 1
2662 2662 self.idblock += 1
2663 2663
2664 2664 return 1
2665 2665
2666 2666 def run(self, dataOut, **kwargs):
2667 2667
2668 2668 if not(self.isConfig):
2669 2669
2670 2670 self.setup(dataOut, **kwargs)
2671 2671 self.isConfig = True
2672 2672
2673 2673 self.putData()
2674 2674
2675 2675
2676 2676 class FITS:
2677 2677 name=None
2678 2678 format=None
2679 2679 array =None
2680 2680 data =None
2681 2681 thdulist=None
2682 2682 prihdr=None
2683 2683 hdu=None
2684 2684
2685 2685 def __init__(self):
2686 2686
2687 2687 pass
2688 2688
2689 2689 def setColF(self,name,format,array):
2690 2690 self.name=name
2691 2691 self.format=format
2692 2692 self.array=array
2693 2693 a1=numpy.array([self.array],dtype=numpy.float32)
2694 2694 self.col1 = pyfits.Column(name=self.name, format=self.format, array=a1)
2695 2695 return self.col1
2696 2696
2697 2697 # def setColP(self,name,format,data):
2698 2698 # self.name=name
2699 2699 # self.format=format
2700 2700 # self.data=data
2701 2701 # a2=numpy.array([self.data],dtype=numpy.float32)
2702 2702 # self.col2 = pyfits.Column(name=self.name, format=self.format, array=a2)
2703 2703 # return self.col2
2704 2704
2705 2705
2706 2706 def writeData(self,name,format,data):
2707 2707 self.name=name
2708 2708 self.format=format
2709 2709 self.data=data
2710 2710 a2=numpy.array([self.data],dtype=numpy.float32)
2711 2711 self.col2 = pyfits.Column(name=self.name, format=self.format, array=a2)
2712 2712 return self.col2
2713 2713
2714 2714 def cFImage(self,idblock,year,month,day,hour,minute,second):
2715 2715 self.hdu= pyfits.PrimaryHDU(idblock)
2716 2716 self.hdu.header.set("Year",year)
2717 2717 self.hdu.header.set("Month",month)
2718 2718 self.hdu.header.set("Day",day)
2719 2719 self.hdu.header.set("Hour",hour)
2720 2720 self.hdu.header.set("Minute",minute)
2721 2721 self.hdu.header.set("Second",second)
2722 2722 return self.hdu
2723 2723
2724 2724
2725 2725 def Ctable(self,colList):
2726 2726 self.cols=pyfits.ColDefs(colList)
2727 2727 self.tbhdu = pyfits.new_table(self.cols)
2728 2728 return self.tbhdu
2729 2729
2730 2730
2731 2731 def CFile(self,hdu,tbhdu):
2732 2732 self.thdulist=pyfits.HDUList([hdu,tbhdu])
2733 2733
2734 2734 def wFile(self,filename):
2735 2735 if os.path.isfile(filename):
2736 2736 os.remove(filename)
2737 2737 self.thdulist.writeto(filename)
2738
2739
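# Minimal usage sketch of the FITS helper above (illustrative only: the values,
# column names and output path are assumptions, not part of the original module).
# It mirrors the sequence used by SpectraHeisWriter.putData: build the primary
# HDU, create one column per data series, group them into a table and write
# everything to disk.
def _fitsUsageSketch():
    freq = numpy.arange(8, dtype=numpy.float32) #dummy frequency axis
    power = numpy.zeros(8, dtype=numpy.float32) #dummy power profile in dB
    
    wrObj = FITS()
    header = wrObj.cFImage(idblock=numpy.array([0], dtype="int64"),
                           year=2013, month=2, day=6, hour=0, minute=0, second=0)
    colFreq = wrObj.setColF(name="freq", format='8E', array=freq)
    colPow = wrObj.writeData(name="PCh1", format='8E', data=power)
    data = wrObj.Ctable(colList=[colFreq, colPow])
    wrObj.CFile(header, data)
    wrObj.wFile("/tmp/example.fits") #assumed output path
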
2740 class ParameterConf:
2741 ELEMENTNAME = 'Parameter'
2742 def __init__(self):
2743 self.name = ''
2744 self.value = ''
2745
2746 def readXml(self, parmElement):
2747 self.name = parmElement.get('name')
2748 self.value = parmElement.get('value')
2749
2750 def getElementName(self):
2751 return self.ELEMENTNAME
2752
2753 class Metadata:
2754
2755 def __init__(self, filename):
2756 self.parmConfObjList = []
2757 self.readXml(filename)
2758
2759 def readXml(self, filename):
from xml.etree.ElementTree import ElementTree #assumed import: ElementTree is not imported at module level here and may otherwise come from a star import above
2760 self.projectElement = None
2761 self.procUnitConfObjDict = {}
2762 self.projectElement = ElementTree().parse(filename)
2763 self.project = self.projectElement.tag
2764
2765 parmElementList = self.projectElement.getiterator(ParameterConf().getElementName())
2766
2767 for parmElement in parmElementList:
2768 parmConfObj = ParameterConf()
2769 parmConfObj.readXml(parmElement)
2770 self.parmConfObjList.append(parmConfObj)
2771
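# Sketch of the XML layout that ParameterConf/Metadata expect (an assumption
# inferred from the readXml methods above and from FitsWriter.setFitsHeader,
# not a file shipped with the module): a root element holding
# <Parameter name="..." value="..."/> entries, where values such as
# 'fromdataheights' are placeholders resolved at write time from dataOut.
#
# <Metadata>
#     <Parameter name='EXPNAME' value='Sun Experiment Test' />
#     <Parameter name='DATA' value='spc' />
#     <Parameter name='DATETIME' value='fromdatadatetime' />
#     <Parameter name='NHEIGHTS' value='fromdataheights' />
#     <Parameter name='NCHAN' value='fromdatachannel' />
#     <Parameter name='NSAMPLES' value='fromdatasamples' />
# </Metadata>
def _printFitsMetadata(filename):
    #Illustrative helper (not part of the original module): dump the
    #name/value pairs that Metadata extracts from the XML file.
    metadataObj = Metadata(filename)
    for parmConfObj in metadataObj.parmConfObjList:
        print parmConfObj.name, parmConfObj.value
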
2772 class FitsWriter(Operation):
2773
2774 def __init__(self):
2775 self.isConfig = False
2776 self.dataBlocksPerFile = None
2777 self.blockIndex = 0
2778 self.flagIsNewFile = 1
2779 self.fitsObj = None
2780 self.optchar = 'P'
2781 self.ext = '.fits'
2782 self.setFile = 0
2783
2784 def setFitsHeader(self, dataOut, metadatafile):
2785
2786 header_data = pyfits.PrimaryHDU()
2787
2788 metadata4fits = Metadata(metadatafile)
2789 for parameter in metadata4fits.parmConfObjList:
2790 parm_name = parameter.name
2791 parm_value = parameter.value
2792
2793 if parm_value == 'fromdatadatetime':
2794 value = time.strftime("%b %d %Y %H:%M:%S", dataOut.datatime.timetuple())
2795 elif parm_value == 'fromdataheights':
2796 value = dataOut.nHeights
2797 elif parm_value == 'fromdatachannel':
2798 value = dataOut.nChannels
2799 elif parm_value == 'fromdatasamples':
2800 value = dataOut.nFFTPoints
2801 else:
2802 value = parm_value
2803
2804 header_data.header[parm_name] = value
2805
2806 header_data.header['NBLOCK'] = self.blockIndex
2807
2808 header_data.writeto(self.filename)
2809
2810
2811 def setup(self, dataOut, path, dataBlocksPerFile, metadatafile):
2812
2813 self.path = path
2814 self.dataOut = dataOut
2815 self.metadatafile = metadatafile
2816 self.dataBlocksPerFile = dataBlocksPerFile
2817
2818 def open(self):
2819 self.fitsObj = pyfits.open(self.filename, mode='update')
2820
2821
2822 def addData(self, data):
2823 self.open()
2824 extension = pyfits.ImageHDU(data=data, name=self.fitsObj[0].header['DATA'])
2825 extension.header['UTCTIME'] = self.dataOut.utctime
2826 self.fitsObj.append(extension)
2827 self.blockIndex += 1
2828 self.fitsObj[0].header['NBLOCK'] = self.blockIndex
2829
2830 self.write()
2831
2832 def write(self):
2833
2834 self.fitsObj.flush(verbose=True)
2835 self.fitsObj.close()
2836
2837
2838 def setNextFile(self):
2839
2840 ext = self.ext
2841 path = self.path
2842
2843 timeTuple = time.localtime( self.dataOut.utctime)
2844 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
2845
2846 fullpath = os.path.join( path, subfolder )
2847 if not( os.path.exists(fullpath) ):
2848 os.mkdir(fullpath)
2849 self.setFile = -1 #initialize the set counter
2850 else:
2851 filesList = os.listdir( fullpath )
2852 if len( filesList ) > 0:
2853 filesList = sorted( filesList, key=str.lower )
2854 filen = filesList[-1]
2855
2856 if isNumber( filen[8:11] ):
2857 self.setFile = int( filen[8:11] ) #initialize the set counter from the last file's set number
2858 else:
2859 self.setFile = -1
2860 else:
2861 self.setFile = -1 #initialize the set counter
2862
2863 setFile = self.setFile
2864 setFile += 1
2865
2866 file = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
2867 timeTuple.tm_year,
2868 timeTuple.tm_yday,
2869 setFile,
2870 ext )
2871
2872 filename = os.path.join( path, subfolder, file )
2873
2874 self.blockIndex = 0
2875 self.filename = filename
2876 self.setFile = setFile
2877 self.flagIsNewFile = 1
2878
2879 print 'Writing the file: %s'%self.filename
2880
2881 self.setFitsHeader(self.dataOut, self.metadatafile)
2882
2883 return 1
2884
2885 def writeBlock(self):
2886 self.addData(self.dataOut.data_spc)
2887 self.flagIsNewFile = 0
2888
2889
2890 def __setNewBlock(self):
2891
2892 if self.flagIsNewFile:
2893 return 1
2894
2895 if self.blockIndex < self.dataBlocksPerFile:
2896 return 1
2897
2898 if not( self.setNextFile() ):
2899 return 0
2900
2901 return 1
2902
2903 def writeNextBlock(self):
2904 if not( self.__setNewBlock() ):
2905 return 0
2906 self.writeBlock()
2907 return 1
2908
2909 def putData(self):
2910 if self.flagIsNewFile:
2911 self.setNextFile()
2912 self.writeNextBlock()
2913
2914 def run(self, dataOut, **kwargs):
2915 if not(self.isConfig):
2916 self.setup(dataOut, **kwargs)
2917 self.isConfig = True
2918 self.putData()
2919
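# Sketch of how a file produced by FitsWriter can be inspected back with pyfits
# (illustrative only: the function name and path are assumptions). The primary
# HDU carries the keywords taken from the metadata XML plus NBLOCK, and every
# appended image extension holds one data_spc block stamped with its UTCTIME.
def _inspectFitsFile(filename):
    hdulist = pyfits.open(filename)
    print 'NBLOCK =', hdulist[0].header['NBLOCK']
    for extension in hdulist[1:]:
        print extension.header['UTCTIME'], extension.data.shape
    hdulist.close()
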
2920
@@ -1,76 +1,82
1 1 import os, sys
2 2
3 3 path = os.path.split(os.getcwd())[0]
4 4 sys.path.append(path)
5 5
6 6 from controller import *
7 7
8 8 desc = "Sun Experiment Test"
9 9 filename = "sunexp.xml"
10 10
11 11 controllerObj = Project()
12 12
13 13 controllerObj.setup(id = '191', name='test01', description=desc)
14 14 #/Users/dsuarez/Documents/RadarData/SunExperiment
15 15 #/Volumes/data_e/PaseDelSol/Raw/100KHZ
16 16 readUnitConfObj = controllerObj.addReadUnit(datatype='Voltage',
17 17 path='/Users/dsuarez/Documents/RadarData/SunExperiment',
18 18 startDate='2013/02/06',
19 19 endDate='2013/12/31',
20 20 startTime='00:30:00',
21 21 endTime='17:40:59',
22 22 online=0,
23 23 delay=3,
24 24 walk=1)
25 25
26 26 procUnitConfObj0 = controllerObj.addProcUnit(datatype='Voltage', inputId=readUnitConfObj.getId())
27 27
28 28 procUnitConfObj1 = controllerObj.addProcUnit(datatype='SpectraHeis', inputId=procUnitConfObj0.getId())
29 29
30 30 opObj11 = procUnitConfObj1.addOperation(name='IncohInt4SpectraHeis', optype='other')
31 31 opObj11.addParameter(name='timeInterval', value='5', format='float')
32 32
33 33 opObj11 = procUnitConfObj1.addOperation(name='SpectraHeisScope', optype='other')
34 34 opObj11.addParameter(name='idfigure', value='10', format='int')
35 35 opObj11.addParameter(name='wintitle', value='SpectraHeisPlot', format='str')
36 36 #opObj11.addParameter(name='ymin', value='125', format='int')
37 37 #opObj11.addParameter(name='ymax', value='140', format='int')
38 38 #opObj11.addParameter(name='channelList', value='0,1,2', format='intlist')
39 39 #opObj11.addParameter(name='showprofile', value='1', format='int')
40 opObj11.addParameter(name='save', value='1', format='bool')
41 opObj11.addParameter(name='figfile', value='spc-noise.png', format='str')
42 opObj11.addParameter(name='figpath', value='/Users/dsuarez/Pictures/sun_pics', format='str')
43 opObj11.addParameter(name='ftp', value='1', format='int')
44 opObj11.addParameter(name='ftpratio', value='10', format='int')
40 #opObj11.addParameter(name='save', value='1', format='bool')
41 #opObj11.addParameter(name='figfile', value='spc-noise.png', format='str')
42 #opObj11.addParameter(name='figpath', value='/Users/dsuarez/Pictures/sun_pics', format='str')
43 #opObj11.addParameter(name='ftp', value='1', format='int')
44 #opObj11.addParameter(name='ftpratio', value='10', format='int')
45 45
46 46 opObj11 = procUnitConfObj1.addOperation(name='RTIfromSpectraHeis', optype='other')
47 47 opObj11.addParameter(name='idfigure', value='6', format='int')
48 48 opObj11.addParameter(name='wintitle', value='RTIPLot', format='str')
49 #opObj11.addParameter(name='zmin', value='10', format='int')
50 #opObj11.addParameter(name='zmax', value='40', format='int')
49 opObj11.addParameter(name='xmin', value='11.5', format='float')
50 opObj11.addParameter(name='xmax', value='12.5', format='float')
51 51 opObj11.addParameter(name='ymin', value='60', format='int')
52 52 opObj11.addParameter(name='ymax', value='85', format='int')
53 53 #opObj11.addParameter(name='channelList', value='0,1,2,3', format='intlist')
54 54 #opObj11.addParameter(name='timerange', value='600', format='int')
55 55 #opObj11.addParameter(name='showprofile', value='0', format='int')
56 opObj11.addParameter(name='save', value='1', format='bool')
57 opObj11.addParameter(name='figfile', value='rti-noise.png', format='str')
58 opObj11.addParameter(name='figpath', value='/Users/dsuarez/Pictures/sun_pics', format='str')
59 opObj11.addParameter(name='ftp', value='1', format='int')
60 opObj11.addParameter(name='ftpratio', value='10', format='int')
56 #opObj11.addParameter(name='save', value='1', format='bool')
57 #opObj11.addParameter(name='figfile', value='rti-noise.png', format='str')
58 #opObj11.addParameter(name='figpath', value='/Users/dsuarez/Pictures/sun_pics', format='str')
59 #opObj11.addParameter(name='ftp', value='1', format='int')
60 #opObj11.addParameter(name='ftpratio', value='10', format='int')
61 opObj11.addParameter(name='useLocalTime', value='1', format='bool')
62 opObj11.addParameter(name='timezone', value='300', format='int')
61 63
62 opObj11 = procUnitConfObj1.addOperation(name='SpectraHeisWriter', optype='other')
63 opObj11.addParameter(name='wrpath', value='/Users/dsuarez/Remote', format='str')
64 #opObj11.addParameter(name='blocksPerFile', value='200', format='int')
64 #opObj11 = procUnitConfObj1.addOperation(name='SpectraHeisWriter', optype='other')
65 #opObj11.addParameter(name='wrpath', value='/Users/dsuarez/Remote', format='str')
66 ##opObj11.addParameter(name='blocksPerFile', value='200', format='int')
65 67
68 opObj11 = procUnitConfObj1.addOperation(name='FitsWriter', optype='other')
69 opObj11.addParameter(name='path', value='/Users/dsuarez/Remote', format='str')
70 opObj11.addParameter(name='dataBlocksPerFile', value='10', format='int')
71 opObj11.addParameter(name='metadatafile', value='/Users/dsuarez/Desktop/metadata_fits.xml', format='str')
66 72
67 73 print "Escribiendo el archivo XML"
68 74 controllerObj.writeXml(filename)
69 75 print "Leyendo el archivo XML"
70 76 controllerObj.readXml(filename)
71 77
72 78 controllerObj.createObjects()
73 79 controllerObj.connectObjects()
74 80 controllerObj.run()
75 81
76 82 No newline at end of file