Fixes a bug when writing rawdata files: by mistake, the processingHeader size was including the codes. ...
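In short (a minimal sketch of the corrected bookkeeping; the names follow VoltageWriter.setFirstHeader and __getBlockSize in this file): the code bytes are no longer added to the ProcessingHeader size, and the block size is computed from the number of profiles actually written per block.

    # before: codesize = 8 + 4 * nCode * nBaud; processingHeaderSize += codesize
    # after:  only the fixed fields plus the window block are counted
    processingHeaderSize = 40 + 12   # fixed ProcessingHeader fields + one window (firstHeight, deltaHeight, samplesWin)
    blocksize = nHeights * nChannels * profilesPerBlock * datatypeValue * 2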
Daniel Valdez -
r414:b13dc7a68522
@@ -1,3378 +1,3378 @@
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JRODataIO.py 169 2012-11-19 21:57:03Z murco $
5 5 '''
6 6
7 7 import os, sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import time, datetime
13 13 from xml.etree.ElementTree import Element, SubElement, ElementTree
14 14 try:
15 15 import pyfits
16 16 except:
17 17 print "pyfits module has not been imported, it should be installed to save files in fits format"
18 18
19 19 from jrodata import *
20 20 from jroheaderIO import *
21 21 from jroprocessing import *
22 22
23 23 LOCALTIME = True #-18000
24 24
25 25 def isNumber(str):
26 26 """
27 27 Chequea si el conjunto de caracteres que componen un string puede ser convertidos a un numero.
28 28
29 29 Excepciones:
30 30 Si un determinado string no puede ser convertido a numero
31 31 Input:
32 32 str, string al cual se le analiza para determinar si convertible a un numero o no
33 33
34 34 Return:
35 35 True : si el string es uno numerico
36 36 False : no es un string numerico
37 37 """
38 38 try:
39 39 float( str )
40 40 return True
41 41 except:
42 42 return False
43 43
44 44 def isThisFileinRange(filename, startUTSeconds, endUTSeconds):
45 45 """
46 46 Esta funcion determina si un archivo de datos se encuentra o no dentro del rango de fecha especificado.
47 47
48 48 Inputs:
49 49 filename : nombre completo del archivo de datos en formato Jicamarca (.r)
50 50
51 51 startUTSeconds : fecha inicial del rango seleccionado. La fecha esta dada en
52 52 segundos contados desde 01/01/1970.
53 53 endUTSeconds : fecha final del rango seleccionado. La fecha esta dada en
54 54 segundos contados desde 01/01/1970.
55 55
56 56 Return:
57 57 Boolean : Retorna True si el archivo de datos contiene datos en el rango de
58 58 fecha especificado, de lo contrario retorna False.
59 59
60 60 Excepciones:
61 61 Si el archivo no existe o no puede ser abierto
62 62 Si la cabecera no puede ser leida.
63 63
64 64 """
65 65 basicHeaderObj = BasicHeader(LOCALTIME)
66 66
67 67 try:
68 68 fp = open(filename,'rb')
69 69 except:
70 70 raise IOError, "The file %s can't be opened" %(filename)
71 71
72 72 sts = basicHeaderObj.read(fp)
73 73 fp.close()
74 74
75 75 if not(sts):
76 76 print "Skipping the file %s because it does not have a valid header" %(filename)
77 77 return 0
78 78
79 79 if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
80 80 return 0
81 81
82 82 return 1
83 83
84 84 def isFileinThisTime(filename, startTime, endTime):
85 85 """
86 86 Retorna 1 si el archivo de datos se encuentra dentro del rango de horas especificado.
87 87
88 88 Inputs:
89 89 filename : nombre completo del archivo de datos en formato Jicamarca (.r)
90 90
91 91 startTime : tiempo inicial del rango seleccionado en formato datetime.time
92 92
93 93 endTime : tiempo final del rango seleccionado en formato datetime.time
94 94
95 95 Return:
96 96 Boolean : Retorna True si el archivo de datos contiene datos en el rango de
97 97 fecha especificado, de lo contrario retorna False.
98 98
99 99 Excepciones:
100 100 Si el archivo no existe o no puede ser abierto
101 101 Si la cabecera no puede ser leida.
102 102
103 103 """
104 104
105 105
106 106 try:
107 107 fp = open(filename,'rb')
108 108 except:
109 109 raise IOError, "The file %s can't be opened" %(filename)
110 110
111 111 basicHeaderObj = BasicHeader(LOCALTIME)
112 112 sts = basicHeaderObj.read(fp)
113 113 fp.close()
114 114
115 115 thisDatetime = basicHeaderObj.datatime
116 116 thisTime = basicHeaderObj.datatime.time()
117 117
118 118 if not(sts):
119 119 print "Skipping the file %s because it does not have a valid header" %(filename)
120 120 return None
121 121
122 122 if not ((startTime <= thisTime) and (endTime > thisTime)):
123 123 return None
124 124
125 125 return thisDatetime
126 126
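# Usage sketch (hypothetical path and times; LOCALTIME handling aside):
#
#     import calendar, datetime
#     filename = "/data/d2012300/D2012300001.r"
#     startUT = calendar.timegm(datetime.datetime(2012, 10, 26, 0, 0, 0).timetuple())
#     endUT = calendar.timegm(datetime.datetime(2012, 10, 27, 0, 0, 0).timetuple())
#     if isThisFileinRange(filename, startUT, endUT):
#         print "file has data inside the UTC range"
#     thisDatetime = isFileinThisTime(filename, datetime.time(8, 0, 0), datetime.time(18, 0, 0))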
127 127 def getFileFromSet(path,ext,set):
128 128 validFilelist = []
129 129 fileList = os.listdir(path)
130 130
131 131 # 0 1234 567 89A BCDE
132 132 # H YYYY DDD SSS .ext
133 133
134 134 for file in fileList:
135 135 try:
136 136 year = int(file[1:5])
137 137 doy = int(file[5:8])
138 138
139 139
140 140 except:
141 141 continue
142 142
143 143 if (os.path.splitext(file)[-1].lower() != ext.lower()):
144 144 continue
145 145
146 146 validFilelist.append(file)
147 147
148 148 myfile = fnmatch.filter(validFilelist,'*%4.4d%3.3d%3.3d*'%(year,doy,set))
149 149
150 150 if len(myfile)!= 0:
151 151 return myfile[0]
152 152 else:
153 153 filename = '*%4.4d%3.3d%3.3d%s'%(year,doy,set,ext.lower())
154 154 print 'the filename %s does not exist'%filename
155 155 print '...going to the last file: '
156 156
157 157 if validFilelist:
158 158 validFilelist = sorted( validFilelist, key=str.lower )
159 159 return validFilelist[-1]
160 160
161 161 return None
162 162
163 163
164 164 def getlastFileFromPath(path, ext):
165 165 """
166 166 Depura el fileList dejando solo los que cumplan el formato de "PYYYYDDDSSS.ext"
167 167 al final de la depuracion devuelve el ultimo file de la lista que quedo.
168 168
169 169 Input:
170 170 fileList : lista conteniendo todos los files (sin path) que componen una determinada carpeta
171 171 ext : extension de los files contenidos en una carpeta
172 172
173 173 Return:
174 174 El ultimo file de una determinada carpeta, no se considera el path.
175 175 """
176 176 validFilelist = []
177 177 fileList = os.listdir(path)
178 178
179 179 # 0 1234 567 89A BCDE
180 180 # H YYYY DDD SSS .ext
181 181
182 182 for file in fileList:
183 183 try:
184 184 year = int(file[1:5])
185 185 doy = int(file[5:8])
186 186
187 187
188 188 except:
189 189 continue
190 190
191 191 if (os.path.splitext(file)[-1].lower() != ext.lower()):
192 192 continue
193 193
194 194 validFilelist.append(file)
195 195
196 196 if validFilelist:
197 197 validFilelist = sorted( validFilelist, key=str.lower )
198 198 return validFilelist[-1]
199 199
200 200 return None
201 201
202 202 def checkForRealPath(path, foldercounter, year, doy, set, ext):
203 203 """
204 204 Since Linux is case sensitive, checkForRealPath finds the correct name of a path.
205 205 It tries several upper/lower-case name combinations to determine
206 206 the exact path of a given file.
207 207 
208 208 Example :
209 209 the correct file name is .../.../D2009307/P2009307367.ext
210 210 
211 211 Then the function tries the following combinations
212 212 .../.../y2009307367.ext
213 213 .../.../Y2009307367.ext
214 214 .../.../x2009307/y2009307367.ext
215 215 .../.../x2009307/Y2009307367.ext
216 216 .../.../X2009307/y2009307367.ext
217 217 .../.../X2009307/Y2009307367.ext
218 218 in this case the last letter combination is identical to the file being searched for
219 219 
220 220 Return:
221 221 If it finds the right combination it returns the full path and the file name;
222 222 otherwise it returns None as the path and the last upper-case name combination
223 223 as the filename
224 224 """
225 225 fullfilename = None
226 226 find_flag = False
227 227 filename = None
228 228
229 229 prefixDirList = [None,'d','D']
230 230 if ext.lower() == ".r": #voltage
231 231 prefixFileList = ['d','D']
232 232 elif ext.lower() == ".pdata": #spectra
233 233 prefixFileList = ['p','P']
234 234 else:
235 235 return None, filename
236 236
237 237 #barrido por las combinaciones posibles
238 238 for prefixDir in prefixDirList:
239 239 thispath = path
240 240 if prefixDir != None:
241 241 #formo el nombre del directorio xYYYYDDD (x=d o x=D)
242 242 if foldercounter == 0:
243 243 thispath = os.path.join(path, "%s%04d%03d" % ( prefixDir, year, doy ))
244 244 else:
245 245 thispath = os.path.join(path, "%s%04d%03d_%02d" % ( prefixDir, year, doy , foldercounter))
246 246 for prefixFile in prefixFileList: #barrido por las dos combinaciones posibles de "D"
247 247 filename = "%s%04d%03d%03d%s" % ( prefixFile, year, doy, set, ext ) #formo el nombre del file xYYYYDDDSSS.ext
248 248 fullfilename = os.path.join( thispath, filename ) #formo el path completo
249 249
250 250 if os.path.exists( fullfilename ): #verifico que exista
251 251 find_flag = True
252 252 break
253 253 if find_flag:
254 254 break
255 255
256 256 if not(find_flag):
257 257 return None, filename
258 258
259 259 return fullfilename, filename
260 260
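# Example (sketch): the naming convention xYYYYDDDSSS.ext assumed above, parsed the same
# way getFileFromSet and getlastFileFromPath do it:
#
#     filename = "D2009307367.r"      # hypothetical file
#     year = int(filename[1:5])       # 2009
#     doy = int(filename[5:8])        # 307
#     set = int(filename[8:11])       # 367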
261 261 def isDoyFolder(folder):
262 262 try:
263 263 year = int(folder[1:5])
264 264 except:
265 265 return 0
266 266
267 267 try:
268 268 doy = int(folder[5:8])
269 269 except:
270 270 return 0
271 271
272 272 return 1
273 273
274 274 class JRODataIO:
275 275
276 276 c = 3E8
277 277
278 278 isConfig = False
279 279
280 280 basicHeaderObj = BasicHeader(LOCALTIME)
281 281
282 282 systemHeaderObj = SystemHeader()
283 283
284 284 radarControllerHeaderObj = RadarControllerHeader()
285 285
286 286 processingHeaderObj = ProcessingHeader()
287 287
288 288 online = 0
289 289
290 290 dtype = None
291 291
292 292 pathList = []
293 293
294 294 filenameList = []
295 295
296 296 filename = None
297 297
298 298 ext = None
299 299
300 300 flagIsNewFile = 1
301 301
302 302 flagTimeBlock = 0
303 303
304 304 flagIsNewBlock = 0
305 305
306 306 fp = None
307 307
308 308 firstHeaderSize = 0
309 309
310 310 basicHeaderSize = 24
311 311
312 312 versionFile = 1103
313 313
314 314 fileSize = None
315 315
316 316 ippSeconds = None
317 317
318 318 fileSizeByHeader = None
319 319
320 320 fileIndex = None
321 321
322 322 profileIndex = None
323 323
324 324 blockIndex = None
325 325
326 326 nTotalBlocks = None
327 327
328 328 maxTimeStep = 30
329 329
330 330 lastUTTime = None
331 331
332 332 datablock = None
333 333
334 334 dataOut = None
335 335
336 336 blocksize = None
337 337
338 338 def __init__(self):
339 339
340 340 raise ValueError, "Not implemented"
341 341
342 342 def run(self):
343 343
344 344 raise ValueError, "Not implemented"
345 345
346 346 def getOutput(self):
347 347
348 348 return self.dataOut
349 349
350 350 class JRODataReader(JRODataIO, ProcessingUnit):
351 351
352 352 nReadBlocks = 0
353 353
354 354 delay = 10 #number of seconds waiting a new file
355 355
356 356 nTries = 3 #quantity tries
357 357
358 358 nFiles = 3 #number of files for searching
359 359
360 360 path = None
361 361
362 362 foldercounter = 0
363 363
364 364 flagNoMoreFiles = 0
365 365
366 366 datetimeList = []
367 367
368 368 __isFirstTimeOnline = 1
369 369
370 370 __printInfo = True
371 371
372 372 profileIndex = None
373 373
374 374 def __init__(self):
375 375
376 376 """
377 377
378 378 """
379 379
380 380 raise ValueError, "This method has not been implemented"
381 381
382 382
383 383 def createObjByDefault(self):
384 384 """
385 385
386 386 """
387 387 raise ValueError, "This method has not been implemented"
388 388
389 389 def getBlockDimension(self):
390 390
391 391 raise ValueError, "No implemented"
392 392
393 393 def __searchFilesOffLine(self,
394 394 path,
395 395 startDate,
396 396 endDate,
397 397 startTime=datetime.time(0,0,0),
398 398 endTime=datetime.time(23,59,59),
399 399 set=None,
400 400 expLabel='',
401 401 ext='.r',
402 402 walk=True):
403 403
404 404 pathList = []
405 405
406 406 if not walk:
407 407 pathList.append(path)
408 408
409 409 else:
410 410 dirList = []
411 411 for thisPath in os.listdir(path):
412 412 if not os.path.isdir(os.path.join(path,thisPath)):
413 413 continue
414 414 if not isDoyFolder(thisPath):
415 415 continue
416 416
417 417 dirList.append(thisPath)
418 418
419 419 if not(dirList):
420 420 return None, None
421 421
422 422 thisDate = startDate
423 423
424 424 while(thisDate <= endDate):
425 425 year = thisDate.timetuple().tm_year
426 426 doy = thisDate.timetuple().tm_yday
427 427
428 428 matchlist = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy) + '*')
429 429 if len(matchlist) == 0:
430 430 thisDate += datetime.timedelta(1)
431 431 continue
432 432 for match in matchlist:
433 433 pathList.append(os.path.join(path,match,expLabel))
434 434
435 435 thisDate += datetime.timedelta(1)
436 436
437 437 if pathList == []:
438 438 print "No folders were found for the date range: %s-%s" %(startDate, endDate)
439 439 return None, None
440 440
441 441 print "%d folder(s) found for the date range: %s - %s" %(len(pathList), startDate, endDate)
442 442
443 443 filenameList = []
444 444 datetimeList = []
445 445
446 446 for i in range(len(pathList)):
447 447
448 448 thisPath = pathList[i]
449 449
450 450 fileList = glob.glob1(thisPath, "*%s" %ext)
451 451 fileList.sort()
452 452
453 453 for file in fileList:
454 454
455 455 filename = os.path.join(thisPath,file)
456 456 thisDatetime = isFileinThisTime(filename, startTime, endTime)
457 457
458 458 if not(thisDatetime):
459 459 continue
460 460
461 461 filenameList.append(filename)
462 462 datetimeList.append(thisDatetime)
463 463
464 464 if not(filenameList):
465 465 print "No files were found for the time range %s - %s" %(startTime, endTime)
466 466 return None, None
467 467
468 468 print "%d file(s) found for the time range: %s - %s" %(len(filenameList), startTime, endTime)
469 469 print
470 470
471 471 for i in range(len(filenameList)):
472 472 print "%s -> [%s]" %(filenameList[i], datetimeList[i].ctime())
473 473
474 474 self.filenameList = filenameList
475 475 self.datetimeList = datetimeList
476 476
477 477 return pathList, filenameList
478 478
479 479 def __searchFilesOnLine(self, path, expLabel = "", ext = None, walk=True, set=None):
480 480
481 481 """
482 482 Searches for the last file of the last folder (whether or not determined by startDateTime) and
483 483 returns the file found along with other data.
484 484 
485 485 Input:
486 486 path : folder that contains the data files
487 487 
488 488 expLabel : name of the sub-experiment (subfolder)
489 489 
490 490 ext : extension of the files
491 491 
492 492 walk : if enabled, the search is performed inside the doy subdirectories (doypath)
493 493 
494 494 Return:
495 495 directory : the directory where the file was found
496 496 filename : the last file of the given folder
497 497 year : the year
498 498 doy : the day of year
499 499 set : the file set number
500 500 
501 501 
502 502 """
503 503 dirList = []
504 504
505 505 if not walk:
506 506 fullpath = path
507 507 foldercounter = 0
508 508 else:
509 509 #Filtra solo los directorios
510 510 for thisPath in os.listdir(path):
511 511 if not os.path.isdir(os.path.join(path,thisPath)):
512 512 continue
513 513 if not isDoyFolder(thisPath):
514 514 continue
515 515
516 516 dirList.append(thisPath)
517 517
518 518 if not(dirList):
519 519 return None, None, None, None, None, None
520 520
521 521 dirList = sorted( dirList, key=str.lower )
522 522
523 523 doypath = dirList[-1]
524 524 foldercounter = int(doypath.split('_')[1]) if len(doypath.split('_'))>1 else 0
525 525 fullpath = os.path.join(path, doypath, expLabel)
526 526
527 527
528 528 print "%s folder was found: " %(fullpath )
529 529
530 530 if set == None:
531 531 filename = getlastFileFromPath(fullpath, ext)
532 532 else:
533 533 filename = getFileFromSet(fullpath, ext, set)
534 534
535 535 if not(filename):
536 536 return None, None, None, None, None, None
537 537
538 538 print "%s file was found" %(filename)
539 539
540 540 if not(self.__verifyFile(os.path.join(fullpath, filename))):
541 541 return None, None, None, None, None, None
542 542
543 543 year = int( filename[1:5] )
544 544 doy = int( filename[5:8] )
545 545 set = int( filename[8:11] )
546 546
547 547 return fullpath, foldercounter, filename, year, doy, set
548 548
549 549 def __setNextFileOffline(self):
550 550
551 551 idFile = self.fileIndex
552 552
553 553 while (True):
554 554 idFile += 1
555 555 if not(idFile < len(self.filenameList)):
556 556 self.flagNoMoreFiles = 1
557 557 print "No more Files"
558 558 return 0
559 559
560 560 filename = self.filenameList[idFile]
561 561
562 562 if not(self.__verifyFile(filename)):
563 563 continue
564 564
565 565 fileSize = os.path.getsize(filename)
566 566 fp = open(filename,'rb')
567 567 break
568 568
569 569 self.flagIsNewFile = 1
570 570 self.fileIndex = idFile
571 571 self.filename = filename
572 572 self.fileSize = fileSize
573 573 self.fp = fp
574 574
575 575 print "Setting the file: %s"%self.filename
576 576
577 577 return 1
578 578
579 579 def __setNextFileOnline(self):
580 580 """
581 581 Busca el siguiente file que tenga suficiente data para ser leida, dentro de un folder especifico, si
582 582 no encuentra un file valido espera un tiempo determinado y luego busca en los posibles n files
583 583 siguientes.
584 584
585 585 Affected:
586 586 self.flagIsNewFile
587 587 self.filename
588 588 self.fileSize
589 589 self.fp
590 590 self.set
591 591 self.flagNoMoreFiles
592 592
593 593 Return:
594 594 0 : si luego de una busqueda del siguiente file valido este no pudo ser encontrado
595 595 1 : si el file fue abierto con exito y esta listo a ser leido
596 596
597 597 Excepciones:
598 598 Si un determinado file no puede ser abierto
599 599 """
600 600 nFiles = 0
601 601 fileOk_flag = False
602 602 firstTime_flag = True
603 603
604 604 self.set += 1
605 605
606 606 if self.set > 999:
607 607 self.set = 0
608 608 self.foldercounter += 1
609 609
610 610 #busca el 1er file disponible
611 611 fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
612 612 if fullfilename:
613 613 if self.__verifyFile(fullfilename, False):
614 614 fileOk_flag = True
615 615
616 616 #si no encuentra un file entonces espera y vuelve a buscar
617 617 if not(fileOk_flag):
618 618 for nFiles in range(self.nFiles+1): #busco en los siguientes self.nFiles+1 files posibles
619 619
620 620 if firstTime_flag: #si es la 1era vez entonces hace el for self.nTries veces
621 621 tries = self.nTries
622 622 else:
623 623 tries = 1 #si no es la 1era vez entonces solo lo hace una vez
624 624
625 625 for nTries in range( tries ):
626 626 if firstTime_flag:
627 627 print "\tWaiting %0.2f sec for the file \"%s\" , try %03d ..." % ( self.delay, filename, nTries+1 )
628 628 time.sleep( self.delay )
629 629 else:
630 630 print "\tSearching next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext)
631 631
632 632 fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
633 633 if fullfilename:
634 634 if self.__verifyFile(fullfilename):
635 635 fileOk_flag = True
636 636 break
637 637
638 638 if fileOk_flag:
639 639 break
640 640
641 641 firstTime_flag = False
642 642
643 643 print "\tSkipping the file \"%s\" because it doesn't exist" % filename
644 644 self.set += 1
645 645
646 646 if nFiles == (self.nFiles-1): #si no encuentro el file buscado cambio de carpeta y busco en la siguiente carpeta
647 647 self.set = 0
648 648 self.doy += 1
649 649 self.foldercounter = 0
650 650
651 651 if fileOk_flag:
652 652 self.fileSize = os.path.getsize( fullfilename )
653 653 self.filename = fullfilename
654 654 self.flagIsNewFile = 1
655 655 if self.fp != None: self.fp.close()
656 656 self.fp = open(fullfilename, 'rb')
657 657 self.flagNoMoreFiles = 0
658 658 print 'Setting the file: %s' % fullfilename
659 659 else:
660 660 self.fileSize = 0
661 661 self.filename = None
662 662 self.flagIsNewFile = 0
663 663 self.fp = None
664 664 self.flagNoMoreFiles = 1
665 665 print 'No more Files'
666 666
667 667 return fileOk_flag
668 668
669 669
670 670 def setNextFile(self):
671 671 if self.fp != None:
672 672 self.fp.close()
673 673
674 674 if self.online:
675 675 newFile = self.__setNextFileOnline()
676 676 else:
677 677 newFile = self.__setNextFileOffline()
678 678
679 679 if not(newFile):
680 680 return 0
681 681
682 682 self.__readFirstHeader()
683 683 self.nReadBlocks = 0
684 684 return 1
685 685
686 686 def __waitNewBlock(self):
687 687 """
688 688 Returns 1 if a new data block was found, 0 otherwise.
689 689 
690 690 If the reading mode is offline it always returns 0
691 691 """
692 692 if not self.online:
693 693 return 0
694 694
695 695 if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
696 696 return 0
697 697
698 698 currentPointer = self.fp.tell()
699 699
700 700 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
701 701
702 702 for nTries in range( self.nTries ):
703 703
704 704 self.fp.close()
705 705 self.fp = open( self.filename, 'rb' )
706 706 self.fp.seek( currentPointer )
707 707
708 708 self.fileSize = os.path.getsize( self.filename )
709 709 currentSize = self.fileSize - currentPointer
710 710
711 711 if ( currentSize >= neededSize ):
712 712 self.__rdBasicHeader()
713 713 return 1
714 714
715 715 if self.fileSize == self.fileSizeByHeader:
716 716 # self.flagEoF = True
717 717 return 0
718 718
719 719 print "\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
720 720 time.sleep( self.delay )
721 721
722 722
723 723 return 0
724 724
725 725 def __jumpToLastBlock(self):
726 726
727 727 if not(self.__isFirstTimeOnline):
728 728 return
729 729
730 730 csize = self.fileSize - self.fp.tell()
731 731 blocksize = self.processingHeaderObj.blockSize
732 732
733 733 #salta el primer bloque de datos
734 734 if csize > self.processingHeaderObj.blockSize:
735 735 self.fp.seek(self.fp.tell() + blocksize)
736 736 else:
737 737 return
738 738
739 739 csize = self.fileSize - self.fp.tell()
740 740 neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
741 741 while True:
742 742
743 743 if self.fp.tell()<self.fileSize:
744 744 self.fp.seek(self.fp.tell() + neededsize)
745 745 else:
746 746 self.fp.seek(self.fp.tell() - neededsize)
747 747 break
748 748
749 749 # csize = self.fileSize - self.fp.tell()
750 750 # neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
751 751 # factor = int(csize/neededsize)
752 752 # if factor > 0:
753 753 # self.fp.seek(self.fp.tell() + factor*neededsize)
754 754
755 755 self.flagIsNewFile = 0
756 756 self.__isFirstTimeOnline = 0
757 757
758 758
759 759 def __setNewBlock(self):
760 760
761 761 if self.fp == None:
762 762 return 0
763 763
764 764 if self.online:
765 765 self.__jumpToLastBlock()
766 766
767 767 if self.flagIsNewFile:
768 768 return 1
769 769
770 770 self.lastUTTime = self.basicHeaderObj.utc
771 771 currentSize = self.fileSize - self.fp.tell()
772 772 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
773 773
774 774 if (currentSize >= neededSize):
775 775 self.__rdBasicHeader()
776 776 return 1
777 777
778 778 if self.__waitNewBlock():
779 779 return 1
780 780
781 781 if not(self.setNextFile()):
782 782 return 0
783 783
784 784 deltaTime = self.basicHeaderObj.utc - self.lastUTTime #
785 785
786 786 self.flagTimeBlock = 0
787 787
788 788 if deltaTime > self.maxTimeStep:
789 789 self.flagTimeBlock = 1
790 790
791 791 return 1
792 792
793 793
794 794 def readNextBlock(self):
795 795 if not(self.__setNewBlock()):
796 796 return 0
797 797
798 798 if not(self.readBlock()):
799 799 return 0
800 800
801 801 return 1
802 802
803 803 def __rdProcessingHeader(self, fp=None):
804 804 if fp == None:
805 805 fp = self.fp
806 806
807 807 self.processingHeaderObj.read(fp)
808 808
809 809 def __rdRadarControllerHeader(self, fp=None):
810 810 if fp == None:
811 811 fp = self.fp
812 812
813 813 self.radarControllerHeaderObj.read(fp)
814 814
815 815 def __rdSystemHeader(self, fp=None):
816 816 if fp == None:
817 817 fp = self.fp
818 818
819 819 self.systemHeaderObj.read(fp)
820 820
821 821 def __rdBasicHeader(self, fp=None):
822 822 if fp == None:
823 823 fp = self.fp
824 824
825 825 self.basicHeaderObj.read(fp)
826 826
827 827
828 828 def __readFirstHeader(self):
829 829 self.__rdBasicHeader()
830 830 self.__rdSystemHeader()
831 831 self.__rdRadarControllerHeader()
832 832 self.__rdProcessingHeader()
833 833
834 834 self.firstHeaderSize = self.basicHeaderObj.size
835 835
836 836 datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
837 837 if datatype == 0:
838 838 datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
839 839 elif datatype == 1:
840 840 datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')])
841 841 elif datatype == 2:
842 842 datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')])
843 843 elif datatype == 3:
844 844 datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')])
845 845 elif datatype == 4:
846 846 datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')])
847 847 elif datatype == 5:
848 848 datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')])
849 849 else:
850 850 raise ValueError, 'Data type was not defined'
851 851
852 852 self.dtype = datatype_str
853 853 self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
854 854 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1)
855 855 # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
856 856 # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
857 857 self.getBlockDimension()
858 858
859 859
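# Worked example (sketch, assuming the PROCFLAG datatype bits are consecutive powers of two
# starting at DATATYPE_CHAR): if processFlags has DATATYPE_SHORT set, then
#     datatype = log2(DATATYPE_SHORT) - log2(DATATYPE_CHAR) = 1  ->  dtype [('real','<i2'),('imag','<i2')]
# and, with the IPP given in km (e.g. ipp = 300 km):
#     ippSeconds = 2 * 1000 * 300 / 3e8 = 0.002 s per profile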
860 860 def __verifyFile(self, filename, msgFlag=True):
861 861 msg = None
862 862 try:
863 863 fp = open(filename, 'rb')
864 864 currentPosition = fp.tell()
865 865 except:
866 866 if msgFlag:
867 867 print "The file %s can't be opened" % (filename)
868 868 return False
869 869
870 870 neededSize = self.processingHeaderObj.blockSize + self.firstHeaderSize
871 871
872 872 if neededSize == 0:
873 873 basicHeaderObj = BasicHeader(LOCALTIME)
874 874 systemHeaderObj = SystemHeader()
875 875 radarControllerHeaderObj = RadarControllerHeader()
876 876 processingHeaderObj = ProcessingHeader()
877 877
878 878 try:
879 879 if not( basicHeaderObj.read(fp) ): raise IOError
880 880 if not( systemHeaderObj.read(fp) ): raise IOError
881 881 if not( radarControllerHeaderObj.read(fp) ): raise IOError
882 882 if not( processingHeaderObj.read(fp) ): raise IOError
883 883 data_type = int(numpy.log2((processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
884 884
885 885 neededSize = processingHeaderObj.blockSize + basicHeaderObj.size
886 886
887 887 except:
888 888 if msgFlag:
889 889 print "\tThe file %s is empty or it doesn't have enough data" % filename
890 890
891 891 fp.close()
892 892 return False
893 893 else:
894 894 msg = "\tSkipping the file %s because it doesn't have enough data" %filename
895 895
896 896 fp.close()
897 897 fileSize = os.path.getsize(filename)
898 898 currentSize = fileSize - currentPosition
899 899 if currentSize < neededSize:
900 900 if msgFlag and (msg != None):
901 901 print msg #print"\tSkipping the file %s due to it hasn't enough data" %filename
902 902 return False
903 903
904 904 return True
905 905
906 906 def setup(self,
907 907 path=None,
908 908 startDate=None,
909 909 endDate=None,
910 910 startTime=datetime.time(0,0,0),
911 911 endTime=datetime.time(23,59,59),
912 912 set=None,
913 913 expLabel = "",
914 914 ext = None,
915 915 online = False,
916 916 delay = 60,
917 917 walk = True):
918 918
919 919 if path == None:
920 920 raise ValueError, "The path is not valid"
921 921
922 922 if ext == None:
923 923 ext = self.ext
924 924
925 925 if online:
926 926 print "Searching files in online mode..."
927 927
928 928 for nTries in range( self.nTries ):
929 929 fullpath, foldercounter, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext, walk=walk, set=set)
930 930
931 931 if fullpath:
932 932 break
933 933
934 934 print '\tWaiting %0.2f sec for a valid file in %s: try %02d ...' % (self.delay, path, nTries+1)
935 935 time.sleep( self.delay )
936 936
937 937 if not(fullpath):
938 938 print "There are no valid files in %s" % path
939 939 return None
940 940
941 941 self.year = year
942 942 self.doy = doy
943 943 self.set = set - 1
944 944 self.path = path
945 945 self.foldercounter = foldercounter
946 946
947 947 else:
948 948 print "Searching files in offline mode ..."
949 949 pathList, filenameList = self.__searchFilesOffLine(path, startDate=startDate, endDate=endDate,
950 950 startTime=startTime, endTime=endTime,
951 951 set=set, expLabel=expLabel, ext=ext,
952 952 walk=walk)
953 953
954 954 if not(pathList):
955 955 print "No *%s files found in the folder %s \nfor the range: %s - %s"%(ext, path,
956 956 datetime.datetime.combine(startDate,startTime).ctime(),
957 957 datetime.datetime.combine(endDate,endTime).ctime())
958 958
959 959 sys.exit(-1)
960 960
961 961
962 962 self.fileIndex = -1
963 963 self.pathList = pathList
964 964 self.filenameList = filenameList
965 965
966 966 self.online = online
967 967 self.delay = delay
968 968 ext = ext.lower()
969 969 self.ext = ext
970 970
971 971 if not(self.setNextFile()):
972 972 if (startDate!=None) and (endDate!=None):
973 973 print "No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
974 974 elif startDate != None:
975 975 print "No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
976 976 else:
977 977 print "No files"
978 978
979 979 sys.exit(-1)
980 980
981 981 # self.updateDataHeader()
982 982
983 983 return self.dataOut
984 984
985 985 def getBasicHeader(self):
986 986
987 987 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000. + self.profileIndex * self.ippSeconds
988 988
989 989 self.dataOut.flagTimeBlock = self.flagTimeBlock
990 990
991 991 self.dataOut.timeZone = self.basicHeaderObj.timeZone
992 992
993 993 self.dataOut.dstFlag = self.basicHeaderObj.dstFlag
994 994
995 995 self.dataOut.errorCount = self.basicHeaderObj.errorCount
996 996
997 997 self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime
998 998
999 999 def getFirstHeader(self):
1000 1000
1001 1001 raise ValueError, "This method has not been implemented"
1002 1002
1003 1003 def getData():
1004 1004
1005 1005 raise ValueError, "This method has not been implemented"
1006 1006
1007 1007 def hasNotDataInBuffer():
1008 1008
1009 1009 raise ValueError, "This method has not been implemented"
1010 1010
1011 1011 def readBlock():
1012 1012
1013 1013 raise ValueError, "This method has not been implemented"
1014 1014
1015 1015 def isEndProcess(self):
1016 1016
1017 1017 return self.flagNoMoreFiles
1018 1018
1019 1019 def printReadBlocks(self):
1020 1020
1021 1021 print "Number of read blocks per file %04d" %self.nReadBlocks
1022 1022
1023 1023 def printTotalBlocks(self):
1024 1024
1025 1025 print "Number of read blocks %04d" %self.nTotalBlocks
1026 1026
1027 1027 def printNumberOfBlock(self):
1028 1028
1029 1029 if self.flagIsNewBlock:
1030 1030 print "Block No. %04d, Total blocks %04d -> %s" %(self.basicHeaderObj.dataBlock, self.nTotalBlocks, self.dataOut.datatime.ctime())
1031 1031
1032 1032 def printInfo(self):
1033 1033
1034 1034 if self.__printInfo == False:
1035 1035 return
1036 1036
1037 1037 self.basicHeaderObj.printInfo()
1038 1038 self.systemHeaderObj.printInfo()
1039 1039 self.radarControllerHeaderObj.printInfo()
1040 1040 self.processingHeaderObj.printInfo()
1041 1041
1042 1042 self.__printInfo = False
1043 1043
1044 1044
1045 1045 def run(self, **kwargs):
1046 1046
1047 1047 if not(self.isConfig):
1048 1048
1049 1049 # self.dataOut = dataOut
1050 1050 self.setup(**kwargs)
1051 1051 self.isConfig = True
1052 1052
1053 1053 self.getData()
1054 1054
1055 1055 class JRODataWriter(JRODataIO, Operation):
1056 1056
1057 1057 """
1058 1058 This class writes data to raw or processed files (.r or .pdata). Data is always
1059 1059 written in blocks.
1060 1060 """
1061 1061
1062 1062 blockIndex = 0
1063 1063
1064 1064 path = None
1065 1065
1066 1066 setFile = None
1067 1067
1068 1068 profilesPerBlock = None
1069 1069
1070 1070 blocksPerFile = None
1071 1071
1072 1072 nWriteBlocks = 0
1073 1073
1074 1074 def __init__(self, dataOut=None):
1075 1075 raise ValueError, "Not implemented"
1076 1076
1077 1077
1078 1078 def hasAllDataInBuffer(self):
1079 1079 raise ValueError, "Not implemented"
1080 1080
1081 1081
1082 1082 def setBlockDimension(self):
1083 1083 raise ValueError, "Not implemented"
1084 1084
1085 1085
1086 1086 def writeBlock(self):
1087 1087 raise ValueError, "No implemented"
1088 1088
1089 1089
1090 1090 def putData(self):
1091 1091 raise ValueError, "No implemented"
1092 1092
1093 1093
1094 1094 def setBasicHeader(self):
1095 1095
1096 1096 self.basicHeaderObj.size = self.basicHeaderSize #bytes
1097 1097 self.basicHeaderObj.version = self.versionFile
1098 1098 self.basicHeaderObj.dataBlock = self.nTotalBlocks
1099 1099
1100 1100 utc = numpy.floor(self.dataOut.utctime)
1101 1101 milisecond = (self.dataOut.utctime - utc)* 1000.0
1102 1102
1103 1103 self.basicHeaderObj.utc = utc
1104 1104 self.basicHeaderObj.miliSecond = milisecond
1105 1105 self.basicHeaderObj.timeZone = self.dataOut.timeZone
1106 1106 self.basicHeaderObj.dstFlag = self.dataOut.dstFlag
1107 1107 self.basicHeaderObj.errorCount = self.dataOut.errorCount
1108 1108
1109 1109 def setFirstHeader(self):
1110 1110 """
1111 1111 Gets a copy of the First Header
1112 1112 
1113 1113 Affected:
1114 1114 
1115 1115 self.basicHeaderObj
1116 1116 self.systemHeaderObj
1117 1117 self.radarControllerHeaderObj
1118 1118 self.processingHeaderObj
1119 1119 
1120 1120 Return:
1121 1121 None
1122 1122 """
1123 1123
1124 1124 raise ValueError, "No implemented"
1125 1125
1126 1126 def __writeFirstHeader(self):
1127 1127 """
1128 1128 Escribe el primer header del file es decir el Basic header y el Long header (SystemHeader, RadarControllerHeader, ProcessingHeader)
1129 1129
1130 1130 Affected:
1131 1131 __dataType
1132 1132
1133 1133 Return:
1134 1134 None
1135 1135 """
1136 1136
1137 1137 # CALCULAR PARAMETROS
1138 1138
1139 1139 sizeLongHeader = self.systemHeaderObj.size + self.radarControllerHeaderObj.size + self.processingHeaderObj.size
1140 1140 self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader
1141 1141
1142 1142 self.basicHeaderObj.write(self.fp)
1143 1143 self.systemHeaderObj.write(self.fp)
1144 1144 self.radarControllerHeaderObj.write(self.fp)
1145 1145 self.processingHeaderObj.write(self.fp)
1146 1146
1147 1147 self.dtype = self.dataOut.dtype
1148 1148
1149 1149 def __setNewBlock(self):
1150 1150 """
1151 1151 If it is a new file it writes the First Header, otherwise it writes only the Basic Header
1152 1152 
1153 1153 Return:
1154 1154 0 : if nothing could be written
1155 1155 1 : if the Basic or the First Header was written
1156 1156 """
1157 1157 if self.fp == None:
1158 1158 self.setNextFile()
1159 1159
1160 1160 if self.flagIsNewFile:
1161 1161 return 1
1162 1162
1163 1163 if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
1164 1164 self.basicHeaderObj.write(self.fp)
1165 1165 return 1
1166 1166
1167 1167 if not( self.setNextFile() ):
1168 1168 return 0
1169 1169
1170 1170 return 1
1171 1171
1172 1172
1173 1173 def writeNextBlock(self):
1174 1174 """
1175 1175 Selects the next data block and writes it to a file
1176 1176 
1177 1177 Return:
1178 1178 0 : if the data block could not be written
1179 1179 1 : if the data block was written
1180 1180 """
1181 1181 if not( self.__setNewBlock() ):
1182 1182 return 0
1183 1183
1184 1184 self.writeBlock()
1185 1185
1186 1186 return 1
1187 1187
1188 1188 def setNextFile(self):
1189 1189 """
1190 1190 Determina el siguiente file que sera escrito
1191 1191
1192 1192 Affected:
1193 1193 self.filename
1194 1194 self.subfolder
1195 1195 self.fp
1196 1196 self.setFile
1197 1197 self.flagIsNewFile
1198 1198
1199 1199 Return:
1200 1200 0 : Si el archivo no puede ser escrito
1201 1201 1 : Si el archivo esta listo para ser escrito
1202 1202 """
1203 1203 ext = self.ext
1204 1204 path = self.path
1205 1205
1206 1206 if self.fp != None:
1207 1207 self.fp.close()
1208 1208
1209 1209 timeTuple = time.localtime( self.dataOut.utctime)
1210 1210 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
1211 1211
1212 1212 fullpath = os.path.join( path, subfolder )
1213 1213 if not( os.path.exists(fullpath) ):
1214 1214 os.mkdir(fullpath)
1215 1215 self.setFile = -1 #inicializo mi contador de seteo
1216 1216 else:
1217 1217 filesList = os.listdir( fullpath )
1218 1218 if len( filesList ) > 0:
1219 1219 filesList = sorted( filesList, key=str.lower )
1220 1220 filen = filesList[-1]
1221 1221 # el filename debera tener el siguiente formato
1222 1222 # 0 1234 567 89A BCDE (hex)
1223 1223 # x YYYY DDD SSS .ext
1224 1224 if isNumber( filen[8:11] ):
1225 1225 self.setFile = int( filen[8:11] ) #inicializo mi contador de seteo al seteo del ultimo file
1226 1226 else:
1227 1227 self.setFile = -1
1228 1228 else:
1229 1229 self.setFile = -1 #inicializo mi contador de seteo
1230 1230
1231 1231 setFile = self.setFile
1232 1232 setFile += 1
1233 1233
1234 1234 file = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
1235 1235 timeTuple.tm_year,
1236 1236 timeTuple.tm_yday,
1237 1237 setFile,
1238 1238 ext )
1239 1239
1240 1240 filename = os.path.join( path, subfolder, file )
1241 1241
1242 1242 fp = open( filename,'wb' )
1243 1243
1244 1244 self.blockIndex = 0
1245 1245
1246 1246 #guardando atributos
1247 1247 self.filename = filename
1248 1248 self.subfolder = subfolder
1249 1249 self.fp = fp
1250 1250 self.setFile = setFile
1251 1251 self.flagIsNewFile = 1
1252 1252
1253 1253 self.setFirstHeader()
1254 1254
1255 1255 print 'Writing the file: %s'%self.filename
1256 1256
1257 1257 self.__writeFirstHeader()
1258 1258
1259 1259 return 1
1260 1260
1261 def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=None, set=0, ext=None):
1261 def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=64, set=0, ext=None):
1262 1262 """
1263 1263 Sets the format in which the data will be saved and writes the First Header
1264 1264 
1265 1265 Inputs:
1266 1266 path : destination path where the files will be written
1267 1267 format : format in which a file will be saved
1268 1268 set : the file set number
1269 1269 
1270 1270 Return:
1271 1271 0 : if the setup was not successful
1272 1272 1 : if the setup was successful
1273 1273 """
1274 1274
1275 1275 if ext == None:
1276 1276 ext = self.ext
1277 1277
1278 1278 ext = ext.lower()
1279 1279
1280 1280 self.ext = ext
1281 1281
1282 1282 self.path = path
1283 1283
1284 1284 self.setFile = set - 1
1285 1285
1286 1286 self.blocksPerFile = blocksPerFile
1287 1287
1288 1288 self.profilesPerBlock = profilesPerBlock
1289 1289
1290 1290 self.dataOut = dataOut
1291 1291
1292 1292 if not(self.setNextFile()):
1293 1293 print "There isn't a next file"
1294 1294 return 0
1295 1295
1296 1296 self.setBlockDimension()
1297 1297
1298 1298 return 1
1299 1299
1300 1300 def run(self, dataOut, **kwargs):
1301 1301
1302 1302 if not(self.isConfig):
1303 1303
1304 1304 self.setup(dataOut, **kwargs)
1305 1305 self.isConfig = True
1306 1306
1307 1307 self.putData()
1308 1308
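# Usage sketch (hypothetical values; VoltageWriter, defined later in this file, is a
# concrete JRODataWriter):
#
#     writerObj = VoltageWriter()
#     writerObj.setup(dataOut, path="/home/myuser/out", blocksPerFile=100,
#                     profilesPerBlock=64, set=0, ext=".r")
#     writerObj.putData()      # called once per profile until a block is complete and written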
1309 1309 class VoltageReader(JRODataReader):
1310 1310 """
1311 1311 Esta clase permite leer datos de voltage desde archivos en formato rawdata (.r). La lectura
1312 1312 de los datos siempre se realiza por bloques. Los datos leidos (array de 3 dimensiones:
1313 1313 perfiles*alturas*canales) son almacenados en la variable "buffer".
1314 1314
1315 1315 perfiles * alturas * canales
1316 1316
1317 1317 Esta clase contiene instancias (objetos) de las clases BasicHeader, SystemHeader,
1318 1318 RadarControllerHeader y Voltage. Los tres primeros se usan para almacenar informacion de la
1319 1319 cabecera de datos (metadata), y el cuarto (Voltage) para obtener y almacenar un perfil de
1320 1320 datos desde el "buffer" cada vez que se ejecute el metodo "getData".
1321 1321
1322 1322 Example:
1323 1323
1324 1324 dpath = "/home/myuser/data"
1325 1325
1326 1326 startDate = datetime.date(2010,1,20)
1327 1327 
1328 1328 endDate = datetime.date(2010,1,21)
1329 1329 
1330 1330 readerObj = VoltageReader()
1331 1331 
1332 1332 readerObj.setup(dpath, startDate, endDate)
1333 1333
1334 1334 while(True):
1335 1335
1336 1336 #to get one profile
1337 1337 profile = readerObj.getData()
1338 1338
1339 1339 #print the profile
1340 1340 print profile
1341 1341
1342 1342 #If you want to see all datablock
1343 1343 print readerObj.datablock
1344 1344
1345 1345 if readerObj.flagNoMoreFiles:
1346 1346 break
1347 1347
1348 1348 """
1349 1349
1350 1350 ext = ".r"
1351 1351
1352 1352 optchar = "D"
1353 1353 dataOut = None
1354 1354
1355 1355
1356 1356 def __init__(self):
1357 1357 """
1358 1358 Inicializador de la clase VoltageReader para la lectura de datos de voltage.
1359 1359
1360 1360 Input:
1361 1361 dataOut : Objeto de la clase Voltage. Este objeto sera utilizado para
1362 1362 almacenar un perfil de datos cada vez que se haga un requerimiento
1363 1363 (getData). El perfil sera obtenido a partir del buffer de datos,
1364 1364 si el buffer esta vacio se hara un nuevo proceso de lectura de un
1365 1365 bloque de datos.
1366 1366 Si este parametro no es pasado se creara uno internamente.
1367 1367
1368 1368 Variables afectadas:
1369 1369 self.dataOut
1370 1370
1371 1371 Return:
1372 1372 None
1373 1373 """
1374 1374
1375 1375 self.isConfig = False
1376 1376
1377 1377 self.datablock = None
1378 1378
1379 1379 self.utc = 0
1380 1380
1381 1381 self.ext = ".r"
1382 1382
1383 1383 self.optchar = "D"
1384 1384
1385 1385 self.basicHeaderObj = BasicHeader(LOCALTIME)
1386 1386
1387 1387 self.systemHeaderObj = SystemHeader()
1388 1388
1389 1389 self.radarControllerHeaderObj = RadarControllerHeader()
1390 1390
1391 1391 self.processingHeaderObj = ProcessingHeader()
1392 1392
1393 1393 self.online = 0
1394 1394
1395 1395 self.fp = None
1396 1396
1397 1397 self.idFile = None
1398 1398
1399 1399 self.dtype = None
1400 1400
1401 1401 self.fileSizeByHeader = None
1402 1402
1403 1403 self.filenameList = []
1404 1404
1405 1405 self.filename = None
1406 1406
1407 1407 self.fileSize = None
1408 1408
1409 1409 self.firstHeaderSize = 0
1410 1410
1411 1411 self.basicHeaderSize = 24
1412 1412
1413 1413 self.pathList = []
1414 1414
1415 1415 self.filenameList = []
1416 1416
1417 1417 self.lastUTTime = 0
1418 1418
1419 1419 self.maxTimeStep = 30
1420 1420
1421 1421 self.flagNoMoreFiles = 0
1422 1422
1423 1423 self.set = 0
1424 1424
1425 1425 self.path = None
1426 1426
1427 1427 self.profileIndex = 2**32-1
1428 1428
1429 1429 self.delay = 3 #seconds
1430 1430
1431 1431 self.nTries = 3 #quantity tries
1432 1432
1433 1433 self.nFiles = 3 #number of files for searching
1434 1434
1435 1435 self.nReadBlocks = 0
1436 1436
1437 1437 self.flagIsNewFile = 1
1438 1438
1439 1439 self.__isFirstTimeOnline = 1
1440 1440
1441 1441 self.ippSeconds = 0
1442 1442
1443 1443 self.flagTimeBlock = 0
1444 1444
1445 1445 self.flagIsNewBlock = 0
1446 1446
1447 1447 self.nTotalBlocks = 0
1448 1448
1449 1449 self.blocksize = 0
1450 1450
1451 1451 self.dataOut = self.createObjByDefault()
1452 1452
1453 1453 def createObjByDefault(self):
1454 1454
1455 1455 dataObj = Voltage()
1456 1456
1457 1457 return dataObj
1458 1458
1459 1459 def __hasNotDataInBuffer(self):
1460 1460 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
1461 1461 return 1
1462 1462 return 0
1463 1463
1464 1464
1465 1465 def getBlockDimension(self):
1466 1466 """
1467 1467 Obtiene la cantidad de puntos a leer por cada bloque de datos
1468 1468
1469 1469 Affected:
1470 1470 self.blocksize
1471 1471
1472 1472 Return:
1473 1473 None
1474 1474 """
1475 1475 pts2read = self.processingHeaderObj.profilesPerBlock * self.processingHeaderObj.nHeights * self.systemHeaderObj.nChannels
1476 1476 self.blocksize = pts2read
1477 1477
1478 1478
1479 1479 def readBlock(self):
1480 1480 """
1481 1481 readBlock lee el bloque de datos desde la posicion actual del puntero del archivo
1482 1482 (self.fp) y actualiza todos los parametros relacionados al bloque de datos
1483 1483 (metadata + data). La data leida es almacenada en el buffer y el contador del buffer
1484 1484 es seteado a 0
1485 1485
1486 1486 Inputs:
1487 1487 None
1488 1488
1489 1489 Return:
1490 1490 None
1491 1491
1492 1492 Affected:
1493 1493 self.profileIndex
1494 1494 self.datablock
1495 1495 self.flagIsNewFile
1496 1496 self.flagIsNewBlock
1497 1497 self.nTotalBlocks
1498 1498
1499 1499 Exceptions:
1500 1500 Si un bloque leido no es un bloque valido
1501 1501 """
1502 1502
1503 1503 junk = numpy.fromfile( self.fp, self.dtype, self.blocksize )
1504 1504
1505 1505 try:
1506 1506 junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
1507 1507 except:
1508 1508 print "The read block (%3d) has not enough data" %self.nReadBlocks
1509 1509 return 0
1510 1510
1511 1511 junk = numpy.transpose(junk, (2,0,1))
1512 1512 self.datablock = junk['real'] + junk['imag']*1j
1513 1513
1514 1514 self.profileIndex = 0
1515 1515
1516 1516 self.flagIsNewFile = 0
1517 1517 self.flagIsNewBlock = 1
1518 1518
1519 1519 self.nTotalBlocks += 1
1520 1520 self.nReadBlocks += 1
1521 1521
1522 1522 return 1
1523 1523
1524 1524 def getFirstHeader(self):
1525 1525
1526 1526 self.dataOut.dtype = self.dtype
1527 1527
1528 1528 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
1529 1529
1530 1530 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
1531 1531
1532 1532 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
1533 1533
1534 1534 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
1535 1535
1536 1536 self.dataOut.ippSeconds = self.ippSeconds
1537 1537
1538 1538 self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt
1539 1539
1540 1540 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
1541 1541
1542 1542 self.dataOut.flagShiftFFT = False
1543 1543
1544 1544 if self.radarControllerHeaderObj.code != None:
1545 1545
1546 1546 self.dataOut.nCode = self.radarControllerHeaderObj.nCode
1547 1547
1548 1548 self.dataOut.nBaud = self.radarControllerHeaderObj.nBaud
1549 1549
1550 1550 self.dataOut.code = self.radarControllerHeaderObj.code
1551 1551
1552 1552 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
1553 1553
1554 1554 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
1555 1555
1556 1556 self.dataOut.flagDecodeData = False #asumo q la data no esta decodificada
1557 1557
1558 1558 self.dataOut.flagDeflipData = False #asumo q la data no esta sin flip
1559 1559
1560 1560 self.dataOut.flagShiftFFT = False
1561 1561
1562 1562 def getData(self):
1563 1563 """
1564 1564 getData obtiene una unidad de datos del buffer de lectura y la copia a la clase "Voltage"
1565 1565 con todos los parametros asociados a este (metadata). cuando no hay datos en el buffer de
1566 1566 lectura es necesario hacer una nueva lectura de los bloques de datos usando "readNextBlock"
1567 1567
1568 1568 Ademas incrementa el contador del buffer en 1.
1569 1569
1570 1570 Return:
1571 1571 data : retorna un perfil de voltages (alturas * canales) copiados desde el
1572 1572 buffer. Si no hay mas archivos a leer retorna None.
1573 1573
1574 1574 Variables afectadas:
1575 1575 self.dataOut
1576 1576 self.profileIndex
1577 1577
1578 1578 Affected:
1579 1579 self.dataOut
1580 1580 self.profileIndex
1581 1581 self.flagTimeBlock
1582 1582 self.flagIsNewBlock
1583 1583 """
1584 1584
1585 1585 if self.flagNoMoreFiles:
1586 1586 self.dataOut.flagNoData = True
1587 1587 print 'Process finished'
1588 1588 return 0
1589 1589
1590 1590 self.flagTimeBlock = 0
1591 1591 self.flagIsNewBlock = 0
1592 1592
1593 1593 if self.__hasNotDataInBuffer():
1594 1594
1595 1595 if not( self.readNextBlock() ):
1596 1596 return 0
1597 1597
1598 1598 self.getFirstHeader()
1599 1599
1600 1600 if self.datablock == None:
1601 1601 self.dataOut.flagNoData = True
1602 1602 return 0
1603 1603
1604 1604 self.dataOut.data = self.datablock[:,self.profileIndex,:]
1605 1605
1606 1606 self.dataOut.flagNoData = False
1607 1607
1608 1608 self.getBasicHeader()
1609 1609
1610 1610 self.profileIndex += 1
1611 1611
1612 1612 self.dataOut.realtime = self.online
1613 1613
1614 1614 return self.dataOut.data
1615 1615
1616 1616
1617 1617 class VoltageWriter(JRODataWriter):
1618 1618 """
1619 1619 Esta clase permite escribir datos de voltajes a archivos procesados (.r). La escritura
1620 1620 de los datos siempre se realiza por bloques.
1621 1621 """
1622 1622
1623 1623 ext = ".r"
1624 1624
1625 1625 optchar = "D"
1626 1626
1627 1627 shapeBuffer = None
1628 1628
1629 1629
1630 1630 def __init__(self):
1631 1631 """
1632 1632 Initializer of the VoltageWriter class for writing voltage data.
1633 1633 
1634 1634 Affected:
1635 1635 self.dataOut
1636 1636 
1637 1637 Return: None
1638 1638 """
1639 1639
1640 1640 self.nTotalBlocks = 0
1641 1641
1642 1642 self.profileIndex = 0
1643 1643
1644 1644 self.isConfig = False
1645 1645
1646 1646 self.fp = None
1647 1647
1648 1648 self.flagIsNewFile = 1
1649 1649
1650 1650 self.nTotalBlocks = 0
1651 1651
1652 1652 self.flagIsNewBlock = 0
1653 1653
1654 1654 self.setFile = None
1655 1655
1656 1656 self.dtype = None
1657 1657
1658 1658 self.path = None
1659 1659
1660 1660 self.filename = None
1661 1661
1662 1662 self.basicHeaderObj = BasicHeader(LOCALTIME)
1663 1663
1664 1664 self.systemHeaderObj = SystemHeader()
1665 1665
1666 1666 self.radarControllerHeaderObj = RadarControllerHeader()
1667 1667
1668 1668 self.processingHeaderObj = ProcessingHeader()
1669 1669
1670 1670 def hasAllDataInBuffer(self):
1671 1671 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
1672 1672 return 1
1673 1673 return 0
1674 1674
1675 1675
1676 1676 def setBlockDimension(self):
1677 1677 """
1678 1678 Obtiene las formas dimensionales del los subbloques de datos que componen un bloque
1679 1679
1680 1680 Affected:
1681 1681 self.shape_spc_Buffer
1682 1682 self.shape_cspc_Buffer
1683 1683 self.shape_dc_Buffer
1684 1684
1685 1685 Return: None
1686 1686 """
1687 1687 self.shapeBuffer = (self.processingHeaderObj.profilesPerBlock,
1688 1688 self.processingHeaderObj.nHeights,
1689 1689 self.systemHeaderObj.nChannels)
1690 1690
1691 1691 self.datablock = numpy.zeros((self.systemHeaderObj.nChannels,
1692 1692 self.processingHeaderObj.profilesPerBlock,
1693 1693 self.processingHeaderObj.nHeights),
1694 1694 dtype=numpy.dtype('complex64'))
1695 1695
1696 1696
1697 1697 def writeBlock(self):
1698 1698 """
1699 1699 Escribe el buffer en el file designado
1700 1700
1701 1701 Affected:
1702 1702 self.profileIndex
1703 1703 self.flagIsNewFile
1704 1704 self.flagIsNewBlock
1705 1705 self.nTotalBlocks
1706 1706 self.blockIndex
1707 1707
1708 1708 Return: None
1709 1709 """
1710 1710 data = numpy.zeros( self.shapeBuffer, self.dtype )
1711 1711
1712 1712 junk = numpy.transpose(self.datablock, (1,2,0))
1713 1713
1714 1714 data['real'] = junk.real
1715 1715 data['imag'] = junk.imag
1716 1716
1717 1717 data = data.reshape( (-1) )
1718 1718
1719 1719 data.tofile( self.fp )
1720 1720
1721 1721 self.datablock.fill(0)
1722 1722
1723 1723 self.profileIndex = 0
1724 1724 self.flagIsNewFile = 0
1725 1725 self.flagIsNewBlock = 1
1726 1726
1727 1727 self.blockIndex += 1
1728 1728 self.nTotalBlocks += 1
1729 1729
1730 1730 def putData(self):
1731 1731 """
1732 1732 Setea un bloque de datos y luego los escribe en un file
1733 1733
1734 1734 Affected:
1735 1735 self.flagIsNewBlock
1736 1736 self.profileIndex
1737 1737
1738 1738 Return:
1739 1739 0 : Si no hay data o no hay mas files que puedan escribirse
1740 1740 1 : Si se escribio la data de un bloque en un file
1741 1741 """
1742 1742 if self.dataOut.flagNoData:
1743 1743 return 0
1744 1744
1745 1745 self.flagIsNewBlock = 0
1746 1746
1747 1747 if self.dataOut.flagTimeBlock:
1748 1748
1749 1749 self.datablock.fill(0)
1750 1750 self.profileIndex = 0
1751 1751 self.setNextFile()
1752 1752
1753 1753 if self.profileIndex == 0:
1754 1754 self.setBasicHeader()
1755 1755
1756 1756 self.datablock[:,self.profileIndex,:] = self.dataOut.data
1757 1757
1758 1758 self.profileIndex += 1
1759 1759
1760 1760 if self.hasAllDataInBuffer():
1761 1761 #if self.flagIsNewFile:
1762 1762 self.writeNextBlock()
1763 1763 # self.setFirstHeader()
1764 1764
1765 1765 return 1
1766 1766
1767 1767 def __getProcessFlags(self):
1768 1768
1769 1769 processFlags = 0
1770 1770
1771 1771 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
1772 1772 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
1773 1773 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
1774 1774 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
1775 1775 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
1776 1776 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
1777 1777
1778 1778 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
1779 1779
1780 1780
1781 1781
1782 1782 datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
1783 1783 PROCFLAG.DATATYPE_SHORT,
1784 1784 PROCFLAG.DATATYPE_LONG,
1785 1785 PROCFLAG.DATATYPE_INT64,
1786 1786 PROCFLAG.DATATYPE_FLOAT,
1787 1787 PROCFLAG.DATATYPE_DOUBLE]
1788 1788
1789 1789
1790 1790 for index in range(len(dtypeList)):
1791 1791 if self.dataOut.dtype == dtypeList[index]:
1792 1792 dtypeValue = datatypeValueList[index]
1793 1793 break
1794 1794
1795 1795 processFlags += dtypeValue
1796 1796
1797 1797 if self.dataOut.flagDecodeData:
1798 1798 processFlags += PROCFLAG.DECODE_DATA
1799 1799
1800 1800 if self.dataOut.flagDeflipData:
1801 1801 processFlags += PROCFLAG.DEFLIP_DATA
1802 1802
1803 1803 if self.dataOut.code != None:
1804 1804 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
1805 1805
1806 1806 if self.dataOut.nCohInt > 1:
1807 1807 processFlags += PROCFLAG.COHERENT_INTEGRATION
1808 1808
1809 1809 return processFlags
1810 1810
1811 1811
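# Example (sketch, using the PROCFLAG constants referenced above): for short-int data with a
# code defined and coherent integration enabled, this method would return
#     PROCFLAG.DATATYPE_SHORT + PROCFLAG.DEFINE_PROCESS_CODE + PROCFLAG.COHERENT_INTEGRATION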
1812 1812 def __getBlockSize(self):
1813 1813 '''
1814 1814 Este metodos determina el cantidad de bytes para un bloque de datos de tipo Voltage
1815 1815 '''
1816 1816
1817 1817 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
1818 1818 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
1819 1819 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
1820 1820 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
1821 1821 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
1822 1822 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
1823 1823
1824 1824 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
1825 1825 datatypeValueList = [1,2,4,8,4,8]
1826 1826 for index in range(len(dtypeList)):
1827 1827 if self.dataOut.dtype == dtypeList[index]:
1828 1828 datatypeValue = datatypeValueList[index]
1829 1829 break
1830 1830
1831 blocksize = int(self.dataOut.nHeights * self.dataOut.nChannels * self.dataOut.nProfiles * datatypeValue * 2)
1831 blocksize = int(self.dataOut.nHeights * self.dataOut.nChannels * self.profilesPerBlock * datatypeValue * 2)
1832 1832
1833 1833 return blocksize
1834 1834
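# Worked example (sketch, hypothetical numbers): for short ints (datatypeValue = 2),
# 100 heights, 2 channels and profilesPerBlock = 64:
#     blocksize = 100 * 2 * 64 * 2 * 2 = 51200 bytes per block
# (the trailing factor 2 accounts for the real and imaginary parts).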
1835 1835 def setFirstHeader(self):
1836 1836
1837 1837 """
1838 1838 Obtiene una copia del First Header
1839 1839
1840 1840 Affected:
1841 1841 self.systemHeaderObj
1842 1842 self.radarControllerHeaderObj
1843 1843 self.dtype
1844 1844
1845 1845 Return:
1846 1846 None
1847 1847 """
1848 1848
1849 1849 self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
1850 1850 self.systemHeaderObj.nChannels = self.dataOut.nChannels
1851 1851 self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()
1852 1852
1853 1853 self.setBasicHeader()
1854 1854
1855 1855 processingHeaderSize = 40 # bytes
1856 1856 self.processingHeaderObj.dtype = 0 # Voltage
1857 1857 self.processingHeaderObj.blockSize = self.__getBlockSize()
1858 1858 self.processingHeaderObj.profilesPerBlock = self.profilesPerBlock
1859 1859 self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
1860 1860 self.processingHeaderObj.nWindows = 1 #podria ser 1 o self.dataOut.processingHeaderObj.nWindows
1861 1861 self.processingHeaderObj.processFlags = self.__getProcessFlags()
1862 1862 self.processingHeaderObj.nCohInt = self.dataOut.nCohInt
1863 1863 self.processingHeaderObj.nIncohInt = 1 # Cuando la data de origen es de tipo Voltage
1864 1864 self.processingHeaderObj.totalSpectra = 0 # Cuando la data de origen es de tipo Voltage
1865 1865
1866 if self.dataOut.code != None:
1867 self.processingHeaderObj.code = self.dataOut.code
1868 self.processingHeaderObj.nCode = self.dataOut.nCode
1869 self.processingHeaderObj.nBaud = self.dataOut.nBaud
1870 codesize = int(8 + 4 * self.dataOut.nCode * self.dataOut.nBaud)
1871 processingHeaderSize += codesize
1866 # if self.dataOut.code != None:
1867 # self.processingHeaderObj.code = self.dataOut.code
1868 # self.processingHeaderObj.nCode = self.dataOut.nCode
1869 # self.processingHeaderObj.nBaud = self.dataOut.nBaud
1870 # codesize = int(8 + 4 * self.dataOut.nCode * self.dataOut.nBaud)
1871 # processingHeaderSize += codesize
1872 1872
1873 1873 if self.processingHeaderObj.nWindows != 0:
1874 1874 self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
1875 1875 self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
1876 1876 self.processingHeaderObj.nHeights = self.dataOut.nHeights
1877 1877 self.processingHeaderObj.samplesWin = self.dataOut.nHeights
1878 1878 processingHeaderSize += 12
1879 1879
1880 1880 self.processingHeaderObj.size = processingHeaderSize
1881 1881
1882 1882 class SpectraReader(JRODataReader):
1883 1883 """
1884 1884 This class reads spectra data from processed files (.pdata). Data is always read
1885 1885 by blocks. The data read (a 3-dimensional array) is stored in three buffers:
1886 1886 one for the Self Spectra, one for the Cross Spectra and one for the DC Channels.
1887 1887
1888 1888 equalChannelPairs * heights * profiles (Self Spectra)
1889 1889 differentChannelPairs * heights * profiles (Cross Spectra)
1890 1890 channels * heights (DC Channels)
1891 1891
1892 1892 This class contains instances (objects) of the BasicHeader, SystemHeader,
1893 1893 RadarControllerHeader and Spectra classes. The first three are used to store the data
1894 1894 header information (metadata), and the fourth (Spectra) to obtain and store a block of
1895 1895 data from the "buffer" every time the "getData" method is called.
1896 1896
1897 1897 Example:
1898 1898 dpath = "/home/myuser/data"
1899 1899
1900 1900 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
1901 1901
1902 1902 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
1903 1903
1904 1904 readerObj = SpectraReader()
1905 1905
1906 1906 readerObj.setup(dpath, startTime, endTime)
1907 1907
1908 1908 while(True):
1909 1909
1910 1910 readerObj.getData()
1911 1911
1912 1912 print readerObj.data_spc
1913 1913
1914 1914 print readerObj.data_cspc
1915 1915
1916 1916 print readerObj.data_dc
1917 1917
1918 1918 if readerObj.flagNoMoreFiles:
1919 1919 break
1920 1920
1921 1921 """
1922 1922
1923 1923 pts2read_SelfSpectra = 0
1924 1924
1925 1925 pts2read_CrossSpectra = 0
1926 1926
1927 1927 pts2read_DCchannels = 0
1928 1928
1929 1929 ext = ".pdata"
1930 1930
1931 1931 optchar = "P"
1932 1932
1933 1933 dataOut = None
1934 1934
1935 1935 nRdChannels = None
1936 1936
1937 1937 nRdPairs = None
1938 1938
1939 1939 rdPairList = []
1940 1940
1941 1941 def __init__(self):
1942 1942 """
1943 1943 Initializer of the SpectraReader class for reading spectra data.
1944 1944
1945 1945 Inputs:
1946 1946 dataOut : Object of the Spectra class. This object will be used to
1947 1947 store a data profile every time a request is made
1948 1948 (getData). The profile will be obtained from the data buffer;
1949 1949 if the buffer is empty, a new read of a data block
1950 1950 will be performed.
1951 1951 If this parameter is not passed, one will be created internally.
1952 1952
1953 1953 Affected:
1954 1954 self.dataOut
1955 1955
1956 1956 Return : None
1957 1957 """
1958 1958
1959 1959 self.isConfig = False
1960 1960
1961 1961 self.pts2read_SelfSpectra = 0
1962 1962
1963 1963 self.pts2read_CrossSpectra = 0
1964 1964
1965 1965 self.pts2read_DCchannels = 0
1966 1966
1967 1967 self.datablock = None
1968 1968
1969 1969 self.utc = None
1970 1970
1971 1971 self.ext = ".pdata"
1972 1972
1973 1973 self.optchar = "P"
1974 1974
1975 1975 self.basicHeaderObj = BasicHeader(LOCALTIME)
1976 1976
1977 1977 self.systemHeaderObj = SystemHeader()
1978 1978
1979 1979 self.radarControllerHeaderObj = RadarControllerHeader()
1980 1980
1981 1981 self.processingHeaderObj = ProcessingHeader()
1982 1982
1983 1983 self.online = 0
1984 1984
1985 1985 self.fp = None
1986 1986
1987 1987 self.idFile = None
1988 1988
1989 1989 self.dtype = None
1990 1990
1991 1991 self.fileSizeByHeader = None
1992 1992
1993 1993 self.filenameList = []
1994 1994
1995 1995 self.filename = None
1996 1996
1997 1997 self.fileSize = None
1998 1998
1999 1999 self.firstHeaderSize = 0
2000 2000
2001 2001 self.basicHeaderSize = 24
2002 2002
2003 2003 self.pathList = []
2004 2004
2005 2005 self.lastUTTime = 0
2006 2006
2007 2007 self.maxTimeStep = 30
2008 2008
2009 2009 self.flagNoMoreFiles = 0
2010 2010
2011 2011 self.set = 0
2012 2012
2013 2013 self.path = None
2014 2014
2015 2015 self.delay = 60 #seconds
2016 2016
2017 2017 self.nTries = 3 #number of tries
2018 2018
2019 2019 self.nFiles = 3 #number of files for searching
2020 2020
2021 2021 self.nReadBlocks = 0
2022 2022
2023 2023 self.flagIsNewFile = 1
2024 2024
2025 2025 self.__isFirstTimeOnline = 1
2026 2026
2027 2027 self.ippSeconds = 0
2028 2028
2029 2029 self.flagTimeBlock = 0
2030 2030
2031 2031 self.flagIsNewBlock = 0
2032 2032
2033 2033 self.nTotalBlocks = 0
2034 2034
2035 2035 self.blocksize = 0
2036 2036
2037 2037 self.dataOut = self.createObjByDefault()
2038 2038
2039 2039 self.profileIndex = 1 #Always
2040 2040
2041 2041
2042 2042 def createObjByDefault(self):
2043 2043
2044 2044 dataObj = Spectra()
2045 2045
2046 2046 return dataObj
2047 2047
2048 2048 def __hasNotDataInBuffer(self):
2049 2049 return 1
2050 2050
2051 2051
2052 2052 def getBlockDimension(self):
2053 2053 """
2054 2054 Computes the number of points to read for each data block
2055 2055
2056 2056 Affected:
2057 2057 self.nRdChannels
2058 2058 self.nRdPairs
2059 2059 self.pts2read_SelfSpectra
2060 2060 self.pts2read_CrossSpectra
2061 2061 self.pts2read_DCchannels
2062 2062 self.blocksize
2063 2063 self.dataOut.nChannels
2064 2064 self.dataOut.nPairs
2065 2065
2066 2066 Return:
2067 2067 None
2068 2068 """
2069 2069 self.nRdChannels = 0
2070 2070 self.nRdPairs = 0
2071 2071 self.rdPairList = []
2072 2072
2073 2073 for i in range(0, self.processingHeaderObj.totalSpectra*2, 2):
2074 2074 if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i+1]:
2075 2075 self.nRdChannels = self.nRdChannels + 1 #pair of equal channels (self-spectrum)
2076 2076 else:
2077 2077 self.nRdPairs = self.nRdPairs + 1 #pair of different channels (cross-spectrum)
2078 2078 self.rdPairList.append((self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1]))
2079 2079
2080 2080 pts2read = self.processingHeaderObj.nHeights * self.processingHeaderObj.profilesPerBlock
2081 2081
2082 2082 self.pts2read_SelfSpectra = int(self.nRdChannels * pts2read)
2083 2083 self.blocksize = self.pts2read_SelfSpectra
2084 2084
2085 2085 if self.processingHeaderObj.flag_cspc:
2086 2086 self.pts2read_CrossSpectra = int(self.nRdPairs * pts2read)
2087 2087 self.blocksize += self.pts2read_CrossSpectra
2088 2088
2089 2089 if self.processingHeaderObj.flag_dc:
2090 2090 self.pts2read_DCchannels = int(self.systemHeaderObj.nChannels * self.processingHeaderObj.nHeights)
2091 2091 self.blocksize += self.pts2read_DCchannels
2092 2092
2093 2093 # self.blocksize = self.pts2read_SelfSpectra + self.pts2read_CrossSpectra + self.pts2read_DCchannels
2094 2094
2095 2095
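# A small worked example of the sizes computed above, using assumed header
# values (3 self-spectra channels, 3 cross pairs, DC channels saved, 4 physical
# channels, 100 heights, 128 profiles per block):
#
#     pts2read              = 100 * 128      # = 12800 points per spectrum
#     pts2read_SelfSpectra  = 3 * 12800      # = 38400 real values
#     pts2read_CrossSpectra = 3 * 12800      # = 38400 complex (real/imag) values
#     pts2read_DCchannels   = 4 * 100        # = 400 complex values
#     blocksize             = 38400 + 38400 + 400   # points per block, not bytes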
2096 2096 def readBlock(self):
2097 2097 """
2098 2098 Reads the data block from the current position of the file pointer
2099 2099 (self.fp) and updates all the parameters related to the data block
2100 2100 (metadata + data). The data read is stored in the buffer and the buffer
2101 2101 counter is reset to 0
2102 2102
2103 2103 Return: None
2104 2104
2105 2105 Affected variables:
2106 2106
2107 2107 self.flagIsNewFile
2108 2108 self.flagIsNewBlock
2109 2109 self.nTotalBlocks
2110 2110 self.data_spc
2111 2111 self.data_cspc
2112 2112 self.data_dc
2113 2113
2114 2114 Exceptions:
2115 2115 If a block read is not a valid block
2116 2116 """
2117 2117 blockOk_flag = False
2118 2118 fpointer = self.fp.tell()
2119 2119
2120 2120 spc = numpy.fromfile( self.fp, self.dtype[0], self.pts2read_SelfSpectra )
2121 2121 spc = spc.reshape( (self.nRdChannels, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #reshape into a 3D array
2122 2122
2123 2123 if self.processingHeaderObj.flag_cspc:
2124 2124 cspc = numpy.fromfile( self.fp, self.dtype, self.pts2read_CrossSpectra )
2125 2125 cspc = cspc.reshape( (self.nRdPairs, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #reshape into a 3D array
2126 2126
2127 2127 if self.processingHeaderObj.flag_dc:
2128 2128 dc = numpy.fromfile( self.fp, self.dtype, self.pts2read_DCchannels ) #int(self.processingHeaderObj.nHeights*self.systemHeaderObj.nChannels) )
2129 2129 dc = dc.reshape( (self.systemHeaderObj.nChannels, self.processingHeaderObj.nHeights) ) #reshape into a 2D array
2130 2130
2131 2131
2132 2132 if not(self.processingHeaderObj.shif_fft):
2133 2133 #shift to the right along axis 2 by a fixed number of positions
2134 2134 shift = int(self.processingHeaderObj.profilesPerBlock/2)
2135 2135 spc = numpy.roll( spc, shift , axis=2 )
2136 2136
2137 2137 if self.processingHeaderObj.flag_cspc:
2138 2138 #shift to the right along axis 2 by a fixed number of positions
2139 2139 cspc = numpy.roll( cspc, shift, axis=2 )
2140 2140
2141 2141 # self.processingHeaderObj.shif_fft = True
2142 2142
2143 2143 spc = numpy.transpose( spc, (0,2,1) )
2144 2144 self.data_spc = spc
2145 2145
2146 2146 if self.processingHeaderObj.flag_cspc:
2147 2147 cspc = numpy.transpose( cspc, (0,2,1) )
2148 2148 self.data_cspc = cspc['real'] + cspc['imag']*1j
2149 2149 else:
2150 2150 self.data_cspc = None
2151 2151
2152 2152 if self.processingHeaderObj.flag_dc:
2153 2153 self.data_dc = dc['real'] + dc['imag']*1j
2154 2154 else:
2155 2155 self.data_dc = None
2156 2156
2157 2157 self.flagIsNewFile = 0
2158 2158 self.flagIsNewBlock = 1
2159 2159
2160 2160 self.nTotalBlocks += 1
2161 2161 self.nReadBlocks += 1
2162 2162
2163 2163 return 1
2164 2164
2165 2165 def getFirstHeader(self):
2166 2166
2167 2167 self.dataOut.dtype = self.dtype
2168 2168
2169 2169 self.dataOut.nPairs = self.nRdPairs
2170 2170
2171 2171 self.dataOut.pairsList = self.rdPairList
2172 2172
2173 2173 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
2174 2174
2175 2175 self.dataOut.nFFTPoints = self.processingHeaderObj.profilesPerBlock
2176 2176
2177 2177 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
2178 2178
2179 2179 self.dataOut.nIncohInt = self.processingHeaderObj.nIncohInt
2180 2180
2181 2181 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
2182 2182
2183 2183 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
2184 2184
2185 2185 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
2186 2186
2187 2187 self.dataOut.ippSeconds = self.ippSeconds
2188 2188
2189 2189 self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt * self.processingHeaderObj.nIncohInt * self.dataOut.nFFTPoints
2190 2190
2191 2191 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
2192 2192
2193 2193 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
2194 2194
2195 2195 self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft
2196 2196
2197 2197 self.dataOut.flagDecodeData = False #assume the data has not been decoded
2198 2198
2199 2199 self.dataOut.flagDeflipData = True #assume the data has not been deflipped
2200 2200
2201 2201 if self.processingHeaderObj.code != None:
2202 2202
2203 2203 self.dataOut.nCode = self.processingHeaderObj.nCode
2204 2204
2205 2205 self.dataOut.nBaud = self.processingHeaderObj.nBaud
2206 2206
2207 2207 self.dataOut.code = self.processingHeaderObj.code
2208 2208
2209 2209 self.dataOut.flagDecodeData = True
2210 2210
2211 2211 def getData(self):
2212 2212 """
2213 2213 Copies the read buffer into the "Spectra" class,
2214 2214 along with all its associated parameters (metadata). When there is no data in the read
2215 2215 buffer, a new read of the data blocks is required using "readNextBlock"
2216 2216
2217 2217 Return:
2218 2218 0 : If there are no more files available
2219 2219 1 : If the buffer was copied successfully
2220 2220
2221 2221 Affected:
2222 2222 self.dataOut
2223 2223
2224 2224 self.flagTimeBlock
2225 2225 self.flagIsNewBlock
2226 2226 """
2227 2227
2228 2228 if self.flagNoMoreFiles:
2229 2229 self.dataOut.flagNoData = True
2230 2230 print 'Process finished'
2231 2231 return 0
2232 2232
2233 2233 self.flagTimeBlock = 0
2234 2234 self.flagIsNewBlock = 0
2235 2235
2236 2236 if self.__hasNotDataInBuffer():
2237 2237
2238 2238 if not( self.readNextBlock() ):
2239 2239 self.dataOut.flagNoData = True
2240 2240 return 0
2241 2241
2242 2242 #data is a 3-dimensional numpy array (profiles, heights and channels)
2243 2243
2244 2244 if self.data_dc == None:
2245 2245 self.dataOut.flagNoData = True
2246 2246 return 0
2247 2247
2248 2248 self.getBasicHeader()
2249 2249
2250 2250 self.getFirstHeader()
2251 2251
2252 2252 self.dataOut.data_spc = self.data_spc
2253 2253
2254 2254 self.dataOut.data_cspc = self.data_cspc
2255 2255
2256 2256 self.dataOut.data_dc = self.data_dc
2257 2257
2258 2258 self.dataOut.flagNoData = False
2259 2259
2260 2260 self.dataOut.realtime = self.online
2261 2261
2262 2262 return self.dataOut.data_spc
2263 2263
2264 2264
2265 2265 class SpectraWriter(JRODataWriter):
2266 2266
2267 2267 """
2268 2268 Esta clase permite escribir datos de espectros a archivos procesados (.pdata). La escritura
2269 2269 de los datos siempre se realiza por bloques.
2270 2270 """
2271 2271
2272 2272 ext = ".pdata"
2273 2273
2274 2274 optchar = "P"
2275 2275
2276 2276 shape_spc_Buffer = None
2277 2277
2278 2278 shape_cspc_Buffer = None
2279 2279
2280 2280 shape_dc_Buffer = None
2281 2281
2282 2282 data_spc = None
2283 2283
2284 2284 data_cspc = None
2285 2285
2286 2286 data_dc = None
2287 2287
2288 2288 # dataOut = None
2289 2289
2290 2290 def __init__(self):
2291 2291 """
2292 2292 Initializer of the SpectraWriter class for writing spectra data.
2293 2293
2294 2294 Affected:
2295 2295 self.dataOut
2296 2296 self.basicHeaderObj
2297 2297 self.systemHeaderObj
2298 2298 self.radarControllerHeaderObj
2299 2299 self.processingHeaderObj
2300 2300
2301 2301 Return: None
2302 2302 """
2303 2303
2304 2304 self.isConfig = False
2305 2305
2306 2306 self.nTotalBlocks = 0
2307 2307
2308 2308 self.data_spc = None
2309 2309
2310 2310 self.data_cspc = None
2311 2311
2312 2312 self.data_dc = None
2313 2313
2314 2314 self.fp = None
2315 2315
2316 2316 self.flagIsNewFile = 1
2317 2317
2318 2318 self.nTotalBlocks = 0
2319 2319
2320 2320 self.flagIsNewBlock = 0
2321 2321
2322 2322 self.setFile = None
2323 2323
2324 2324 self.dtype = None
2325 2325
2326 2326 self.path = None
2327 2327
2328 2328 self.noMoreFiles = 0
2329 2329
2330 2330 self.filename = None
2331 2331
2332 2332 self.basicHeaderObj = BasicHeader(LOCALTIME)
2333 2333
2334 2334 self.systemHeaderObj = SystemHeader()
2335 2335
2336 2336 self.radarControllerHeaderObj = RadarControllerHeader()
2337 2337
2338 2338 self.processingHeaderObj = ProcessingHeader()
2339 2339
2340 2340
2341 2341 def hasAllDataInBuffer(self):
2342 2342 return 1
2343 2343
2344 2344
2345 2345 def setBlockDimension(self):
2346 2346 """
2347 2347 Determines the dimensional shapes of the data sub-blocks that make up a block
2348 2348
2349 2349 Affected:
2350 2350 self.shape_spc_Buffer
2351 2351 self.shape_cspc_Buffer
2352 2352 self.shape_dc_Buffer
2353 2353
2354 2354 Return: None
2355 2355 """
2356 2356 self.shape_spc_Buffer = (self.dataOut.nChannels,
2357 2357 self.processingHeaderObj.nHeights,
2358 2358 self.processingHeaderObj.profilesPerBlock)
2359 2359
2360 2360 self.shape_cspc_Buffer = (self.dataOut.nPairs,
2361 2361 self.processingHeaderObj.nHeights,
2362 2362 self.processingHeaderObj.profilesPerBlock)
2363 2363
2364 2364 self.shape_dc_Buffer = (self.dataOut.nChannels,
2365 2365 self.processingHeaderObj.nHeights)
2366 2366
2367 2367
2368 2368 def writeBlock(self):
2369 2369 """
2370 2370 Writes the buffer to the designated file
2371 2371
2372 2372 Affected:
2373 2373 self.data_spc
2374 2374 self.data_cspc
2375 2375 self.data_dc
2376 2376 self.flagIsNewFile
2377 2377 self.flagIsNewBlock
2378 2378 self.nTotalBlocks
2379 2379 self.nWriteBlocks
2380 2380
2381 2381 Return: None
2382 2382 """
2383 2383
2384 2384 spc = numpy.transpose( self.data_spc, (0,2,1) )
2385 2385 if not( self.processingHeaderObj.shif_fft ):
2386 2386 spc = numpy.roll( spc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #shift to the right along axis 2 by a fixed number of positions
2387 2387 data = spc.reshape((-1))
2388 2388 data = data.astype(self.dtype[0])
2389 2389 data.tofile(self.fp)
2390 2390
2391 2391 if self.data_cspc != None:
2392 2392 data = numpy.zeros( self.shape_cspc_Buffer, self.dtype )
2393 2393 cspc = numpy.transpose( self.data_cspc, (0,2,1) )
2394 2394 if not( self.processingHeaderObj.shif_fft ):
2395 2395 cspc = numpy.roll( cspc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #shift to the right along axis 2 by a fixed number of positions
2396 2396 data['real'] = cspc.real
2397 2397 data['imag'] = cspc.imag
2398 2398 data = data.reshape((-1))
2399 2399 data.tofile(self.fp)
2400 2400
2401 2401 if self.data_dc != None:
2402 2402 data = numpy.zeros( self.shape_dc_Buffer, self.dtype )
2403 2403 dc = self.data_dc
2404 2404 data['real'] = dc.real
2405 2405 data['imag'] = dc.imag
2406 2406 data = data.reshape((-1))
2407 2407 data.tofile(self.fp)
2408 2408
2409 2409 self.data_spc.fill(0)
2410 2410
2411 2411 if self.data_dc != None:
2412 2412 self.data_dc.fill(0)
2413 2413
2414 2414 if self.data_cspc != None:
2415 2415 self.data_cspc.fill(0)
2416 2416
2417 2417 self.flagIsNewFile = 0
2418 2418 self.flagIsNewBlock = 1
2419 2419 self.nTotalBlocks += 1
2420 2420 self.nWriteBlocks += 1
2421 2421 self.blockIndex += 1
2422 2422
2423 2423
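# A brief sketch of the on-disk layout produced by writeBlock above. Each block
# is written as up to three consecutive sections: the self-spectra (only the
# real component, using self.dtype[0]), then the cross-spectra (real and
# imaginary parts of the structured dtype), then the DC channels (also
# real/imag). Before writing, each array is transposed back to
# (channels, heights, profiles) and, if shif_fft is not set, rolled by
# profilesPerBlock/2 along the profile axis, mirroring the shift applied when
# a block is read.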
2424 2424 def putData(self):
2425 2425 """
2426 2426 Sets a data block and then writes it to a file
2427 2427
2428 2428 Affected:
2429 2429 self.data_spc
2430 2430 self.data_cspc
2431 2431 self.data_dc
2432 2432
2433 2433 Return:
2434 2434 0 : If there is no data or no more files can be written
2435 2435 1 : If a block of data was written to a file
2436 2436 """
2437 2437
2438 2438 if self.dataOut.flagNoData:
2439 2439 return 0
2440 2440
2441 2441 self.flagIsNewBlock = 0
2442 2442
2443 2443 if self.dataOut.flagTimeBlock:
2444 2444 self.data_spc.fill(0)
2445 2445 self.data_cspc.fill(0)
2446 2446 self.data_dc.fill(0)
2447 2447 self.setNextFile()
2448 2448
2449 2449 if self.flagIsNewFile == 0:
2450 2450 self.setBasicHeader()
2451 2451
2452 2452 self.data_spc = self.dataOut.data_spc.copy()
2453 2453 if self.dataOut.data_cspc != None:
2454 2454 self.data_cspc = self.dataOut.data_cspc.copy()
2455 2455 self.data_dc = self.dataOut.data_dc.copy()
2456 2456
2457 2457 # #self.processingHeaderObj.dataBlocksPerFile)
2458 2458 if self.hasAllDataInBuffer():
2459 2459 # self.setFirstHeader()
2460 2460 self.writeNextBlock()
2461 2461
2462 2462 return 1
2463 2463
2464 2464
2465 2465 def __getProcessFlags(self):
2466 2466
2467 2467 processFlags = 0
2468 2468
2469 2469 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
2470 2470 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
2471 2471 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
2472 2472 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
2473 2473 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
2474 2474 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
2475 2475
2476 2476 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
2477 2477
2478 2478
2479 2479
2480 2480 datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
2481 2481 PROCFLAG.DATATYPE_SHORT,
2482 2482 PROCFLAG.DATATYPE_LONG,
2483 2483 PROCFLAG.DATATYPE_INT64,
2484 2484 PROCFLAG.DATATYPE_FLOAT,
2485 2485 PROCFLAG.DATATYPE_DOUBLE]
2486 2486
2487 2487
2488 2488 for index in range(len(dtypeList)):
2489 2489 if self.dataOut.dtype == dtypeList[index]:
2490 2490 dtypeValue = datatypeValueList[index]
2491 2491 break
2492 2492
2493 2493 processFlags += dtypeValue
2494 2494
2495 2495 if self.dataOut.flagDecodeData:
2496 2496 processFlags += PROCFLAG.DECODE_DATA
2497 2497
2498 2498 if self.dataOut.flagDeflipData:
2499 2499 processFlags += PROCFLAG.DEFLIP_DATA
2500 2500
2501 2501 if self.dataOut.code != None:
2502 2502 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
2503 2503
2504 2504 if self.dataOut.nIncohInt > 1:
2505 2505 processFlags += PROCFLAG.INCOHERENT_INTEGRATION
2506 2506
2507 2507 if self.dataOut.data_dc != None:
2508 2508 processFlags += PROCFLAG.SAVE_CHANNELS_DC
2509 2509
2510 2510 return processFlags
2511 2511
2512 2512
2513 2513 def __getBlockSize(self):
2514 2514 '''
2515 2515 Determines the number of bytes in a Spectra data block
2516 2516 '''
2517 2517
2518 2518 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
2519 2519 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
2520 2520 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
2521 2521 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
2522 2522 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
2523 2523 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
2524 2524
2525 2525 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
2526 2526 datatypeValueList = [1,2,4,8,4,8]
2527 2527 for index in range(len(dtypeList)):
2528 2528 if self.dataOut.dtype == dtypeList[index]:
2529 2529 datatypeValue = datatypeValueList[index]
2530 2530 break
2531 2531
2532 2532
2533 2533 pts2write = self.dataOut.nHeights * self.dataOut.nFFTPoints
2534 2534
2535 2535 pts2write_SelfSpectra = int(self.dataOut.nChannels * pts2write)
2536 2536 blocksize = (pts2write_SelfSpectra*datatypeValue)
2537 2537
2538 2538 if self.dataOut.data_cspc != None:
2539 2539 pts2write_CrossSpectra = int(self.dataOut.nPairs * pts2write)
2540 2540 blocksize += (pts2write_CrossSpectra*datatypeValue*2)
2541 2541
2542 2542 if self.dataOut.data_dc != None:
2543 2543 pts2write_DCchannels = int(self.dataOut.nChannels * self.dataOut.nHeights)
2544 2544 blocksize += (pts2write_DCchannels*datatypeValue*2)
2545 2545
2546 2546 blocksize = blocksize #* datatypeValue * 2 #FIX THIS
2547 2547
2548 2548 return blocksize
2549 2549
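# A worked example of the byte count above, under assumed values (float
# samples, datatypeValue = 4; 3 channels, 3 pairs, DC channels saved,
# 100 heights, 128 FFT points):
#
#     pts2write           = 100 * 128               # = 12800
#     self-spectra bytes  = 3 * 12800 * 4           # = 153600 (real values only)
#     cross-spectra bytes = 3 * 12800 * 4 * 2       # = 307200 (real + imag)
#     DC bytes            = 3 * 100 * 4 * 2         # = 2400   (real + imag)
#     blocksize           = 153600 + 307200 + 2400  # = 463200 bytes per block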
2550 2550 def setFirstHeader(self):
2551 2551
2552 2552 """
2553 2553 Gets a copy of the First Header
2554 2554
2555 2555 Affected:
2556 2556 self.systemHeaderObj
2557 2557 self.radarControllerHeaderObj
2558 2558 self.dtype
2559 2559
2560 2560 Return:
2561 2561 None
2562 2562 """
2563 2563
2564 2564 self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
2565 2565 self.systemHeaderObj.nChannels = self.dataOut.nChannels
2566 2566 self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()
2567 2567
2568 2568 self.setBasicHeader()
2569 2569
2570 2570 processingHeaderSize = 40 # bytes
2571 2571 self.processingHeaderObj.dtype = 1 # Spectra
2572 2572 self.processingHeaderObj.blockSize = self.__getBlockSize()
2573 2573 self.processingHeaderObj.profilesPerBlock = self.dataOut.nFFTPoints
2574 2574 self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
2575 2575 self.processingHeaderObj.nWindows = 1 #could be 1 or self.dataOut.processingHeaderObj.nWindows
2576 2576 self.processingHeaderObj.processFlags = self.__getProcessFlags()
2577 2577 self.processingHeaderObj.nCohInt = self.dataOut.nCohInt # Required to determine the timeInterval value
2578 2578 self.processingHeaderObj.nIncohInt = self.dataOut.nIncohInt
2579 2579 self.processingHeaderObj.totalSpectra = self.dataOut.nPairs + self.dataOut.nChannels
2580 2580 self.processingHeaderObj.shif_fft = self.dataOut.flagShiftFFT
2581 2581
2582 2582 if self.processingHeaderObj.totalSpectra > 0:
2583 2583 channelList = []
2584 2584 for channel in range(self.dataOut.nChannels):
2585 2585 channelList.append(channel)
2586 2586 channelList.append(channel)
2587 2587
2588 2588 pairsList = []
2589 2589 if self.dataOut.nPairs > 0:
2590 2590 for pair in self.dataOut.pairsList:
2591 2591 pairsList.append(pair[0])
2592 2592 pairsList.append(pair[1])
2593 2593
2594 2594 spectraComb = channelList + pairsList
2595 2595 spectraComb = numpy.array(spectraComb,dtype="u1")
2596 2596 self.processingHeaderObj.spectraComb = spectraComb
2597 2597 sizeOfSpcComb = len(spectraComb)
2598 2598 processingHeaderSize += sizeOfSpcComb
2599 2599
2600 2600 # The processing header should not have information about code
2601 2601 # if self.dataOut.code != None:
2602 2602 # self.processingHeaderObj.code = self.dataOut.code
2603 2603 # self.processingHeaderObj.nCode = self.dataOut.nCode
2604 2604 # self.processingHeaderObj.nBaud = self.dataOut.nBaud
2605 2605 # nCodeSize = 4 # bytes
2606 2606 # nBaudSize = 4 # bytes
2607 2607 # codeSize = 4 # bytes
2608 2608 # sizeOfCode = int(nCodeSize + nBaudSize + codeSize * self.dataOut.nCode * self.dataOut.nBaud)
2609 2609 # processingHeaderSize += sizeOfCode
2610 2610
2611 2611 if self.processingHeaderObj.nWindows != 0:
2612 2612 self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
2613 2613 self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
2614 2614 self.processingHeaderObj.nHeights = self.dataOut.nHeights
2615 2615 self.processingHeaderObj.samplesWin = self.dataOut.nHeights
2616 2616 sizeOfFirstHeight = 4
2617 2617 sizeOfdeltaHeight = 4
2618 2618 sizeOfnHeights = 4
2619 2619 sizeOfWindows = (sizeOfFirstHeight + sizeOfdeltaHeight + sizeOfnHeights)*self.processingHeaderObj.nWindows
2620 2620 processingHeaderSize += sizeOfWindows
2621 2621
2622 2622 self.processingHeaderObj.size = processingHeaderSize
2623 2623
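# An illustrative spectraComb layout produced by setFirstHeader above, assuming
# 3 channels and pairsList = [(0,1), (0,2)] (hypothetical values). Each spectrum
# is described by a pair of channel indices; equal indices mean a self-spectrum,
# different indices a cross-spectrum:
#
#     channelList  -> [0, 0, 1, 1, 2, 2]
#     pairsList    -> [0, 1, 0, 2]
#     spectraComb  -> [0, 0, 1, 1, 2, 2, 0, 1, 0, 2]   (dtype "u1")
#     totalSpectra -> nChannels + nPairs = 5, sizeOfSpcComb = 10 bytes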
2624 2624 class SpectraHeisWriter(Operation):
2625 2625 # set = None
2626 2626 setFile = None
2627 2627 idblock = None
2628 2628 doypath = None
2629 2629 subfolder = None
2630 2630
2631 2631 def __init__(self):
2632 2632 self.wrObj = FITS()
2633 2633 # self.dataOut = dataOut
2634 2634 self.nTotalBlocks=0
2635 2635 # self.set = None
2636 2636 self.setFile = None
2637 2637 self.idblock = 0
2638 2638 self.wrpath = None
2639 2639 self.doypath = None
2640 2640 self.subfolder = None
2641 2641 self.isConfig = False
2642 2642
2643 2643 def isNumber(str):
2644 2644 """
2645 2645 Checks whether the characters that make up a string can be converted to a number.
2646 2646
2647 2647 Exceptions:
2648 2648 If a given string cannot be converted to a number
2649 2649 Input:
2650 2650 str, string to analyze in order to determine whether it can be converted to a number
2651 2651
2652 2652 Return:
2653 2653 True : if the string is numeric
2654 2654 False : if it is not a numeric string
2655 2655 """
2656 2656 try:
2657 2657 float( str )
2658 2658 return True
2659 2659 except:
2660 2660 return False
2661 2661
2662 2662 def setup(self, dataOut, wrpath):
2663 2663
2664 2664 if not(os.path.exists(wrpath)):
2665 2665 os.mkdir(wrpath)
2666 2666
2667 2667 self.wrpath = wrpath
2668 2668 # self.setFile = 0
2669 2669 self.dataOut = dataOut
2670 2670
2671 2671 def putData(self):
2672 2672 name= time.localtime( self.dataOut.utctime)
2673 2673 ext=".fits"
2674 2674
2675 2675 if self.doypath == None:
2676 2676 self.subfolder = 'F%4.4d%3.3d_%d' % (name.tm_year,name.tm_yday,time.mktime(datetime.datetime.now().timetuple()))
2677 2677 self.doypath = os.path.join( self.wrpath, self.subfolder )
2678 2678 os.mkdir(self.doypath)
2679 2679
2680 2680 if self.setFile == None:
2681 2681 # self.set = self.dataOut.set
2682 2682 self.setFile = 0
2683 2683 # if self.set != self.dataOut.set:
2684 2684 ## self.set = self.dataOut.set
2685 2685 # self.setFile = 0
2686 2686
2687 2687 #make the filename
2688 2688 file = 'D%4.4d%3.3d_%3.3d%s' % (name.tm_year,name.tm_yday,self.setFile,ext)
2689 2689
2690 2690 filename = os.path.join(self.wrpath,self.subfolder, file)
2691 2691
2692 2692 idblock = numpy.array([self.idblock],dtype="int64")
2693 2693 header=self.wrObj.cFImage(idblock=idblock,
2694 2694 year=time.gmtime(self.dataOut.utctime).tm_year,
2695 2695 month=time.gmtime(self.dataOut.utctime).tm_mon,
2696 2696 day=time.gmtime(self.dataOut.utctime).tm_mday,
2697 2697 hour=time.gmtime(self.dataOut.utctime).tm_hour,
2698 2698 minute=time.gmtime(self.dataOut.utctime).tm_min,
2699 2699 second=time.gmtime(self.dataOut.utctime).tm_sec)
2700 2700
2701 2701 c=3E8
2702 2702 deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
2703 2703 freq=numpy.arange(-1*self.dataOut.nHeights/2.,self.dataOut.nHeights/2.)*(c/(2*deltaHeight*1000))
2704 2704
2705 2705 colList = []
2706 2706
2707 2707 colFreq=self.wrObj.setColF(name="freq", format=str(self.dataOut.nFFTPoints)+'E', array=freq)
2708 2708
2709 2709 colList.append(colFreq)
2710 2710
2711 2711 nchannel=self.dataOut.nChannels
2712 2712
2713 2713 for i in range(nchannel):
2714 2714 col = self.wrObj.writeData(name="PCh"+str(i+1),
2715 2715 format=str(self.dataOut.nFFTPoints)+'E',
2716 2716 data=10*numpy.log10(self.dataOut.data_spc[i,:]))
2717 2717
2718 2718 colList.append(col)
2719 2719
2720 2720 data=self.wrObj.Ctable(colList=colList)
2721 2721
2722 2722 self.wrObj.CFile(header,data)
2723 2723
2724 2724 self.wrObj.wFile(filename)
2725 2725
2726 2726 #update the setFile
2727 2727 self.setFile += 1
2728 2728 self.idblock += 1
2729 2729
2730 2730 return 1
2731 2731
2732 2732 def run(self, dataOut, **kwargs):
2733 2733
2734 2734 if not(self.isConfig):
2735 2735
2736 2736 self.setup(dataOut, **kwargs)
2737 2737 self.isConfig = True
2738 2738
2739 2739 self.putData()
2740 2740
2741 2741
2742 2742 class FITS:
2743 2743 name=None
2744 2744 format=None
2745 2745 array =None
2746 2746 data =None
2747 2747 thdulist=None
2748 2748 prihdr=None
2749 2749 hdu=None
2750 2750
2751 2751 def __init__(self):
2752 2752
2753 2753 pass
2754 2754
2755 2755 def setColF(self,name,format,array):
2756 2756 self.name=name
2757 2757 self.format=format
2758 2758 self.array=array
2759 2759 a1=numpy.array([self.array],dtype=numpy.float32)
2760 2760 self.col1 = pyfits.Column(name=self.name, format=self.format, array=a1)
2761 2761 return self.col1
2762 2762
2763 2763 # def setColP(self,name,format,data):
2764 2764 # self.name=name
2765 2765 # self.format=format
2766 2766 # self.data=data
2767 2767 # a2=numpy.array([self.data],dtype=numpy.float32)
2768 2768 # self.col2 = pyfits.Column(name=self.name, format=self.format, array=a2)
2769 2769 # return self.col2
2770 2770
2771 2771
2772 2772 def writeData(self,name,format,data):
2773 2773 self.name=name
2774 2774 self.format=format
2775 2775 self.data=data
2776 2776 a2=numpy.array([self.data],dtype=numpy.float32)
2777 2777 self.col2 = pyfits.Column(name=self.name, format=self.format, array=a2)
2778 2778 return self.col2
2779 2779
2780 2780 def cFImage(self,idblock,year,month,day,hour,minute,second):
2781 2781 self.hdu= pyfits.PrimaryHDU(idblock)
2782 2782 self.hdu.header.set("Year",year)
2783 2783 self.hdu.header.set("Month",month)
2784 2784 self.hdu.header.set("Day",day)
2785 2785 self.hdu.header.set("Hour",hour)
2786 2786 self.hdu.header.set("Minute",minute)
2787 2787 self.hdu.header.set("Second",second)
2788 2788 return self.hdu
2789 2789
2790 2790
2791 2791 def Ctable(self,colList):
2792 2792 self.cols=pyfits.ColDefs(colList)
2793 2793 self.tbhdu = pyfits.new_table(self.cols)
2794 2794 return self.tbhdu
2795 2795
2796 2796
2797 2797 def CFile(self,hdu,tbhdu):
2798 2798 self.thdulist=pyfits.HDUList([hdu,tbhdu])
2799 2799
2800 2800 def wFile(self,filename):
2801 2801 if os.path.isfile(filename):
2802 2802 os.remove(filename)
2803 2803 self.thdulist.writeto(filename)
2804 2804
2805 2805
2806 2806 class ParameterConf:
2807 2807 ELEMENTNAME = 'Parameter'
2808 2808 def __init__(self):
2809 2809 self.name = ''
2810 2810 self.value = ''
2811 2811
2812 2812 def readXml(self, parmElement):
2813 2813 self.name = parmElement.get('name')
2814 2814 self.value = parmElement.get('value')
2815 2815
2816 2816 def getElementName(self):
2817 2817 return self.ELEMENTNAME
2818 2818
2819 2819 class Metadata:
2820 2820
2821 2821 def __init__(self, filename):
2822 2822 self.parmConfObjList = []
2823 2823 self.readXml(filename)
2824 2824
2825 2825 def readXml(self, filename):
2826 2826 self.projectElement = None
2827 2827 self.procUnitConfObjDict = {}
2828 2828 self.projectElement = ElementTree().parse(filename)
2829 2829 self.project = self.projectElement.tag
2830 2830
2831 2831 parmElementList = self.projectElement.getiterator(ParameterConf().getElementName())
2832 2832
2833 2833 for parmElement in parmElementList:
2834 2834 parmConfObj = ParameterConf()
2835 2835 parmConfObj.readXml(parmElement)
2836 2836 self.parmConfObjList.append(parmConfObj)
2837 2837
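# A minimal sketch of the XML file expected by the Metadata class above. Only
# the 'Parameter' elements with 'name' and 'value' attributes are read; the
# root tag and the parameter names shown here are illustrative, not taken from
# a real metadata file. FitsWriter.setFitsHeader maps the special values
# 'fromdatadatetime', 'fromdataheights', 'fromdatachannel' and 'fromdatasamples'
# to fields taken from dataOut.
#
#     <Metadata>
#         <Parameter name='EXPNAME' value='my_experiment' />
#         <Parameter name='NCHANNEL' value='fromdatachannel' />
#         <Parameter name='NSAMPLE' value='fromdatasamples' />
#         <Parameter name='DATETIME' value='fromdatadatetime' />
#     </Metadata>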
2838 2838 class FitsWriter(Operation):
2839 2839
2840 2840 def __init__(self):
2841 2841 self.isConfig = False
2842 2842 self.dataBlocksPerFile = None
2843 2843 self.blockIndex = 0
2844 2844 self.flagIsNewFile = 1
2845 2845 self.fitsObj = None
2846 2846 self.optchar = 'P'
2847 2847 self.ext = '.fits'
2848 2848 self.setFile = 0
2849 2849
2850 2850 def setFitsHeader(self, dataOut, metadatafile):
2851 2851
2852 2852 header_data = pyfits.PrimaryHDU()
2853 2853
2854 2854 metadata4fits = Metadata(metadatafile)
2855 2855 for parameter in metadata4fits.parmConfObjList:
2856 2856 parm_name = parameter.name
2857 2857 parm_value = parameter.value
2858 2858
2859 2859 if parm_value == 'fromdatadatetime':
2860 2860 value = time.strftime("%b %d %Y %H:%M:%S", dataOut.datatime.timetuple())
2861 2861 elif parm_value == 'fromdataheights':
2862 2862 value = dataOut.nHeights
2863 2863 elif parm_value == 'fromdatachannel':
2864 2864 value = dataOut.nChannels
2865 2865 elif parm_value == 'fromdatasamples':
2866 2866 value = dataOut.nFFTPoints
2867 2867 else:
2868 2868 value = parm_value
2869 2869
2870 2870 header_data.header[parm_name] = value
2871 2871
2872 2872 header_data.header['NBLOCK'] = self.blockIndex
2873 2873
2874 2874 header_data.writeto(self.filename)
2875 2875
2876 2876
2877 2877 def setup(self, dataOut, path, dataBlocksPerFile, metadatafile):
2878 2878
2879 2879 self.path = path
2880 2880 self.dataOut = dataOut
2881 2881 self.metadatafile = metadatafile
2882 2882 self.dataBlocksPerFile = dataBlocksPerFile
2883 2883
2884 2884 def open(self):
2885 2885 self.fitsObj = pyfits.open(self.filename, mode='update')
2886 2886
2887 2887
2888 2888 def addData(self, data):
2889 2889 self.open()
2890 2890 extension = pyfits.ImageHDU(data=data, name=self.fitsObj[0].header['DATA'])
2891 2891 extension.header['UTCTIME'] = self.dataOut.utctime
2892 2892 self.fitsObj.append(extension)
2893 2893 self.blockIndex += 1
2894 2894 self.fitsObj[0].header['NBLOCK'] = self.blockIndex
2895 2895
2896 2896 self.write()
2897 2897
2898 2898 def write(self):
2899 2899
2900 2900 self.fitsObj.flush(verbose=True)
2901 2901 self.fitsObj.close()
2902 2902
2903 2903
2904 2904 def setNextFile(self):
2905 2905
2906 2906 ext = self.ext
2907 2907 path = self.path
2908 2908
2909 2909 timeTuple = time.localtime( self.dataOut.utctime)
2910 2910 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
2911 2911
2912 2912 fullpath = os.path.join( path, subfolder )
2913 2913 if not( os.path.exists(fullpath) ):
2914 2914 os.mkdir(fullpath)
2915 2915 self.setFile = -1 #initialize the set counter
2916 2916 else:
2917 2917 filesList = os.listdir( fullpath )
2918 2918 if len( filesList ) > 0:
2919 2919 filesList = sorted( filesList, key=str.lower )
2920 2920 filen = filesList[-1]
2921 2921
2922 2922 if isNumber( filen[8:11] ):
2923 2923 self.setFile = int( filen[8:11] ) #initialize the set counter to the set number of the last file
2924 2924 else:
2925 2925 self.setFile = -1
2926 2926 else:
2927 2927 self.setFile = -1 #initialize the set counter
2928 2928
2929 2929 setFile = self.setFile
2930 2930 setFile += 1
2931 2931
2932 2932 file = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
2933 2933 timeTuple.tm_year,
2934 2934 timeTuple.tm_yday,
2935 2935 setFile,
2936 2936 ext )
2937 2937
2938 2938 filename = os.path.join( path, subfolder, file )
2939 2939
2940 2940 self.blockIndex = 0
2941 2941 self.filename = filename
2942 2942 self.setFile = setFile
2943 2943 self.flagIsNewFile = 1
2944 2944
2945 2945 print 'Writing the file: %s'%self.filename
2946 2946
2947 2947 self.setFitsHeader(self.dataOut, self.metadatafile)
2948 2948
2949 2949 return 1
2950 2950
2951 2951 def writeBlock(self):
2952 2952 self.addData(self.dataOut.data_spc)
2953 2953 self.flagIsNewFile = 0
2954 2954
2955 2955
2956 2956 def __setNewBlock(self):
2957 2957
2958 2958 if self.flagIsNewFile:
2959 2959 return 1
2960 2960
2961 2961 if self.blockIndex < self.dataBlocksPerFile:
2962 2962 return 1
2963 2963
2964 2964 if not( self.setNextFile() ):
2965 2965 return 0
2966 2966
2967 2967 return 1
2968 2968
2969 2969 def writeNextBlock(self):
2970 2970 if not( self.__setNewBlock() ):
2971 2971 return 0
2972 2972 self.writeBlock()
2973 2973 return 1
2974 2974
2975 2975 def putData(self):
2976 2976 if self.flagIsNewFile:
2977 2977 self.setNextFile()
2978 2978 self.writeNextBlock()
2979 2979
2980 2980 def run(self, dataOut, **kwargs):
2981 2981 if not(self.isConfig):
2982 2982 self.setup(dataOut, **kwargs)
2983 2983 self.isConfig = True
2984 2984 self.putData()
2985 2985
2986 2986
2987 2987 class FitsReader(ProcessingUnit):
2988 2988
2989 2989 __TIMEZONE = time.timezone
2990 2990
2991 2991 expName = None
2992 2992 datetimestr = None
2993 2993 utc = None
2994 2994 nChannels = None
2995 2995 nSamples = None
2996 2996 dataBlocksPerFile = None
2997 2997 comments = None
2998 2998 lastUTTime = None
2999 2999 header_dict = None
3000 3000 data = None
3001 3001 data_header_dict = None
3002 3002
3003 3003 def __init__(self):
3004 3004 self.isConfig = False
3005 3005 self.ext = '.fits'
3006 3006 self.setFile = 0
3007 3007 self.flagNoMoreFiles = 0
3008 3008 self.flagIsNewFile = 1
3009 3009 self.flagTimeBlock = None
3010 3010 self.fileIndex = None
3011 3011 self.filename = None
3012 3012 self.fileSize = None
3013 3013 self.fitsObj = None
3014 3014 self.nReadBlocks = 0
3015 3015 self.nTotalBlocks = 0
3016 3016 self.dataOut = self.createObjByDefault()
3017 3017 self.maxTimeStep = 10 # should be defined by the user via the setup() method
3018 3018 self.blockIndex = 1
3019 3019
3020 3020 def createObjByDefault(self):
3021 3021
3022 3022 dataObj = Fits()
3023 3023
3024 3024 return dataObj
3025 3025
3026 3026 def isFileinThisTime(self, filename, startTime, endTime, useLocalTime=False):
3027 3027 try:
3028 3028 fitsObj = pyfits.open(filename,'readonly')
3029 3029 except:
3030 3030 raise IOError, "The file %s can't be opened" %(filename)
3031 3031
3032 3032 header = fitsObj[0].header
3033 3033 struct_time = time.strptime(header['DATETIME'], "%b %d %Y %H:%M:%S")
3034 3034 utc = time.mktime(struct_time) - time.timezone #TIMEZONE should be a parameter of the FITS header
3035 3035
3036 3036 ltc = utc
3037 3037 if useLocalTime:
3038 3038 ltc -= time.timezone
3039 3039 thisDatetime = datetime.datetime.utcfromtimestamp(ltc)
3040 3040 thisTime = thisDatetime.time()
3041 3041
3042 3042 if not ((startTime <= thisTime) and (endTime > thisTime)):
3043 3043 return None
3044 3044
3045 3045 return thisDatetime
3046 3046
3047 3047 def __setNextFileOnline(self):
3048 3048 raise ValueError, "Not implemented"
3049 3049
3050 3050 def __setNextFileOffline(self):
3051 3051 idFile = self.fileIndex
3052 3052
3053 3053 while (True):
3054 3054 idFile += 1
3055 3055 if not(idFile < len(self.filenameList)):
3056 3056 self.flagNoMoreFiles = 1
3057 3057 print "No more Files"
3058 3058 return 0
3059 3059
3060 3060 filename = self.filenameList[idFile]
3061 3061
3062 3062 # if not(self.__verifyFile(filename)):
3063 3063 # continue
3064 3064
3065 3065 fileSize = os.path.getsize(filename)
3066 3066 fitsObj = pyfits.open(filename,'readonly')
3067 3067 break
3068 3068
3069 3069 self.flagIsNewFile = 1
3070 3070 self.fileIndex = idFile
3071 3071 self.filename = filename
3072 3072 self.fileSize = fileSize
3073 3073 self.fitsObj = fitsObj
3074 3074
3075 3075 print "Setting the file: %s"%self.filename
3076 3076
3077 3077 return 1
3078 3078
3079 3079 def readHeader(self):
3080 3080 headerObj = self.fitsObj[0]
3081 3081
3082 3082 self.header_dict = headerObj.header
3083 3083 self.expName = headerObj.header['EXPNAME']
3084 3084 self.datetimestr = headerObj.header['DATETIME']
3085 3085 struct_time = time.strptime(headerObj.header['DATETIME'], "%b %d %Y %H:%M:%S")
3086 3086 # self.utc = time.mktime(struct_time) - self.__TIMEZONE
3087 3087 self.nChannels = headerObj.header['NCHANNEL']
3088 3088 self.nSamples = headerObj.header['NSAMPLE']
3089 3089 self.dataBlocksPerFile = headerObj.header['NBLOCK']
3090 3090 self.comments = headerObj.header['COMMENT']
3091 3091
3092 3092
3093 3093 def setNextFile(self):
3094 3094
3095 3095 if self.online:
3096 3096 newFile = self.__setNextFileOnline()
3097 3097 else:
3098 3098 newFile = self.__setNextFileOffline()
3099 3099
3100 3100 if not(newFile):
3101 3101 return 0
3102 3102
3103 3103 self.readHeader()
3104 3104
3105 3105 self.nReadBlocks = 0
3106 3106 self.blockIndex = 1
3107 3107 return 1
3108 3108
3109 3109 def __searchFilesOffLine(self,
3110 3110 path,
3111 3111 startDate,
3112 3112 endDate,
3113 3113 startTime=datetime.time(0,0,0),
3114 3114 endTime=datetime.time(23,59,59),
3115 3115 set=None,
3116 3116 expLabel='',
3117 3117 ext='.fits',
3118 3118 walk=True):
3119 3119
3120 3120 pathList = []
3121 3121
3122 3122 if not walk:
3123 3123 pathList.append(path)
3124 3124
3125 3125 else:
3126 3126 dirList = []
3127 3127 for thisPath in os.listdir(path):
3128 3128 if not os.path.isdir(os.path.join(path,thisPath)):
3129 3129 continue
3130 3130 if not isDoyFolder(thisPath):
3131 3131 continue
3132 3132
3133 3133 dirList.append(thisPath)
3134 3134
3135 3135 if not(dirList):
3136 3136 return None, None
3137 3137
3138 3138 thisDate = startDate
3139 3139
3140 3140 while(thisDate <= endDate):
3141 3141 year = thisDate.timetuple().tm_year
3142 3142 doy = thisDate.timetuple().tm_yday
3143 3143
3144 3144 matchlist = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy) + '*')
3145 3145 if len(matchlist) == 0:
3146 3146 thisDate += datetime.timedelta(1)
3147 3147 continue
3148 3148 for match in matchlist:
3149 3149 pathList.append(os.path.join(path,match,expLabel))
3150 3150
3151 3151 thisDate += datetime.timedelta(1)
3152 3152
3153 3153 if pathList == []:
3154 3154 print "Any folder was found for the date range: %s-%s" %(startDate, endDate)
3155 3155 return None, None
3156 3156
3157 3157 print "%d folder(s) was(were) found for the date range: %s - %s" %(len(pathList), startDate, endDate)
3158 3158
3159 3159 filenameList = []
3160 3160 datetimeList = []
3161 3161
3162 3162 for i in range(len(pathList)):
3163 3163
3164 3164 thisPath = pathList[i]
3165 3165
3166 3166 fileList = glob.glob1(thisPath, "*%s" %ext)
3167 3167 fileList.sort()
3168 3168
3169 3169 for file in fileList:
3170 3170
3171 3171 filename = os.path.join(thisPath,file)
3172 3172 thisDatetime = self.isFileinThisTime(filename, startTime, endTime, useLocalTime=True)
3173 3173
3174 3174 if not(thisDatetime):
3175 3175 continue
3176 3176
3177 3177 filenameList.append(filename)
3178 3178 datetimeList.append(thisDatetime)
3179 3179
3180 3180 if not(filenameList):
3181 3181 print "Any file was found for the time range %s - %s" %(startTime, endTime)
3182 3182 return None, None
3183 3183
3184 3184 print "%d file(s) was(were) found for the time range: %s - %s" %(len(filenameList), startTime, endTime)
3185 3185 print
3186 3186
3187 3187 for i in range(len(filenameList)):
3188 3188 print "%s -> [%s]" %(filenameList[i], datetimeList[i].ctime())
3189 3189
3190 3190 self.filenameList = filenameList
3191 3191 self.datetimeList = datetimeList
3192 3192
3193 3193 return pathList, filenameList
3194 3194
3195 3195 def setup(self, path=None,
3196 3196 startDate=None,
3197 3197 endDate=None,
3198 3198 startTime=datetime.time(0,0,0),
3199 3199 endTime=datetime.time(23,59,59),
3200 3200 set=0,
3201 3201 expLabel = "",
3202 3202 ext = None,
3203 3203 online = False,
3204 3204 delay = 60,
3205 3205 walk = True):
3206 3206
3207 3207 if path == None:
3208 3208 raise ValueError, "The path is not valid"
3209 3209
3210 3210 if ext == None:
3211 3211 ext = self.ext
3212 3212
3213 3213 if not(online):
3214 3214 print "Searching files in offline mode ..."
3215 3215 pathList, filenameList = self.__searchFilesOffLine(path, startDate=startDate, endDate=endDate,
3216 3216 startTime=startTime, endTime=endTime,
3217 3217 set=set, expLabel=expLabel, ext=ext,
3218 3218 walk=walk)
3219 3219
3220 3220 if not(pathList):
3221 3221 print "No *%s files into the folder %s \nfor the range: %s - %s"%(ext, path,
3222 3222 datetime.datetime.combine(startDate,startTime).ctime(),
3223 3223 datetime.datetime.combine(endDate,endTime).ctime())
3224 3224
3225 3225 sys.exit(-1)
3226 3226
3227 3227 self.fileIndex = -1
3228 3228 self.pathList = pathList
3229 3229 self.filenameList = filenameList
3230 3230
3231 3231 self.online = online
3232 3232 self.delay = delay
3233 3233 ext = ext.lower()
3234 3234 self.ext = ext
3235 3235
3236 3236 if not(self.setNextFile()):
3237 3237 if (startDate!=None) and (endDate!=None):
3238 3238 print "No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
3239 3239 elif startDate != None:
3240 3240 print "No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
3241 3241 else:
3242 3242 print "No files"
3243 3243
3244 3244 sys.exit(-1)
3245 3245
3246 3246
3247 3247
3248 3248 def readBlock(self):
3249 3249 dataObj = self.fitsObj[self.blockIndex]
3250 3250
3251 3251 self.data = dataObj.data
3252 3252 self.data_header_dict = dataObj.header
3253 3253 self.utc = self.data_header_dict['UTCTIME']
3254 3254
3255 3255 self.flagIsNewFile = 0
3256 3256 self.blockIndex += 1
3257 3257 self.nTotalBlocks += 1
3258 3258 self.nReadBlocks += 1
3259 3259
3260 3260 return 1
3261 3261
3262 3262 def __jumpToLastBlock(self):
3263 3263 raise ValueError, "Not implemented"
3264 3264
3265 3265 def __waitNewBlock(self):
3266 3266 """
3267 3267 Returns 1 if a new data block was found, 0 otherwise.
3268 3268
3269 3269 If the reading mode is offline, it always returns 0
3270 3270 """
3271 3271 if not self.online:
3272 3272 return 0
3273 3273
3274 3274 if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
3275 3275 return 0
3276 3276
3277 3277 currentPointer = self.fp.tell()
3278 3278
3279 3279 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
3280 3280
3281 3281 for nTries in range( self.nTries ):
3282 3282
3283 3283 self.fp.close()
3284 3284 self.fp = open( self.filename, 'rb' )
3285 3285 self.fp.seek( currentPointer )
3286 3286
3287 3287 self.fileSize = os.path.getsize( self.filename )
3288 3288 currentSize = self.fileSize - currentPointer
3289 3289
3290 3290 if ( currentSize >= neededSize ):
3291 3291 self.__rdBasicHeader()
3292 3292 return 1
3293 3293
3294 3294 print "\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
3295 3295 time.sleep( self.delay )
3296 3296
3297 3297
3298 3298 return 0
3299 3299
3300 3300 def __setNewBlock(self):
3301 3301
3302 3302 if self.online:
3303 3303 self.__jumpToLastBlock()
3304 3304
3305 3305 if self.flagIsNewFile:
3306 3306 return 1
3307 3307
3308 3308 self.lastUTTime = self.utc
3309 3309
3310 3310 if self.online:
3311 3311 if self.__waitNewBlock():
3312 3312 return 1
3313 3313
3314 3314 if self.nReadBlocks < self.dataBlocksPerFile:
3315 3315 return 1
3316 3316
3317 3317 if not(self.setNextFile()):
3318 3318 return 0
3319 3319
3320 3320 deltaTime = self.utc - self.lastUTTime
3321 3321
3322 3322 self.flagTimeBlock = 0
3323 3323
3324 3324 if deltaTime > self.maxTimeStep:
3325 3325 self.flagTimeBlock = 1
3326 3326
3327 3327 return 1
3328 3328
3329 3329
3330 3330 def readNextBlock(self):
3331 3331 if not(self.__setNewBlock()):
3332 3332 return 0
3333 3333
3334 3334 if not(self.readBlock()):
3335 3335 return 0
3336 3336
3337 3337 return 1
3338 3338
3339 3339
3340 3340 def getData(self):
3341 3341
3342 3342 if self.flagNoMoreFiles:
3343 3343 self.dataOut.flagNoData = True
3344 3344 print 'Process finished'
3345 3345 return 0
3346 3346
3347 3347 self.flagTimeBlock = 0
3348 3348 self.flagIsNewBlock = 0
3349 3349
3350 3350 if not(self.readNextBlock()):
3351 3351 return 0
3352 3352
3353 3353 if self.data == None:
3354 3354 self.dataOut.flagNoData = True
3355 3355 return 0
3356 3356
3357 3357 self.dataOut.data = self.data
3358 3358 self.dataOut.data_header = self.data_header_dict
3359 3359 self.dataOut.utctime = self.utc
3360 3360
3361 3361 self.dataOut.header = self.header_dict
3362 3362 self.dataOut.expName = self.expName
3363 3363 self.dataOut.nChannels = self.nChannels
3364 3364 self.dataOut.nSamples = self.nSamples
3365 3365 self.dataOut.dataBlocksPerFile = self.dataBlocksPerFile
3366 3366 self.dataOut.comments = self.comments
3367 3367
3368 3368 self.dataOut.flagNoData = False
3369 3369
3370 3370 return self.dataOut.data
3371 3371
3372 3372 def run(self, **kwargs):
3373 3373
3374 3374 if not(self.isConfig):
3375 3375 self.setup(**kwargs)
3376 3376 self.isConfig = True
3377 3377
3378 3378 self.getData()