##// END OF EJS Templates
En el método readBlocks se encuentra un error cuando se intenta hacer el reshape del arreglo junk; este error se produce porque no hay bytes suficientes para un bloque de datos. Para esto se propuso el nuevo método waitDataBlock, que recalcula el número de bytes necesarios para un bloque y espera un tiempo (sleep) en caso de que no estén disponibles; se hacen tres intentos y, si no se tiene éxito, el programa retorna 0.
Daniel Valdez -
r434:586e60daf0fb
parent child
Show More
@@ -1,3378 +1,3405
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JRODataIO.py 169 2012-11-19 21:57:03Z murco $
5 5 '''
6 6
7 7 import os, sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import time, datetime
13 13 from xml.etree.ElementTree import Element, SubElement, ElementTree
14 14 try:
15 15 import pyfits
16 16 except:
17 17 print "pyfits module has not been imported, it should be installed to save files in fits format"
18 18
19 19 from jrodata import *
20 20 from jroheaderIO import *
21 21 from jroprocessing import *
22 22
23 23 LOCALTIME = True #-18000
24 24
def isNumber(str):
    """
    Check whether a value can be converted to a number.

    Input:
        str : string (or any value accepted by float()) to test

    Return:
        True  : the value represents a number
        False : it does not
    """
    try:
        float( str )
        return True
    except (TypeError, ValueError):
        # only conversion failures mean "not a number"; the original bare
        # except also swallowed KeyboardInterrupt/SystemExit
        return False
43 43
def isThisFileinRange(filename, startUTSeconds, endUTSeconds):
    """
    Determine whether a Jicamarca data file holds data inside the given UTC range.

    Inputs:
        filename       : full path of the data file (.r)
        startUTSeconds : start of the range, seconds since 01/01/1970
        endUTSeconds   : end of the range, seconds since 01/01/1970

    Return:
        1 when the basic-header time lies in [startUTSeconds, endUTSeconds),
        0 otherwise (also when the header is not valid).

    Exceptions:
        IOError when the file cannot be opened.
    """
    basicHeaderObj = BasicHeader(LOCALTIME)

    try:
        fp = open(filename, 'rb')
    except:
        raise IOError("The file %s can't be opened" % (filename))

    # close the file even if the header read raises (the original leaked fp here)
    try:
        sts = basicHeaderObj.read(fp)
    finally:
        fp.close()

    if not(sts):
        print("Skipping the file %s because it has not a valid header" % (filename))
        return 0

    if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
        return 0

    return 1
83 83
def isFileinThisTime(filename, startTime, endTime):
    """
    Return the file's datetime when its data falls inside the given time range.

    Inputs:
        filename  : full path of the data file (.r)
        startTime : datetime.time, start of the selected range
        endTime   : datetime.time, end of the selected range

    Return:
        The basic-header datetime when it lies in [startTime, endTime),
        otherwise None (also None when the header is not valid).

    Exceptions:
        IOError when the file cannot be opened.
    """
    try:
        fp = open(filename, 'rb')
    except:
        raise IOError("The file %s can't be opened" % (filename))

    basicHeaderObj = BasicHeader(LOCALTIME)

    # close the file even if the header read raises
    try:
        sts = basicHeaderObj.read(fp)
    finally:
        fp.close()

    # check the read status BEFORE touching header fields: on a failed read
    # basicHeaderObj.datatime may be missing or invalid (the original accessed
    # it first and could crash before reaching this guard)
    if not(sts):
        print("Skipping the file %s because it has not a valid header" % (filename))
        return None

    thisDatetime = basicHeaderObj.datatime
    thisTime = thisDatetime.time()

    if not ((startTime <= thisTime) and (endTime > thisTime)):
        return None

    return thisDatetime
126 126
def getFileFromSet(path, ext, set):
    """
    Return the file in *path* whose name matches the given set number.

    File names follow the Jicamarca convention xYYYYDDDSSS.ext; the year/doy
    used for the match are taken from the last parseable file of the folder.

    Inputs:
        path : folder containing the data files
        ext  : file extension (case-insensitive)
        set  : set number (SSS) to look for

    Return:
        The matching file name; when the set is not present, the last valid
        file of the folder; None when the folder holds no valid data file.
    """
    validFilelist = []
    fileList = os.listdir(path)

    # 0 1234 567 89A BCDE
    # H YYYY DDD SSS .ext

    year = None
    doy = None

    for file in fileList:
        try:
            year = int(file[1:5])
            doy = int(file[5:8])
        except:
            continue

        if (os.path.splitext(file)[-1].lower() != ext.lower()):
            continue

        validFilelist.append(file)

    if year is None:
        # no file matched the xYYYYDDDSSS.ext convention; without this guard
        # the fnmatch pattern below raised NameError on `year`/`doy`
        return None

    myfile = fnmatch.filter(validFilelist, '*%4.4d%3.3d%3.3d*' % (year, doy, set))

    if len(myfile) != 0:
        return myfile[0]

    filename = '*%4.4d%3.3d%3.3d%s' % (year, doy, set, ext.lower())
    print('the filename %s does not exist' % filename)
    print('...going to the last file: ')

    if validFilelist:
        validFilelist = sorted(validFilelist, key=str.lower)
        return validFilelist[-1]

    return None
162 162
163 163
def getlastFileFromPath(path, ext):
    """
    Return the last data file of a folder, considering only names that follow
    the "PYYYYDDDSSS.ext" convention.

    Input:
        path : folder to scan
        ext  : extension of the files to keep (case-insensitive)

    Return:
        The last file (sorted case-insensitively), without path, or None when
        the folder has no valid file.
    """
    # 0 1234 567 89A BCDE
    # H YYYY DDD SSS .ext
    candidates = []

    for entry in os.listdir(path):
        try:
            int(entry[1:5])     # YYYY
            int(entry[5:8])     # DDD
        except:
            continue

        if os.path.splitext(entry)[-1].lower() != ext.lower():
            continue

        candidates.append(entry)

    if not candidates:
        return None

    return sorted(candidates, key=str.lower)[-1]
201 201
def checkForRealPath(path, foldercounter, year, doy, set, ext):
    """
    Resolve the case-sensitive path of a data file on Linux.

    Tries every upper/lower-case combination of the day-folder prefix
    (none, 'd', 'D') and the file prefix ('d'/'D' for .r, 'p'/'P' for .pdata)
    until an existing file is found.

    Example:
        for .../D2009307/P2009307367.ext the combinations tried are
        y/Y2009307367.ext directly under *path* and then inside
        x2009307/X2009307 style folders.

    Return:
        (fullfilename, filename) when a combination exists; otherwise
        (None, last filename combination tried) — and (None, None) for an
        unknown extension.
    """
    filename = None

    if ext.lower() == ".r":         # voltage
        filePrefixes = ['d', 'D']
    elif ext.lower() == ".pdata":   # spectra
        filePrefixes = ['p', 'P']
    else:
        return None, filename

    # sweep every possible folder/file prefix combination, in order
    for dirPrefix in [None, 'd', 'D']:
        if dirPrefix is None:
            searchPath = path
        elif foldercounter == 0:
            # day folder named xYYYYDDD
            searchPath = os.path.join(path, "%s%04d%03d" % (dirPrefix, year, doy))
        else:
            # day folder named xYYYYDDD_NN
            searchPath = os.path.join(path, "%s%04d%03d_%02d" % (dirPrefix, year, doy, foldercounter))

        for filePrefix in filePrefixes:
            # file named xYYYYDDDSSS.ext
            filename = "%s%04d%03d%03d%s" % (filePrefix, year, doy, set, ext)
            fullfilename = os.path.join(searchPath, filename)
            if os.path.exists(fullfilename):
                return fullfilename, filename

    return None, filename
260 260
def isDoyFolder(folder):
    """
    Return 1 when *folder* looks like a day folder (xYYYYDDD...): characters
    1-4 and 5-7 must both be integers. Return 0 otherwise.
    """
    try:
        int(folder[1:5])    # year
        int(folder[5:8])    # day of year
    except:
        return 0

    return 1
273 273
class JRODataIO:
    """
    Base class holding the state shared by JRO data readers and writers:
    the four Jicamarca headers, the current file handle and the
    bookkeeping flags and counters.
    """

    # speed of light (m/s); used to derive ippSeconds from the IPP
    c = 3E8

    isConfig = False

    basicHeaderObj = BasicHeader(LOCALTIME)

    systemHeaderObj = SystemHeader()

    radarControllerHeaderObj = RadarControllerHeader()

    processingHeaderObj = ProcessingHeader()

    # non-zero when reading files while they are still being written
    online = 0

    # numpy dtype of one complex sample, set from the processing header
    dtype = None

    pathList = []

    filenameList = []

    # full path of the file currently opened
    filename = None

    # file extension: ".r" (voltage) or ".pdata" (spectra)
    ext = None

    flagIsNewFile = 1

    # raised to 1 when the gap between consecutive blocks exceeds maxTimeStep
    flagTimeBlock = 0

    flagIsNewBlock = 0

    # handle of the currently opened file
    fp = None

    firstHeaderSize = 0

    # size in bytes of a basic header
    basicHeaderSize = 24

    versionFile = 1103

    fileSize = None

    # inter-pulse period in seconds, derived from the radar controller header
    ippSeconds = None

    # expected total file size computed from the headers
    fileSizeByHeader = None

    fileIndex = None

    profileIndex = None

    blockIndex = None

    nTotalBlocks = None

    # maximum allowed seconds between blocks before flagTimeBlock is set
    maxTimeStep = 30

    lastUTTime = None

    datablock = None

    # input/output data object handed to the processing chain
    dataOut = None

    blocksize = None

    def __init__(self):
        # Abstract: subclasses must provide their own constructor.
        raise ValueError, "Not implemented"

    def run(self):
        # Abstract: entry point used by the processing chain.
        raise ValueError, "Not implemented"

    def getOutput(self):
        # Return the current data object.
        return self.dataOut
349 349
class JRODataReader(JRODataIO, ProcessingUnit):
    """
    Base reader for Jicamarca data files: locates files (online or offline),
    walks through their data blocks and exposes the result via self.dataOut.
    """

    # blocks already read from the current file
    nReadBlocks = 0

    delay = 10 #number of seconds waiting a new file

    nTries = 3 #quantity tries

    nFiles = 3 #number of files for searching

    path = None

    # numeric suffix of day folders named xYYYYDDD_NN (0 when absent)
    foldercounter = 0

    flagNoMoreFiles = 0

    datetimeList = []

    # stays 1 until the online reader has positioned at the last block once
    __isFirstTimeOnline = 1

    # printInfo() prints the headers only while this is True
    __printInfo = True

    profileIndex = None
    def __init__(self):

        """
        Abstract constructor: concrete readers must implement it.
        """

        raise ValueError, "This method has not been implemented"
381 381
382 382
    def createObjByDefault(self):
        """
        Abstract: create and return the default output object; implemented by subclasses.
        """
        raise ValueError, "This method has not been implemented"
388 388
    def getBlockDimension(self):
        """
        Abstract: compute the data-block shape from the headers; implemented by subclasses.
        """
        raise ValueError, "No implemented"
392 392
    def __searchFilesOffLine(self,
                            path,
                            startDate,
                            endDate,
                            startTime=datetime.time(0,0,0),
                            endTime=datetime.time(23,59,59),
                            set=None,
                            expLabel='',
                            ext='.r',
                            walk=True):
        """
        Build the list of data files under *path* whose header time lies
        inside the given date and time-of-day ranges.

        Input:
            path                : root data folder
            startDate, endDate  : date range of day folders to accept
            startTime, endTime  : datetime.time range inside each day
            set                 : unused here (kept for signature symmetry)
            expLabel            : sub-experiment folder appended to each day folder
            ext                 : file extension to look for
            walk                : when True descend into xYYYYDDD day folders,
                                  when False use *path* directly

        Affected:
            self.filenameList, self.datetimeList

        Return:
            (pathList, filenameList), or (None, None) when nothing was found.
        """
        pathList = []

        if not walk:
            pathList.append(path)

        else:
            # keep only sub-directories that look like day folders (xYYYYDDD)
            dirList = []
            for thisPath in os.listdir(path):
                if not os.path.isdir(os.path.join(path,thisPath)):
                    continue
                if not isDoyFolder(thisPath):
                    continue

                dirList.append(thisPath)

            if not(dirList):
                return None, None

            thisDate = startDate

            while(thisDate <= endDate):
                year = thisDate.timetuple().tm_year
                doy = thisDate.timetuple().tm_yday

                # '?YYYYDDD*' also matches suffixed folders like dYYYYDDD_01
                matchlist = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy) + '*')
                if len(matchlist) == 0:
                    thisDate += datetime.timedelta(1)
                    continue
                for match in matchlist:
                    pathList.append(os.path.join(path,match,expLabel))

                thisDate += datetime.timedelta(1)

        if pathList == []:
            print "Any folder was found for the date range: %s-%s" %(startDate, endDate)
            return None, None

        print "%d folder(s) was(were) found for the date range: %s - %s" %(len(pathList), startDate, endDate)

        filenameList = []
        datetimeList = []

        for i in range(len(pathList)):

            thisPath = pathList[i]

            fileList = glob.glob1(thisPath, "*%s" %ext)
            fileList.sort()

            for file in fileList:

                filename = os.path.join(thisPath,file)
                # isFileinThisTime returns the header datetime, or None
                # when the file is outside the range or has a bad header
                thisDatetime = isFileinThisTime(filename, startTime, endTime)

                if not(thisDatetime):
                    continue

                filenameList.append(filename)
                datetimeList.append(thisDatetime)

        if not(filenameList):
            print "Any file was found for the time range %s - %s" %(startTime, endTime)
            return None, None

        print "%d file(s) was(were) found for the time range: %s - %s" %(len(filenameList), startTime, endTime)
        print

        for i in range(len(filenameList)):
            print "%s -> [%s]" %(filenameList[i], datetimeList[i].ctime())

        self.filenameList = filenameList
        self.datetimeList = datetimeList

        return pathList, filenameList
478 478
    def __searchFilesOnLine(self, path, expLabel = "", ext = None, walk=True, set=None):

        """
        Find the newest data file: the last (or the *set*-numbered) file of
        the last day folder under *path*.

        Input:
            path     : folder that contains the data folders/files
            expLabel : sub-experiment folder name (appended to the day folder)
            ext      : extension of the files
            walk     : when True search inside xYYYYDDD day folders,
                       when False use *path* directly
            set      : specific set number; when None take the last file

        Return (a 6-tuple; all None on failure):
            fullpath      : directory where the file was found
            foldercounter : numeric suffix of the day folder (xYYYYDDD_NN), else 0
            filename      : the file found
            year, doy, set: decoded from the file name
        """
        dirList = []

        if not walk:
            fullpath = path
            foldercounter = 0
        else:
            # keep only the sub-directories that look like day folders
            for thisPath in os.listdir(path):
                if not os.path.isdir(os.path.join(path,thisPath)):
                    continue
                if not isDoyFolder(thisPath):
                    continue

                dirList.append(thisPath)

            if not(dirList):
                return None, None, None, None, None, None

            dirList = sorted( dirList, key=str.lower )

            # newest day folder; a "_NN" suffix becomes the folder counter
            doypath = dirList[-1]
            foldercounter = int(doypath.split('_')[1]) if len(doypath.split('_'))>1 else 0
            fullpath = os.path.join(path, doypath, expLabel)


        print "%s folder was found: " %(fullpath )

        if set == None:
            filename = getlastFileFromPath(fullpath, ext)
        else:
            filename = getFileFromSet(fullpath, ext, set)

        if not(filename):
            return None, None, None, None, None, None

        print "%s file was found" %(filename)

        if not(self.__verifyFile(os.path.join(fullpath, filename))):
            return None, None, None, None, None, None

        # file names follow xYYYYDDDSSS.ext
        year = int( filename[1:5] )
        doy = int( filename[5:8] )
        set = int( filename[8:11] )

        return fullpath, foldercounter, filename, year, doy, set
548 548
549 549 def __setNextFileOffline(self):
550 550
551 551 idFile = self.fileIndex
552 552
553 553 while (True):
554 554 idFile += 1
555 555 if not(idFile < len(self.filenameList)):
556 556 self.flagNoMoreFiles = 1
557 557 print "No more Files"
558 558 return 0
559 559
560 560 filename = self.filenameList[idFile]
561 561
562 562 if not(self.__verifyFile(filename)):
563 563 continue
564 564
565 565 fileSize = os.path.getsize(filename)
566 566 fp = open(filename,'rb')
567 567 break
568 568
569 569 self.flagIsNewFile = 1
570 570 self.fileIndex = idFile
571 571 self.filename = filename
572 572 self.fileSize = fileSize
573 573 self.fp = fp
574 574
575 575 print "Setting the file: %s"%self.filename
576 576
577 577 return 1
578 578
    def __setNextFileOnline(self):
        """
        Look for the next file with enough data to be read inside the current
        folder; when no valid file appears, wait and retry over the next
        self.nFiles candidate files.

        Affected:
            self.flagIsNewFile
            self.filename
            self.fileSize
            self.fp
            self.set
            self.flagNoMoreFiles

        Return:
            0 : no valid next file could be found
            1 : the file was opened successfully and is ready to be read

        Exceptions:
            When a given file cannot be opened
        """
        nFiles = 0
        fileOk_flag = False
        firstTime_flag = True

        self.set += 1

        # sets run 000..999; roll over into the next "_NN" folder
        if self.set > 999:
            self.set = 0
            self.foldercounter += 1

        # look for the first available file
        fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
        if fullfilename:
            if self.__verifyFile(fullfilename, False):
                fileOk_flag = True

        # if no file was found, wait and search again
        if not(fileOk_flag):
            for nFiles in range(self.nFiles+1):  # try the next self.nFiles+1 candidate files

                if firstTime_flag:  # on the first pass retry self.nTries times
                    tries = self.nTries
                else:
                    tries = 1       # afterwards only once per candidate

                for nTries in range( tries ):
                    if firstTime_flag:
                        print "\tWaiting %0.2f sec for the file \"%s\" , try %03d ..." % ( self.delay, filename, nTries+1 )
                        time.sleep( self.delay )
                    else:
                        # NOTE(review): self.optchar is not defined in this base
                        # class — presumably set by subclasses; confirm
                        print "\tSearching next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext)

                    fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
                    if fullfilename:
                        if self.__verifyFile(fullfilename):
                            fileOk_flag = True
                            break

                if fileOk_flag:
                    break

                firstTime_flag = False

                print "\tSkipping the file \"%s\" due to this file doesn't exist" % filename
                self.set += 1

                # candidate files exhausted: move on to the next day folder
                if nFiles == (self.nFiles-1):
                    self.set = 0
                    self.doy += 1
                    self.foldercounter = 0

        if fileOk_flag:
            self.fileSize = os.path.getsize( fullfilename )
            self.filename = fullfilename
            self.flagIsNewFile = 1
            if self.fp != None: self.fp.close()
            self.fp = open(fullfilename, 'rb')
            self.flagNoMoreFiles = 0
            print 'Setting the file: %s' % fullfilename
        else:
            self.fileSize = 0
            self.filename = None
            self.flagIsNewFile = 0
            self.fp = None
            self.flagNoMoreFiles = 1
            print 'No more Files'

        return fileOk_flag
668 668
669 669
670 670 def setNextFile(self):
671 671 if self.fp != None:
672 672 self.fp.close()
673 673
674 674 if self.online:
675 675 newFile = self.__setNextFileOnline()
676 676 else:
677 677 newFile = self.__setNextFileOffline()
678 678
679 679 if not(newFile):
680 680 return 0
681 681
682 682 self.__readFirstHeader()
683 683 self.nReadBlocks = 0
684 684 return 1
685 685
    def __waitNewBlock(self):
        """
        Return 1 when a new data block was found, 0 otherwise.

        Always returns 0 when reading in offline mode.
        """
        if not self.online:
            return 0

        if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
            return 0

        currentPointer = self.fp.tell()

        # a readable block needs its basic header plus the data payload
        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        for nTries in range( self.nTries ):

            # reopen and reseek so the file size/content are refreshed
            self.fp.close()
            self.fp = open( self.filename, 'rb' )
            self.fp.seek( currentPointer )

            self.fileSize = os.path.getsize( self.filename )
            currentSize = self.fileSize - currentPointer

            if ( currentSize >= neededSize ):
                self.__rdBasicHeader()
                return 1

            # file already complete according to its headers: nothing more will arrive
            if self.fileSize == self.fileSizeByHeader:
                # self.flagEoF = True
                return 0

            print "\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
            time.sleep( self.delay )


        return 0
724 724
    def waitDataBlock(self, pointer_location):
        """
        Wait until a full data block is available at *pointer_location*.

        Recomputes the number of bytes needed for one block and, when they are
        not yet in the file, sleeps self.delay seconds between up to
        self.nTries re-checks; the file is reopened on each try so its size is
        refreshed. Used by readBlocks to avoid reshaping a short `junk` array.

        Input:
            pointer_location : byte offset where the block payload starts

        Return:
            1 when blockSize bytes are available from pointer_location
            0 when they never became available
        """
        currentPointer = pointer_location

        # only the data payload: the basic header was already consumed
        neededSize = self.processingHeaderObj.blockSize #+ self.basicHeaderSize

        for nTries in range( self.nTries ):
            self.fp.close()
            self.fp = open( self.filename, 'rb' )
            self.fp.seek( currentPointer )

            self.fileSize = os.path.getsize( self.filename )
            currentSize = self.fileSize - currentPointer

            if ( currentSize >= neededSize ):
                return 1

            print "\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
            time.sleep( self.delay )

        return 0
746
747
    def __jumpToLastBlock(self):
        """
        On the very first online read, skip ahead so reading starts near the
        newest block of the file instead of its beginning. No-op afterwards.
        """
        if not(self.__isFirstTimeOnline):
            return

        csize = self.fileSize - self.fp.tell()
        blocksize = self.processingHeaderObj.blockSize

        # skip the first data block
        if csize > self.processingHeaderObj.blockSize:
            self.fp.seek(self.fp.tell() + blocksize)
        else:
            return

        csize = self.fileSize - self.fp.tell()
        neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
        # advance one (header+block) stride at a time until the pointer passes
        # the end of file, then back up one stride (seeking past EOF is legal)
        while True:

            if self.fp.tell()<self.fileSize:
                self.fp.seek(self.fp.tell() + neededsize)
            else:
                self.fp.seek(self.fp.tell() - neededsize)
                break

        # csize = self.fileSize - self.fp.tell()
        # neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
        # factor = int(csize/neededsize)
        # if factor > 0:
        #     self.fp.seek(self.fp.tell() + factor*neededsize)

        self.flagIsNewFile = 0
        self.__isFirstTimeOnline = 0
758 781
    def __setNewBlock(self):
        """
        Position the file pointer at the next data block, moving to the next
        file when the current one is exhausted.

        Return:
            1 when a block is ready to be read, 0 when no block is available.
        """
        if self.fp == None:
            return 0

        if self.online:
            self.__jumpToLastBlock()

        # a freshly opened file is already positioned after its first header
        if self.flagIsNewFile:
            return 1

        self.lastUTTime = self.basicHeaderObj.utc
        currentSize = self.fileSize - self.fp.tell()
        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        if (currentSize >= neededSize):
            self.__rdBasicHeader()
            return 1

        # online mode: the block may still be being written
        if self.__waitNewBlock():
            return 1

        if not(self.setNextFile()):
            return 0

        deltaTime = self.basicHeaderObj.utc - self.lastUTTime #

        # flag a time discontinuity between the last block and the new file
        self.flagTimeBlock = 0

        if deltaTime > self.maxTimeStep:
            self.flagTimeBlock = 1

        return 1
792 815
793 816
794 817 def readNextBlock(self):
795 818 if not(self.__setNewBlock()):
796 819 return 0
797 820
798 821 if not(self.readBlock()):
799 822 return 0
800 823
801 824 return 1
802 825
803 826 def __rdProcessingHeader(self, fp=None):
804 827 if fp == None:
805 828 fp = self.fp
806 829
807 830 self.processingHeaderObj.read(fp)
808 831
809 832 def __rdRadarControllerHeader(self, fp=None):
810 833 if fp == None:
811 834 fp = self.fp
812 835
813 836 self.radarControllerHeaderObj.read(fp)
814 837
815 838 def __rdSystemHeader(self, fp=None):
816 839 if fp == None:
817 840 fp = self.fp
818 841
819 842 self.systemHeaderObj.read(fp)
820 843
821 844 def __rdBasicHeader(self, fp=None):
822 845 if fp == None:
823 846 fp = self.fp
824 847
825 848 self.basicHeaderObj.read(fp)
826 849
827 850
    def __readFirstHeader(self):
        """
        Read the four headers at the start of a freshly opened file and derive
        the sample dtype, the inter-pulse period in seconds and the expected
        file size; finally let the subclass compute the block dimensions.
        """
        self.__rdBasicHeader()
        self.__rdSystemHeader()
        self.__rdRadarControllerHeader()
        self.__rdProcessingHeader()

        self.firstHeaderSize = self.basicHeaderObj.size

        # decode the sample width from the processing flags: 0..5 selects
        # int8/int16/int32/int64/float32/float64 complex pairs
        datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
        if datatype == 0:
            datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
        elif datatype == 1:
            datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')])
        elif datatype == 2:
            datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')])
        elif datatype == 3:
            datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')])
        elif datatype == 4:
            datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')])
        elif datatype == 5:
            datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')])
        else:
            raise ValueError, 'Data type was not defined'

        self.dtype = datatype_str
        # IPP comes in km: 2*1000*ipp/c gives the round-trip time in seconds
        self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
        # full headers once, then (basic header + block) per remaining block
        self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1)
        # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
        # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
        self.getBlockDimension()
858 881
859 882
    def __verifyFile(self, filename, msgFlag=True):
        """
        Check that *filename* can be opened and holds at least one full data
        block. When the block size is not yet known (first file), the four
        headers are read from the file itself to compute it.

        Input:
            filename : full path of the file to check
            msgFlag  : when True print diagnostics about rejected files

        Return:
            True when the file has enough data to be read, False otherwise.
        """
        msg = None
        try:
            fp = open(filename, 'rb')
            currentPosition = fp.tell()
        except:
            if msgFlag:
                print "The file %s can't be opened" % (filename)
            return False

        neededSize = self.processingHeaderObj.blockSize + self.firstHeaderSize

        # blockSize still unknown: derive it from this file's own headers
        if neededSize == 0:
            basicHeaderObj = BasicHeader(LOCALTIME)
            systemHeaderObj = SystemHeader()
            radarControllerHeaderObj = RadarControllerHeader()
            processingHeaderObj = ProcessingHeader()

            try:
                if not( basicHeaderObj.read(fp) ): raise IOError
                if not( systemHeaderObj.read(fp) ): raise IOError
                if not( radarControllerHeaderObj.read(fp) ): raise IOError
                if not( processingHeaderObj.read(fp) ): raise IOError
                data_type = int(numpy.log2((processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))

                neededSize = processingHeaderObj.blockSize + basicHeaderObj.size

            except:
                if msgFlag:
                    print "\tThe file %s is empty or it hasn't enough data" % filename

                fp.close()
                return False
        else:
            msg = "\tSkipping the file %s due to it hasn't enough data" %filename

        fp.close()
        fileSize = os.path.getsize(filename)
        currentSize = fileSize - currentPosition
        if currentSize < neededSize:
            if msgFlag and (msg != None):
                print msg #print"\tSkipping the file %s due to it hasn't enough data" %filename
            return False

        return True
905 928
906 929 def setup(self,
907 930 path=None,
908 931 startDate=None,
909 932 endDate=None,
910 933 startTime=datetime.time(0,0,0),
911 934 endTime=datetime.time(23,59,59),
912 935 set=None,
913 936 expLabel = "",
914 937 ext = None,
915 938 online = False,
916 939 delay = 60,
917 940 walk = True):
918 941
919 942 if path == None:
920 943 raise ValueError, "The path is not valid"
921 944
922 945 if ext == None:
923 946 ext = self.ext
924 947
925 948 if online:
926 949 print "Searching files in online mode..."
927 950
928 951 for nTries in range( self.nTries ):
929 952 fullpath, foldercounter, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext, walk=walk, set=set)
930 953
931 954 if fullpath:
932 955 break
933 956
934 957 print '\tWaiting %0.2f sec for an valid file in %s: try %02d ...' % (self.delay, path, nTries+1)
935 958 time.sleep( self.delay )
936 959
937 960 if not(fullpath):
938 961 print "There 'isn't valied files in %s" % path
939 962 return None
940 963
941 964 self.year = year
942 965 self.doy = doy
943 966 self.set = set - 1
944 967 self.path = path
945 968 self.foldercounter = foldercounter
946 969
947 970 else:
948 971 print "Searching files in offline mode ..."
949 972 pathList, filenameList = self.__searchFilesOffLine(path, startDate=startDate, endDate=endDate,
950 973 startTime=startTime, endTime=endTime,
951 974 set=set, expLabel=expLabel, ext=ext,
952 975 walk=walk)
953 976
954 977 if not(pathList):
955 978 print "No *%s files into the folder %s \nfor the range: %s - %s"%(ext, path,
956 979 datetime.datetime.combine(startDate,startTime).ctime(),
957 980 datetime.datetime.combine(endDate,endTime).ctime())
958 981
959 982 sys.exit(-1)
960 983
961 984
962 985 self.fileIndex = -1
963 986 self.pathList = pathList
964 987 self.filenameList = filenameList
965 988
966 989 self.online = online
967 990 self.delay = delay
968 991 ext = ext.lower()
969 992 self.ext = ext
970 993
971 994 if not(self.setNextFile()):
972 995 if (startDate!=None) and (endDate!=None):
973 996 print "No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
974 997 elif startDate != None:
975 998 print "No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
976 999 else:
977 1000 print "No files"
978 1001
979 1002 sys.exit(-1)
980 1003
981 1004 # self.updateDataHeader()
982 1005
983 1006 return self.dataOut
984 1007
985 1008 def getBasicHeader(self):
986 1009
987 1010 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000. + self.profileIndex * self.ippSeconds
988 1011
989 1012 self.dataOut.flagTimeBlock = self.flagTimeBlock
990 1013
991 1014 self.dataOut.timeZone = self.basicHeaderObj.timeZone
992 1015
993 1016 self.dataOut.dstFlag = self.basicHeaderObj.dstFlag
994 1017
995 1018 self.dataOut.errorCount = self.basicHeaderObj.errorCount
996 1019
997 1020 self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime
998 1021
    def getFirstHeader(self):
        """
        Abstract: copy first-header information into self.dataOut; implemented by subclasses.
        """
        raise ValueError, "This method has not been implemented"
1002 1025
1003 1026 def getData():
1004 1027
1005 1028 raise ValueError, "This method has not been implemented"
1006 1029
1007 1030 def hasNotDataInBuffer():
1008 1031
1009 1032 raise ValueError, "This method has not been implemented"
1010 1033
1011 1034 def readBlock():
1012 1035
1013 1036 raise ValueError, "This method has not been implemented"
1014 1037
    def isEndProcess(self):
        # Non-zero when there are no more files to read.
        return self.flagNoMoreFiles
1018 1041
    def printReadBlocks(self):
        # Print how many blocks were read from the current file.
        print "Number of read blocks per file %04d" %self.nReadBlocks
1022 1045
    def printTotalBlocks(self):
        # Print the total number of blocks read across all files.
        print "Number of read blocks %04d" %self.nTotalBlocks
1026 1049
1027 1050 def printNumberOfBlock(self):
1028 1051
1029 1052 if self.flagIsNewBlock:
1030 1053 print "Block No. %04d, Total blocks %04d -> %s" %(self.basicHeaderObj.dataBlock, self.nTotalBlocks, self.dataOut.datatime.ctime())
1031 1054
    def printInfo(self):
        """Print every file header, but only the first time it is called."""
        if self.__printInfo == False:
            return

        self.basicHeaderObj.printInfo()
        self.systemHeaderObj.printInfo()
        self.radarControllerHeaderObj.printInfo()
        self.processingHeaderObj.printInfo()

        # suppress the output on subsequent calls
        self.__printInfo = False
1043 1066
1044 1067
    def run(self, **kwargs):
        """Operation entry point: configure the reader on the first call,
        then fetch the next data unit into self.dataOut."""
        if not(self.isConfig):

            # self.dataOut = dataOut
            self.setup(**kwargs)
            self.isConfig = True

        self.getData()
1054 1077
class JRODataWriter(JRODataIO, Operation):

    """
    Abstract base class for writing processed data files (.r or .pdata).
    Data is always written to disk one block at a time.
    """

    # index of the current block within the file being written
    blockIndex = 0

    # destination directory for the output files
    path = None

    # "set" number used to build the output filenames (xYYYYDDDSSS.ext)
    setFile = None

    # number of profiles stored in each data block
    profilesPerBlock = None

    # number of blocks written per file before rotating to a new file
    blocksPerFile = None

    # total number of blocks written so far
    nWriteBlocks = 0
1073 1096
1074 1097 def __init__(self, dataOut=None):
1075 1098 raise ValueError, "Not implemented"
1076 1099
1077 1100
1078 1101 def hasAllDataInBuffer(self):
1079 1102 raise ValueError, "Not implemented"
1080 1103
1081 1104
1082 1105 def setBlockDimension(self):
1083 1106 raise ValueError, "Not implemented"
1084 1107
1085 1108
1086 1109 def writeBlock(self):
1087 1110 raise ValueError, "No implemented"
1088 1111
1089 1112
1090 1113 def putData(self):
1091 1114 raise ValueError, "No implemented"
1092 1115
1093 1116
    def setBasicHeader(self):
        """Fill the basic header with size, version, block counter and the
        timing information taken from self.dataOut."""
        self.basicHeaderObj.size = self.basicHeaderSize #bytes
        self.basicHeaderObj.version = self.versionFile
        self.basicHeaderObj.dataBlock = self.nTotalBlocks

        # split the float timestamp into integer seconds plus milliseconds
        utc = numpy.floor(self.dataOut.utctime)
        milisecond = (self.dataOut.utctime - utc)* 1000.0

        self.basicHeaderObj.utc = utc
        self.basicHeaderObj.miliSecond = milisecond
        self.basicHeaderObj.timeZone = self.dataOut.timeZone
        self.basicHeaderObj.dstFlag = self.dataOut.dstFlag
        self.basicHeaderObj.errorCount = self.dataOut.errorCount
1108 1131
1109 1132 def setFirstHeader(self):
1110 1133 """
1111 1134 Obtiene una copia del First Header
1112 1135
1113 1136 Affected:
1114 1137
1115 1138 self.basicHeaderObj
1116 1139 self.systemHeaderObj
1117 1140 self.radarControllerHeaderObj
1118 1141 self.processingHeaderObj self.
1119 1142
1120 1143 Return:
1121 1144 None
1122 1145 """
1123 1146
1124 1147 raise ValueError, "No implemented"
1125 1148
    def __writeFirstHeader(self):
        """
        Write the first header of the file: the basic header followed by the
        long header (SystemHeader, RadarControllerHeader, ProcessingHeader).

        Affected:
            self.basicHeaderObj.size
            self.dtype

        Return:
            None
        """

        # the basic-header size field must account for the long header too
        sizeLongHeader = self.systemHeaderObj.size + self.radarControllerHeaderObj.size + self.processingHeaderObj.size
        self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader

        self.basicHeaderObj.write(self.fp)
        self.systemHeaderObj.write(self.fp)
        self.radarControllerHeaderObj.write(self.fp)
        self.processingHeaderObj.write(self.fp)

        self.dtype = self.dataOut.dtype
1148 1171
    def __setNewBlock(self):
        """
        Prepare the file for a new block: open the next file if needed, or
        write a fresh basic header when the current file still has room.

        Return:
            0 : nothing could be written
            1 : the basic header (or the complete first header) was written
        """
        if self.fp == None:
            self.setNextFile()

        # a brand-new file already got its first header in setNextFile()
        if self.flagIsNewFile:
            return 1

        if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
            self.basicHeaderObj.write(self.fp)
            return 1

        # current file is full: rotate to the next one
        if not( self.setNextFile() ):
            return 0

        return 1
1171 1194
1172 1195
    def writeNextBlock(self):
        """
        Prepare the file for the next data block and write the block to it.

        Return:
            0 : the data block could not be written
            1 : the data block was written successfully
        """
        if not( self.__setNewBlock() ):
            return 0

        self.writeBlock()

        return 1
1187 1210
    def setNextFile(self):
        """
        Determine and open the next file to be written. The file lives in a
        per-day subfolder (dYYYYDDD) and continues the "set" numbering found
        in any files already present there.

        Affected:
            self.filename
            self.subfolder
            self.fp
            self.setFile
            self.flagIsNewFile

        Return:
            0 : the file could not be opened for writing
            1 : the file is ready to be written
        """
        ext = self.ext
        path = self.path

        if self.fp != None:
            self.fp.close()

        timeTuple = time.localtime( self.dataOut.utctime)
        subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)

        fullpath = os.path.join( path, subfolder )
        if not( os.path.exists(fullpath) ):
            os.mkdir(fullpath)
            self.setFile = -1 #initialize the set counter
        else:
            filesList = os.listdir( fullpath )
            if len( filesList ) > 0:
                filesList = sorted( filesList, key=str.lower )
                filen = filesList[-1]
                # the filename is expected to have the following layout
                # 0 1234 567 89A BCDE (hex)
                # x YYYY DDD SSS .ext
                if isNumber( filen[8:11] ):
                    self.setFile = int( filen[8:11] ) #continue from the set number of the newest existing file
                else:
                    self.setFile = -1
            else:
                self.setFile = -1 #initialize the set counter

        setFile = self.setFile
        setFile += 1

        file = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
                                        timeTuple.tm_year,
                                        timeTuple.tm_yday,
                                        setFile,
                                        ext )

        filename = os.path.join( path, subfolder, file )

        fp = open( filename,'wb' )

        self.blockIndex = 0

        #save the attributes
        self.filename = filename
        self.subfolder = subfolder
        self.fp = fp
        self.setFile = setFile
        self.flagIsNewFile = 1

        self.setFirstHeader()

        print 'Writing the file: %s'%self.filename

        self.__writeFirstHeader()

        return 1
1260 1283
    def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=64, set=0, ext=None):
        """
        Configure the writer and open the first output file.

        Inputs:
            dataOut : data object whose contents will be written
            path : destination directory for the output files
            blocksPerFile : number of blocks written per file
            profilesPerBlock : number of profiles stored per block
            set : starting "set" number for the output filenames
            ext : file extension; defaults to the class extension

        Return:
            0 : the setup failed (no file could be opened)
            1 : the setup succeeded
        """

        if ext == None:
            ext = self.ext

        ext = ext.lower()

        self.ext = ext

        self.path = path

        # setNextFile() pre-increments, so start one below the requested set
        self.setFile = set - 1

        self.blocksPerFile = blocksPerFile

        self.profilesPerBlock = profilesPerBlock

        self.dataOut = dataOut

        if not(self.setNextFile()):
            print "There isn't a next file"
            return 0

        self.setBlockDimension()

        return 1
1299 1322
    def run(self, dataOut, **kwargs):
        """Operation entry point: configure the writer on the first call,
        then buffer/write the given data unit."""
        if not(self.isConfig):

            self.setup(dataOut, **kwargs)
            self.isConfig = True

        self.putData()
1308 1331
class VoltageReader(JRODataReader):
    """
    Reads voltage data from raw-data files (.r). Reading is always done one
    block at a time; each block (a 3-D array of profiles * heights *
    channels) is stored in the internal buffer ("datablock").

        profiles * heights * channels

    This class holds instances of BasicHeader, SystemHeader,
    RadarControllerHeader and Voltage. The first three store the metadata
    read from the file headers; the Voltage object receives one profile from
    the buffer every time getData() is executed.

    Example:

        dpath = "/home/myuser/data"

        startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)

        endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)

        readerObj = VoltageReader()

        readerObj.setup(dpath, startTime, endTime)

        while(True):

            #to get one profile
            profile = readerObj.getData()

            #print the profile
            print profile

            #If you want to see all datablock
            print readerObj.datablock

            if readerObj.flagNoMoreFiles:
                break

    """

    # default file extension for raw voltage data
    ext = ".r"

    # filename type character ("D" for data files)
    optchar = "D"

    dataOut = None
1354 1377
1355 1378
    def __init__(self):
        """
        Initialize the VoltageReader.

        A default Voltage object is created in self.dataOut; it receives one
        profile from the read buffer on every getData() call. When the
        buffer is empty a new data block is read from file first.

        Affected:
            self.dataOut

        Return:
            None
        """

        self.isConfig = False

        self.datablock = None

        self.utc = 0

        self.ext = ".r"

        self.optchar = "D"

        # header containers filled while reading
        self.basicHeaderObj = BasicHeader(LOCALTIME)

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()

        self.online = 0

        self.fp = None

        self.idFile = None

        self.dtype = None

        self.fileSizeByHeader = None

        self.filenameList = []

        self.filename = None

        self.fileSize = None

        self.firstHeaderSize = 0

        self.basicHeaderSize = 24

        self.pathList = []

        self.filenameList = []

        self.lastUTTime = 0

        # maximum allowed gap (seconds) between consecutive blocks
        self.maxTimeStep = 30

        self.flagNoMoreFiles = 0

        self.set = 0

        self.path = None

        # start past any valid index so the first getData() reads a block
        self.profileIndex = 2**32-1

        self.delay = 3 #seconds

        self.nTries = 3 #quantity tries

        self.nFiles = 3 #number of files for searching

        self.nReadBlocks = 0

        self.flagIsNewFile = 1

        self.__isFirstTimeOnline = 1

        self.ippSeconds = 0

        self.flagTimeBlock = 0

        self.flagIsNewBlock = 0

        self.nTotalBlocks = 0

        self.blocksize = 0

        self.dataOut = self.createObjByDefault()
1452 1475
1453 1476 def createObjByDefault(self):
1454 1477
1455 1478 dataObj = Voltage()
1456 1479
1457 1480 return dataObj
1458 1481
1459 1482 def __hasNotDataInBuffer(self):
1460 1483 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
1461 1484 return 1
1462 1485 return 0
1463 1486
1464 1487
1465 1488 def getBlockDimension(self):
1466 1489 """
1467 1490 Obtiene la cantidad de puntos a leer por cada bloque de datos
1468 1491
1469 1492 Affected:
1470 1493 self.blocksize
1471 1494
1472 1495 Return:
1473 1496 None
1474 1497 """
1475 1498 pts2read = self.processingHeaderObj.profilesPerBlock * self.processingHeaderObj.nHeights * self.systemHeaderObj.nChannels
1476 1499 self.blocksize = pts2read
1477 1500
1478 1501
1479 1502 def readBlock(self):
1480 1503 """
1481 1504 readBlock lee el bloque de datos desde la posicion actual del puntero del archivo
1482 1505 (self.fp) y actualiza todos los parametros relacionados al bloque de datos
1483 1506 (metadata + data). La data leida es almacenada en el buffer y el contador del buffer
1484 1507 es seteado a 0
1485 1508
1486 1509 Inputs:
1487 1510 None
1488 1511
1489 1512 Return:
1490 1513 None
1491 1514
1492 1515 Affected:
1493 1516 self.profileIndex
1494 1517 self.datablock
1495 1518 self.flagIsNewFile
1496 1519 self.flagIsNewBlock
1497 1520 self.nTotalBlocks
1498 1521
1499 1522 Exceptions:
1500 1523 Si un bloque leido no es un bloque valido
1501 1524 """
1502
1525 current_pointer_location = self.fp.tell()
1503 1526 junk = numpy.fromfile( self.fp, self.dtype, self.blocksize )
1504 1527
1505 1528 try:
1506 1529 junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
1507 1530 except:
1508 print "The read block (%3d) has not enough data" %self.nReadBlocks
1509 return 0
1531 #print "The read block (%3d) has not enough data" %self.nReadBlocks
1532
1533 if self.waitDataBlock(pointer_location=current_pointer_location):
1534 junk = numpy.fromfile( self.fp, self.dtype, self.blocksize )
1535 junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
1536 # return 0
1510 1537
1511 1538 junk = numpy.transpose(junk, (2,0,1))
1512 1539 self.datablock = junk['real'] + junk['imag']*1j
1513 1540
1514 1541 self.profileIndex = 0
1515 1542
1516 1543 self.flagIsNewFile = 0
1517 1544 self.flagIsNewBlock = 1
1518 1545
1519 1546 self.nTotalBlocks += 1
1520 1547 self.nReadBlocks += 1
1521 1548
1522 1549 return 1
1523 1550
1524 1551 def getFirstHeader(self):
1525 1552
1526 1553 self.dataOut.dtype = self.dtype
1527 1554
1528 1555 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
1529 1556
1530 1557 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
1531 1558
1532 1559 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
1533 1560
1534 1561 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
1535 1562
1536 1563 self.dataOut.ippSeconds = self.ippSeconds
1537 1564
1538 1565 self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt
1539 1566
1540 1567 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
1541 1568
1542 1569 self.dataOut.flagShiftFFT = False
1543 1570
1544 1571 if self.radarControllerHeaderObj.code != None:
1545 1572
1546 1573 self.dataOut.nCode = self.radarControllerHeaderObj.nCode
1547 1574
1548 1575 self.dataOut.nBaud = self.radarControllerHeaderObj.nBaud
1549 1576
1550 1577 self.dataOut.code = self.radarControllerHeaderObj.code
1551 1578
1552 1579 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
1553 1580
1554 1581 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
1555 1582
1556 1583 self.dataOut.flagDecodeData = False #asumo q la data no esta decodificada
1557 1584
1558 1585 self.dataOut.flagDeflipData = False #asumo q la data no esta sin flip
1559 1586
1560 1587 self.dataOut.flagShiftFFT = False
1561 1588
1562 1589 def getData(self):
1563 1590 """
1564 1591 getData obtiene una unidad de datos del buffer de lectura y la copia a la clase "Voltage"
1565 1592 con todos los parametros asociados a este (metadata). cuando no hay datos en el buffer de
1566 1593 lectura es necesario hacer una nueva lectura de los bloques de datos usando "readNextBlock"
1567 1594
1568 1595 Ademas incrementa el contador del buffer en 1.
1569 1596
1570 1597 Return:
1571 1598 data : retorna un perfil de voltages (alturas * canales) copiados desde el
1572 1599 buffer. Si no hay mas archivos a leer retorna None.
1573 1600
1574 1601 Variables afectadas:
1575 1602 self.dataOut
1576 1603 self.profileIndex
1577 1604
1578 1605 Affected:
1579 1606 self.dataOut
1580 1607 self.profileIndex
1581 1608 self.flagTimeBlock
1582 1609 self.flagIsNewBlock
1583 1610 """
1584 1611
1585 1612 if self.flagNoMoreFiles:
1586 1613 self.dataOut.flagNoData = True
1587 1614 print 'Process finished'
1588 1615 return 0
1589 1616
1590 1617 self.flagTimeBlock = 0
1591 1618 self.flagIsNewBlock = 0
1592 1619
1593 1620 if self.__hasNotDataInBuffer():
1594 1621
1595 1622 if not( self.readNextBlock() ):
1596 1623 return 0
1597 1624
1598 1625 self.getFirstHeader()
1599 1626
1600 1627 if self.datablock == None:
1601 1628 self.dataOut.flagNoData = True
1602 1629 return 0
1603 1630
1604 1631 self.dataOut.data = self.datablock[:,self.profileIndex,:]
1605 1632
1606 1633 self.dataOut.flagNoData = False
1607 1634
1608 1635 self.getBasicHeader()
1609 1636
1610 1637 self.profileIndex += 1
1611 1638
1612 1639 self.dataOut.realtime = self.online
1613 1640
1614 1641 return self.dataOut.data
1615 1642
1616 1643
class VoltageWriter(JRODataWriter):
    """
    Writer for processed voltage data files (.r). Data is always written to
    disk one block at a time.
    """

    ext = ".r"

    optchar = "D"

    # shape (profiles, heights, channels) of the block written to disk
    shapeBuffer = None
1628 1655
1629 1656
    def __init__(self):
        """
        Initialize the VoltageWriter.

        Affected:
            self.dataOut

        Return: None
        """

        self.nTotalBlocks = 0

        self.profileIndex = 0

        self.isConfig = False

        self.fp = None

        self.flagIsNewFile = 1

        self.nTotalBlocks = 0

        self.flagIsNewBlock = 0

        self.setFile = None

        self.dtype = None

        self.path = None

        self.filename = None

        # header containers written to every output file
        self.basicHeaderObj = BasicHeader(LOCALTIME)

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()
1669 1696
1670 1697 def hasAllDataInBuffer(self):
1671 1698 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
1672 1699 return 1
1673 1700 return 0
1674 1701
1675 1702
1676 1703 def setBlockDimension(self):
1677 1704 """
1678 1705 Obtiene las formas dimensionales del los subbloques de datos que componen un bloque
1679 1706
1680 1707 Affected:
1681 1708 self.shape_spc_Buffer
1682 1709 self.shape_cspc_Buffer
1683 1710 self.shape_dc_Buffer
1684 1711
1685 1712 Return: None
1686 1713 """
1687 1714 self.shapeBuffer = (self.processingHeaderObj.profilesPerBlock,
1688 1715 self.processingHeaderObj.nHeights,
1689 1716 self.systemHeaderObj.nChannels)
1690 1717
1691 1718 self.datablock = numpy.zeros((self.systemHeaderObj.nChannels,
1692 1719 self.processingHeaderObj.profilesPerBlock,
1693 1720 self.processingHeaderObj.nHeights),
1694 1721 dtype=numpy.dtype('complex64'))
1695 1722
1696 1723
    def writeBlock(self):
        """
        Write the buffered block to the output file.

        The buffer is kept as (channels, profiles, heights); it is transposed
        to the on-disk layout (profiles, heights, channels) and split into
        the structured real/imag fields before being written.

        Affected:
            self.profileIndex
            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
            self.blockIndex

        Return: None
        """
        data = numpy.zeros( self.shapeBuffer, self.dtype )

        junk = numpy.transpose(self.datablock, (1,2,0))

        data['real'] = junk.real
        data['imag'] = junk.imag

        data = data.reshape( (-1) )

        data.tofile( self.fp )

        # reset the buffer for the next block
        self.datablock.fill(0)

        self.profileIndex = 0
        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1

        self.blockIndex += 1
        self.nTotalBlocks += 1
1729 1756
    def putData(self):
        """
        Store one profile in the block buffer and flush the block to the
        file once the buffer is complete.

        Affected:
            self.flagIsNewBlock
            self.profileIndex

        Return:
            0 : no data available, or no more files can be written
            1 : the profile was buffered (and possibly written out)
        """
        if self.dataOut.flagNoData:
            return 0

        self.flagIsNewBlock = 0

        if self.dataOut.flagTimeBlock:
            # time discontinuity: drop the partial block and start a new file
            self.datablock.fill(0)
            self.profileIndex = 0
            self.setNextFile()

        if self.profileIndex == 0:
            self.setBasicHeader()

        self.datablock[:,self.profileIndex,:] = self.dataOut.data

        self.profileIndex += 1

        if self.hasAllDataInBuffer():
            #if self.flagIsNewFile:
            self.writeNextBlock()
            # self.setFirstHeader()

        return 1
1766 1793
1767 1794 def __getProcessFlags(self):
1768 1795
1769 1796 processFlags = 0
1770 1797
1771 1798 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
1772 1799 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
1773 1800 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
1774 1801 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
1775 1802 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
1776 1803 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
1777 1804
1778 1805 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
1779 1806
1780 1807
1781 1808
1782 1809 datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
1783 1810 PROCFLAG.DATATYPE_SHORT,
1784 1811 PROCFLAG.DATATYPE_LONG,
1785 1812 PROCFLAG.DATATYPE_INT64,
1786 1813 PROCFLAG.DATATYPE_FLOAT,
1787 1814 PROCFLAG.DATATYPE_DOUBLE]
1788 1815
1789 1816
1790 1817 for index in range(len(dtypeList)):
1791 1818 if self.dataOut.dtype == dtypeList[index]:
1792 1819 dtypeValue = datatypeValueList[index]
1793 1820 break
1794 1821
1795 1822 processFlags += dtypeValue
1796 1823
1797 1824 if self.dataOut.flagDecodeData:
1798 1825 processFlags += PROCFLAG.DECODE_DATA
1799 1826
1800 1827 if self.dataOut.flagDeflipData:
1801 1828 processFlags += PROCFLAG.DEFLIP_DATA
1802 1829
1803 1830 if self.dataOut.code != None:
1804 1831 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
1805 1832
1806 1833 if self.dataOut.nCohInt > 1:
1807 1834 processFlags += PROCFLAG.COHERENT_INTEGRATION
1808 1835
1809 1836 return processFlags
1810 1837
1811 1838
1812 1839 def __getBlockSize(self):
1813 1840 '''
1814 1841 Este metodos determina el cantidad de bytes para un bloque de datos de tipo Voltage
1815 1842 '''
1816 1843
1817 1844 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
1818 1845 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
1819 1846 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
1820 1847 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
1821 1848 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
1822 1849 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
1823 1850
1824 1851 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
1825 1852 datatypeValueList = [1,2,4,8,4,8]
1826 1853 for index in range(len(dtypeList)):
1827 1854 if self.dataOut.dtype == dtypeList[index]:
1828 1855 datatypeValue = datatypeValueList[index]
1829 1856 break
1830 1857
1831 1858 blocksize = int(self.dataOut.nHeights * self.dataOut.nChannels * self.profilesPerBlock * datatypeValue * 2)
1832 1859
1833 1860 return blocksize
1834 1861
    def setFirstHeader(self):

        """
        Build the first header (system, radar-controller and processing
        headers) of the output file from the metadata of self.dataOut.

        Affected:
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.dtype

        Return:
            None
        """

        self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
        self.systemHeaderObj.nChannels = self.dataOut.nChannels
        self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()

        self.setBasicHeader()

        processingHeaderSize = 40 # bytes
        self.processingHeaderObj.dtype = 0 # Voltage
        self.processingHeaderObj.blockSize = self.__getBlockSize()
        self.processingHeaderObj.profilesPerBlock = self.profilesPerBlock
        self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
        self.processingHeaderObj.nWindows = 1 # could be 1 or self.dataOut.processingHeaderObj.nWindows
        self.processingHeaderObj.processFlags = self.__getProcessFlags()
        self.processingHeaderObj.nCohInt = self.dataOut.nCohInt
        self.processingHeaderObj.nIncohInt = 1 # source data is of type Voltage
        self.processingHeaderObj.totalSpectra = 0 # source data is of type Voltage

        # if self.dataOut.code != None:
        #     self.processingHeaderObj.code = self.dataOut.code
        #     self.processingHeaderObj.nCode = self.dataOut.nCode
        #     self.processingHeaderObj.nBaud = self.dataOut.nBaud
        #     codesize = int(8 + 4 * self.dataOut.nCode * self.dataOut.nBaud)
        #     processingHeaderSize += codesize

        if self.processingHeaderObj.nWindows != 0:
            self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
            self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
            self.processingHeaderObj.nHeights = self.dataOut.nHeights
            self.processingHeaderObj.samplesWin = self.dataOut.nHeights
            # the sampling-window fields add 12 bytes to the header
            processingHeaderSize += 12

        self.processingHeaderObj.size = processingHeaderSize
1881 1908
class SpectraReader(JRODataReader):
    """
    Reads spectra data from processed files (.pdata). Reading is always done
    one block at a time. Each block (a 3-D array) is stored in three
    buffers: self spectra, cross spectra and DC channels.

        equal-channel pairs * heights * profiles (Self Spectra)
        different-channel pairs * heights * profiles (Cross Spectra)
        channels * heights (DC Channels)

    This class holds instances of BasicHeader, SystemHeader,
    RadarControllerHeader and Spectra. The first three store the metadata
    read from the file headers; the Spectra object receives one block of
    data every time getData() is executed.

    Example:
        dpath = "/home/myuser/data"

        startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)

        endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)

        readerObj = SpectraReader()

        readerObj.setup(dpath, startTime, endTime)

        while(True):

            readerObj.getData()

            print readerObj.data_spc

            print readerObj.data_cspc

            print readerObj.data_dc

            if readerObj.flagNoMoreFiles:
                break

    """

    # points to read per block for each spectra component
    pts2read_SelfSpectra = 0

    pts2read_CrossSpectra = 0

    pts2read_DCchannels = 0

    ext = ".pdata"

    optchar = "P"

    dataOut = None

    # number of equal-channel (self spectra) combinations in the file
    nRdChannels = None

    # number of different-channel (cross spectra) combinations in the file
    nRdPairs = None

    # list of (chanA, chanB) tuples for the cross-spectra pairs
    rdPairList = []
1940 1967
    def __init__(self):
        """
        Initialize the SpectraReader.

        A default Spectra object is created in self.dataOut; it receives one
        block of data every time getData() is called. When the buffer is
        empty a new block is read from file first.

        Affected:
            self.dataOut

        Return : None
        """

        self.isConfig = False

        self.pts2read_SelfSpectra = 0

        self.pts2read_CrossSpectra = 0

        self.pts2read_DCchannels = 0

        self.datablock = None

        self.utc = None

        self.ext = ".pdata"

        self.optchar = "P"

        # header containers filled while reading
        self.basicHeaderObj = BasicHeader(LOCALTIME)

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()

        self.online = 0

        self.fp = None

        self.idFile = None

        self.dtype = None

        self.fileSizeByHeader = None

        self.filenameList = []

        self.filename = None

        self.fileSize = None

        self.firstHeaderSize = 0

        self.basicHeaderSize = 24

        self.pathList = []

        self.lastUTTime = 0

        # maximum allowed gap (seconds) between consecutive blocks
        self.maxTimeStep = 30

        self.flagNoMoreFiles = 0

        self.set = 0

        self.path = None

        self.delay = 60 #seconds

        self.nTries = 3 #quantity tries

        self.nFiles = 3 #number of files for searching

        self.nReadBlocks = 0

        self.flagIsNewFile = 1

        self.__isFirstTimeOnline = 1

        self.ippSeconds = 0

        self.flagTimeBlock = 0

        self.flagIsNewBlock = 0

        self.nTotalBlocks = 0

        self.blocksize = 0

        self.dataOut = self.createObjByDefault()

        self.profileIndex = 1 #Always
2040 2067
2041 2068
2042 2069 def createObjByDefault(self):
2043 2070
2044 2071 dataObj = Spectra()
2045 2072
2046 2073 return dataObj
2047 2074
2048 2075 def __hasNotDataInBuffer(self):
2049 2076 return 1
2050 2077
2051 2078
2052 2079 def getBlockDimension(self):
2053 2080 """
2054 2081 Obtiene la cantidad de puntos a leer por cada bloque de datos
2055 2082
2056 2083 Affected:
2057 2084 self.nRdChannels
2058 2085 self.nRdPairs
2059 2086 self.pts2read_SelfSpectra
2060 2087 self.pts2read_CrossSpectra
2061 2088 self.pts2read_DCchannels
2062 2089 self.blocksize
2063 2090 self.dataOut.nChannels
2064 2091 self.dataOut.nPairs
2065 2092
2066 2093 Return:
2067 2094 None
2068 2095 """
2069 2096 self.nRdChannels = 0
2070 2097 self.nRdPairs = 0
2071 2098 self.rdPairList = []
2072 2099
2073 2100 for i in range(0, self.processingHeaderObj.totalSpectra*2, 2):
2074 2101 if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i+1]:
2075 2102 self.nRdChannels = self.nRdChannels + 1 #par de canales iguales
2076 2103 else:
2077 2104 self.nRdPairs = self.nRdPairs + 1 #par de canales diferentes
2078 2105 self.rdPairList.append((self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1]))
2079 2106
2080 2107 pts2read = self.processingHeaderObj.nHeights * self.processingHeaderObj.profilesPerBlock
2081 2108
2082 2109 self.pts2read_SelfSpectra = int(self.nRdChannels * pts2read)
2083 2110 self.blocksize = self.pts2read_SelfSpectra
2084 2111
2085 2112 if self.processingHeaderObj.flag_cspc:
2086 2113 self.pts2read_CrossSpectra = int(self.nRdPairs * pts2read)
2087 2114 self.blocksize += self.pts2read_CrossSpectra
2088 2115
2089 2116 if self.processingHeaderObj.flag_dc:
2090 2117 self.pts2read_DCchannels = int(self.systemHeaderObj.nChannels * self.processingHeaderObj.nHeights)
2091 2118 self.blocksize += self.pts2read_DCchannels
2092 2119
2093 2120 # self.blocksize = self.pts2read_SelfSpectra + self.pts2read_CrossSpectra + self.pts2read_DCchannels
2094 2121
2095 2122
2096 2123 def readBlock(self):
2097 2124 """
2098 2125 Lee el bloque de datos desde la posicion actual del puntero del archivo
2099 2126 (self.fp) y actualiza todos los parametros relacionados al bloque de datos
2100 2127 (metadata + data). La data leida es almacenada en el buffer y el contador del buffer
2101 2128 es seteado a 0
2102 2129
2103 2130 Return: None
2104 2131
2105 2132 Variables afectadas:
2106 2133
2107 2134 self.flagIsNewFile
2108 2135 self.flagIsNewBlock
2109 2136 self.nTotalBlocks
2110 2137 self.data_spc
2111 2138 self.data_cspc
2112 2139 self.data_dc
2113 2140
2114 2141 Exceptions:
2115 2142 Si un bloque leido no es un bloque valido
2116 2143 """
2117 2144 blockOk_flag = False
2118 2145 fpointer = self.fp.tell()
2119 2146
2120 2147 spc = numpy.fromfile( self.fp, self.dtype[0], self.pts2read_SelfSpectra )
2121 2148 spc = spc.reshape( (self.nRdChannels, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #transforma a un arreglo 3D
2122 2149
2123 2150 if self.processingHeaderObj.flag_cspc:
2124 2151 cspc = numpy.fromfile( self.fp, self.dtype, self.pts2read_CrossSpectra )
2125 2152 cspc = cspc.reshape( (self.nRdPairs, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #transforma a un arreglo 3D
2126 2153
2127 2154 if self.processingHeaderObj.flag_dc:
2128 2155 dc = numpy.fromfile( self.fp, self.dtype, self.pts2read_DCchannels ) #int(self.processingHeaderObj.nHeights*self.systemHeaderObj.nChannels) )
2129 2156 dc = dc.reshape( (self.systemHeaderObj.nChannels, self.processingHeaderObj.nHeights) ) #transforma a un arreglo 2D
2130 2157
2131 2158
2132 2159 if not(self.processingHeaderObj.shif_fft):
2133 2160 #desplaza a la derecha en el eje 2 determinadas posiciones
2134 2161 shift = int(self.processingHeaderObj.profilesPerBlock/2)
2135 2162 spc = numpy.roll( spc, shift , axis=2 )
2136 2163
2137 2164 if self.processingHeaderObj.flag_cspc:
2138 2165 #desplaza a la derecha en el eje 2 determinadas posiciones
2139 2166 cspc = numpy.roll( cspc, shift, axis=2 )
2140 2167
2141 2168 # self.processingHeaderObj.shif_fft = True
2142 2169
2143 2170 spc = numpy.transpose( spc, (0,2,1) )
2144 2171 self.data_spc = spc
2145 2172
2146 2173 if self.processingHeaderObj.flag_cspc:
2147 2174 cspc = numpy.transpose( cspc, (0,2,1) )
2148 2175 self.data_cspc = cspc['real'] + cspc['imag']*1j
2149 2176 else:
2150 2177 self.data_cspc = None
2151 2178
2152 2179 if self.processingHeaderObj.flag_dc:
2153 2180 self.data_dc = dc['real'] + dc['imag']*1j
2154 2181 else:
2155 2182 self.data_dc = None
2156 2183
2157 2184 self.flagIsNewFile = 0
2158 2185 self.flagIsNewBlock = 1
2159 2186
2160 2187 self.nTotalBlocks += 1
2161 2188 self.nReadBlocks += 1
2162 2189
2163 2190 return 1
2164 2191
2165 2192 def getFirstHeader(self):
2166 2193
2167 2194 self.dataOut.dtype = self.dtype
2168 2195
2169 2196 self.dataOut.nPairs = self.nRdPairs
2170 2197
2171 2198 self.dataOut.pairsList = self.rdPairList
2172 2199
2173 2200 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
2174 2201
2175 2202 self.dataOut.nFFTPoints = self.processingHeaderObj.profilesPerBlock
2176 2203
2177 2204 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
2178 2205
2179 2206 self.dataOut.nIncohInt = self.processingHeaderObj.nIncohInt
2180 2207
2181 2208 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
2182 2209
2183 2210 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
2184 2211
2185 2212 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
2186 2213
2187 2214 self.dataOut.ippSeconds = self.ippSeconds
2188 2215
2189 2216 self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt * self.processingHeaderObj.nIncohInt * self.dataOut.nFFTPoints
2190 2217
2191 2218 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
2192 2219
2193 2220 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
2194 2221
2195 2222 self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft
2196 2223
2197 2224 self.dataOut.flagDecodeData = False #asumo q la data no esta decodificada
2198 2225
2199 2226 self.dataOut.flagDeflipData = True #asumo q la data no esta sin flip
2200 2227
2201 2228 if self.processingHeaderObj.code != None:
2202 2229
2203 2230 self.dataOut.nCode = self.processingHeaderObj.nCode
2204 2231
2205 2232 self.dataOut.nBaud = self.processingHeaderObj.nBaud
2206 2233
2207 2234 self.dataOut.code = self.processingHeaderObj.code
2208 2235
2209 2236 self.dataOut.flagDecodeData = True
2210 2237
2211 2238 def getData(self):
2212 2239 """
2213 2240 Copia el buffer de lectura a la clase "Spectra",
2214 2241 con todos los parametros asociados a este (metadata). cuando no hay datos en el buffer de
2215 2242 lectura es necesario hacer una nueva lectura de los bloques de datos usando "readNextBlock"
2216 2243
2217 2244 Return:
2218 2245 0 : Si no hay mas archivos disponibles
2219 2246 1 : Si hizo una buena copia del buffer
2220 2247
2221 2248 Affected:
2222 2249 self.dataOut
2223 2250
2224 2251 self.flagTimeBlock
2225 2252 self.flagIsNewBlock
2226 2253 """
2227 2254
2228 2255 if self.flagNoMoreFiles:
2229 2256 self.dataOut.flagNoData = True
2230 2257 print 'Process finished'
2231 2258 return 0
2232 2259
2233 2260 self.flagTimeBlock = 0
2234 2261 self.flagIsNewBlock = 0
2235 2262
2236 2263 if self.__hasNotDataInBuffer():
2237 2264
2238 2265 if not( self.readNextBlock() ):
2239 2266 self.dataOut.flagNoData = True
2240 2267 return 0
2241 2268
2242 2269 #data es un numpy array de 3 dmensiones (perfiles, alturas y canales)
2243 2270
2244 2271 if self.data_dc == None:
2245 2272 self.dataOut.flagNoData = True
2246 2273 return 0
2247 2274
2248 2275 self.getBasicHeader()
2249 2276
2250 2277 self.getFirstHeader()
2251 2278
2252 2279 self.dataOut.data_spc = self.data_spc
2253 2280
2254 2281 self.dataOut.data_cspc = self.data_cspc
2255 2282
2256 2283 self.dataOut.data_dc = self.data_dc
2257 2284
2258 2285 self.dataOut.flagNoData = False
2259 2286
2260 2287 self.dataOut.realtime = self.online
2261 2288
2262 2289 return self.dataOut.data_spc
2263 2290
2264 2291
class SpectraWriter(JRODataWriter):

    """
    Writes spectra data to processed data files (.pdata). Data is always
    written to disk one block at a time.
    """

    ext = ".pdata"

    optchar = "P"

    # shapes of the per-block write buffers, filled in by setBlockDimension()
    shape_spc_Buffer = None

    shape_cspc_Buffer = None

    shape_dc_Buffer = None

    # current block buffers: self-spectra, cross-spectra and DC channels
    data_spc = None

    data_cspc = None

    data_dc = None

#     dataOut = None

    def __init__(self):
        """
        Initialize the SpectraWriter used to write spectra data files.

        Affected:
            self.dataOut
            self.basicHeaderObj
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.processingHeaderObj

        Return: None
        """

        self.isConfig = False

        self.nTotalBlocks = 0

        self.data_spc = None

        self.data_cspc = None

        self.data_dc = None

        self.fp = None

        self.flagIsNewFile = 1

        self.nTotalBlocks = 0

        self.flagIsNewBlock = 0

        self.setFile = None

        self.dtype = None

        self.path = None

        self.noMoreFiles = 0

        self.filename = None

        self.basicHeaderObj = BasicHeader(LOCALTIME)

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()


    def hasAllDataInBuffer(self):
        # every putData() call provides one complete block, so the buffer is
        # always ready to be written
        return 1


    def setBlockDimension(self):
        """
        Compute the dimensional shapes of the sub-blocks that make up a block.

        Affected:
            self.shape_spc_Buffer
            self.shape_cspc_Buffer
            self.shape_dc_Buffer

        Return: None
        """
        self.shape_spc_Buffer = (self.dataOut.nChannels,
                                 self.processingHeaderObj.nHeights,
                                 self.processingHeaderObj.profilesPerBlock)

        self.shape_cspc_Buffer = (self.dataOut.nPairs,
                                  self.processingHeaderObj.nHeights,
                                  self.processingHeaderObj.profilesPerBlock)

        self.shape_dc_Buffer = (self.dataOut.nChannels,
                                self.processingHeaderObj.nHeights)


    def writeBlock(self):
        """
        Write the buffered block to the current output file.

        Affected:
            self.data_spc
            self.data_cspc
            self.data_dc
            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
            self.nWriteBlocks

        Return: None
        """

        spc = numpy.transpose( self.data_spc, (0,2,1) )
        if not( self.processingHeaderObj.shif_fft ):
            # shift the spectra to the right along axis 2; integer division
            # keeps the shift an int under a true-division interpreter
            spc = numpy.roll( spc, self.processingHeaderObj.profilesPerBlock//2, axis=2 )
        data = spc.reshape((-1))
        data = data.astype(self.dtype[0])
        data.tofile(self.fp)

        # identity tests below: "!= None" triggers an elementwise comparison
        # on numpy arrays, which cannot be used in a boolean context
        if self.data_cspc is not None:
            data = numpy.zeros( self.shape_cspc_Buffer, self.dtype )
            cspc = numpy.transpose( self.data_cspc, (0,2,1) )
            if not( self.processingHeaderObj.shif_fft ):
                cspc = numpy.roll( cspc, self.processingHeaderObj.profilesPerBlock//2, axis=2 ) #shift right along axis 2
            data['real'] = cspc.real
            data['imag'] = cspc.imag
            data = data.reshape((-1))
            data.tofile(self.fp)

        if self.data_dc is not None:
            data = numpy.zeros( self.shape_dc_Buffer, self.dtype )
            dc = self.data_dc
            data['real'] = dc.real
            data['imag'] = dc.imag
            data = data.reshape((-1))
            data.tofile(self.fp)

        self.data_spc.fill(0)

        if self.data_dc is not None:
            self.data_dc.fill(0)

        if self.data_cspc is not None:
            self.data_cspc.fill(0)

        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1
        self.nTotalBlocks += 1
        self.nWriteBlocks += 1
        self.blockIndex += 1


    def putData(self):
        """
        Store one block of data and write it to file.

        Affected:
            self.data_spc
            self.data_cspc
            self.data_dc

        Return:
            0 : no data to write or no more files can be written
            1 : one data block was written to file
        """

        if self.dataOut.flagNoData:
            return 0

        self.flagIsNewBlock = 0

        if self.dataOut.flagTimeBlock:
            # guards added: the optional buffers may still be None here
            self.data_spc.fill(0)
            if self.data_cspc is not None:
                self.data_cspc.fill(0)
            if self.data_dc is not None:
                self.data_dc.fill(0)
            self.setNextFile()

        if self.flagIsNewFile == 0:
            self.setBasicHeader()

        self.data_spc = self.dataOut.data_spc.copy()
        # identity test instead of "!= None" (elementwise on numpy arrays)
        if self.dataOut.data_cspc is not None:
            self.data_cspc = self.dataOut.data_cspc.copy()
        self.data_dc = self.dataOut.data_dc.copy()

        # #self.processingHeaderObj.dataBlocksPerFile)
        if self.hasAllDataInBuffer():
#             self.setFirstHeader()
            self.writeNextBlock()

        return 1


    def __getProcessFlags(self):
        """
        Build the PROCFLAG bitmask describing the data type and the
        processing applied (decoding, deflip, integration, saved DC).
        """

        processFlags = 0

        dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
        dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
        dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
        dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
        dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
        dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]

        datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
                             PROCFLAG.DATATYPE_SHORT,
                             PROCFLAG.DATATYPE_LONG,
                             PROCFLAG.DATATYPE_INT64,
                             PROCFLAG.DATATYPE_FLOAT,
                             PROCFLAG.DATATYPE_DOUBLE]

        for index in range(len(dtypeList)):
            if self.dataOut.dtype == dtypeList[index]:
                dtypeValue = datatypeValueList[index]
                break

        processFlags += dtypeValue

        if self.dataOut.flagDecodeData:
            processFlags += PROCFLAG.DECODE_DATA

        if self.dataOut.flagDeflipData:
            processFlags += PROCFLAG.DEFLIP_DATA

        # identity test: self.dataOut.code may be a numpy array
        if self.dataOut.code is not None:
            processFlags += PROCFLAG.DEFINE_PROCESS_CODE

        if self.dataOut.nIncohInt > 1:
            processFlags += PROCFLAG.INCOHERENT_INTEGRATION

        if self.dataOut.data_dc is not None:
            processFlags += PROCFLAG.SAVE_CHANNELS_DC

        return processFlags


    def __getBlockSize(self):
        '''
        Return the number of bytes one Spectra data block occupies on disk.
        '''

        dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
        dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
        dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
        dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
        dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
        dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
        datatypeValueList = [1,2,4,8,4,8] # bytes per sample component
        for index in range(len(dtypeList)):
            if self.dataOut.dtype == dtypeList[index]:
                datatypeValue = datatypeValueList[index]
                break


        pts2write = self.dataOut.nHeights * self.dataOut.nFFTPoints

        pts2write_SelfSpectra = int(self.dataOut.nChannels * pts2write)
        blocksize = (pts2write_SelfSpectra*datatypeValue)

        # identity tests: these buffers may be numpy arrays
        if self.dataOut.data_cspc is not None:
            pts2write_CrossSpectra = int(self.dataOut.nPairs * pts2write)
            blocksize += (pts2write_CrossSpectra*datatypeValue*2)

        if self.dataOut.data_dc is not None:
            pts2write_DCchannels = int(self.dataOut.nChannels * self.dataOut.nHeights)
            blocksize += (pts2write_DCchannels*datatypeValue*2)

        blocksize = blocksize #* datatypeValue * 2 #FIX THIS

        return blocksize

    def setFirstHeader(self):

        """
        Fill the first-header objects from self.dataOut.

        Affected:
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.dtype

        Return:
            None
        """

        self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
        self.systemHeaderObj.nChannels = self.dataOut.nChannels
        self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()

        self.setBasicHeader()

        processingHeaderSize = 40 # bytes
        self.processingHeaderObj.dtype = 1 # Spectra
        self.processingHeaderObj.blockSize = self.__getBlockSize()
        self.processingHeaderObj.profilesPerBlock = self.dataOut.nFFTPoints
        self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
        self.processingHeaderObj.nWindows = 1 #could be 1 or self.dataOut.processingHeaderObj.nWindows
        self.processingHeaderObj.processFlags = self.__getProcessFlags()
        self.processingHeaderObj.nCohInt = self.dataOut.nCohInt# needed to compute the timeInterval value
        self.processingHeaderObj.nIncohInt = self.dataOut.nIncohInt
        self.processingHeaderObj.totalSpectra = self.dataOut.nPairs + self.dataOut.nChannels
        self.processingHeaderObj.shif_fft = self.dataOut.flagShiftFFT

        if self.processingHeaderObj.totalSpectra > 0:
            channelList = []
            # each auto-spectrum channel appears twice in spectraComb
            for channel in range(self.dataOut.nChannels):
                channelList.append(channel)
                channelList.append(channel)

            pairsList = []
            if self.dataOut.nPairs > 0:
                for pair in self.dataOut.pairsList:
                    pairsList.append(pair[0])
                    pairsList.append(pair[1])

            spectraComb = channelList + pairsList
            spectraComb = numpy.array(spectraComb,dtype="u1")
            self.processingHeaderObj.spectraComb = spectraComb
            sizeOfSpcComb = len(spectraComb)
            processingHeaderSize += sizeOfSpcComb

        # The processing header should not have information about code
#         if self.dataOut.code != None:
#             self.processingHeaderObj.code = self.dataOut.code
#             self.processingHeaderObj.nCode = self.dataOut.nCode
#             self.processingHeaderObj.nBaud = self.dataOut.nBaud
#             nCodeSize = 4 # bytes
#             nBaudSize = 4 # bytes
#             codeSize = 4 # bytes
#             sizeOfCode = int(nCodeSize + nBaudSize + codeSize * self.dataOut.nCode * self.dataOut.nBaud)
#             processingHeaderSize += sizeOfCode

        if self.processingHeaderObj.nWindows != 0:
            self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
            self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
            self.processingHeaderObj.nHeights = self.dataOut.nHeights
            self.processingHeaderObj.samplesWin = self.dataOut.nHeights
            sizeOfFirstHeight = 4
            sizeOfdeltaHeight = 4
            sizeOfnHeights = 4
            sizeOfWindows = (sizeOfFirstHeight + sizeOfdeltaHeight + sizeOfnHeights)*self.processingHeaderObj.nWindows
            processingHeaderSize += sizeOfWindows

        self.processingHeaderObj.size = processingHeaderSize
2623 2650
class SpectraHeisWriter(Operation):
    """
    Writes HEIS spectra blocks to FITS files: one file per block, grouped in
    per-day subfolders under the configured output path.
    """
#     set = None
    setFile = None
    idblock = None
    doypath = None
    subfolder = None

    def __init__(self):
        self.wrObj = FITS()
#         self.dataOut = dataOut
        self.nTotalBlocks=0
#         self.set = None
        self.setFile = None
        self.idblock = 0
        self.wrpath = None
        self.doypath = None
        self.subfolder = None
        self.isConfig = False

    @staticmethod
    def isNumber(str):
        """
        Check whether a string can be converted to a number.

        Declared as a staticmethod: the original definition took the string in
        place of self, which made the method impossible to call on an instance.

        Input:
            str : the string to analyze

        Return:
            True : the string is numeric
            False : the string is not numeric
        """
        try:
            float( str )
            return True
        except Exception:
            return False

    def setup(self, dataOut, wrpath):
        """Create the output directory when needed and keep the references used by putData()."""

        if not(os.path.exists(wrpath)):
            os.mkdir(wrpath)

        self.wrpath = wrpath
#         self.setFile = 0
        self.dataOut = dataOut

    def putData(self):
        """
        Write the current data block as a FITS file: a primary HDU with the
        block id and timestamp, plus one power column per channel (in dB).

        Return: 1
        """
        name= time.localtime( self.dataOut.utctime)
        ext=".fits"

        # identity tests below instead of "== None"
        if self.doypath is None:
            self.subfolder = 'F%4.4d%3.3d_%d' % (name.tm_year,name.tm_yday,time.mktime(datetime.datetime.now().timetuple()))
            self.doypath = os.path.join( self.wrpath, self.subfolder )
            os.mkdir(self.doypath)

        if self.setFile is None:
#             self.set = self.dataOut.set
            self.setFile = 0
#         if self.set != self.dataOut.set:
# #            self.set = self.dataOut.set
#             self.setFile = 0

        #make the filename
        file = 'D%4.4d%3.3d_%3.3d%s' % (name.tm_year,name.tm_yday,self.setFile,ext)

        filename = os.path.join(self.wrpath,self.subfolder, file)

        idblock = numpy.array([self.idblock],dtype="int64")
        header=self.wrObj.cFImage(idblock=idblock,
                                  year=time.gmtime(self.dataOut.utctime).tm_year,
                                  month=time.gmtime(self.dataOut.utctime).tm_mon,
                                  day=time.gmtime(self.dataOut.utctime).tm_mday,
                                  hour=time.gmtime(self.dataOut.utctime).tm_hour,
                                  minute=time.gmtime(self.dataOut.utctime).tm_min,
                                  second=time.gmtime(self.dataOut.utctime).tm_sec)

        # frequency axis from the height sampling (c = speed of light)
        c=3E8
        deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
        freq=numpy.arange(-1*self.dataOut.nHeights/2.,self.dataOut.nHeights/2.)*(c/(2*deltaHeight*1000))

        colList = []

        colFreq=self.wrObj.setColF(name="freq", format=str(self.dataOut.nFFTPoints)+'E', array=freq)

        colList.append(colFreq)

        nchannel=self.dataOut.nChannels

        for i in range(nchannel):
            col = self.wrObj.writeData(name="PCh"+str(i+1),
                                       format=str(self.dataOut.nFFTPoints)+'E',
                                       data=10*numpy.log10(self.dataOut.data_spc[i,:]))

            colList.append(col)

        data=self.wrObj.Ctable(colList=colList)

        self.wrObj.CFile(header,data)

        self.wrObj.wFile(filename)

        #update the setFile
        self.setFile += 1
        self.idblock += 1

        return 1

    def run(self, dataOut, **kwargs):
        """Configure the writer on the first call, then write every incoming block."""

        if not(self.isConfig):

            self.setup(dataOut, **kwargs)
            self.isConfig = True

        self.putData()
2740 2767
2741 2768
class FITS:
    """
    Thin wrapper around pyfits used to assemble and write one FITS file:
    a primary HDU built by cFImage() plus a binary table built from the
    columns returned by setColF()/writeData().
    """
    # last column/image state, kept on the instance by each builder method
    name=None
    format=None
    array =None
    data =None
    thdulist=None
    prihdr=None
    hdu=None

    def __init__(self):

        pass

    def setColF(self,name,format,array):
        """Build and return a float32 pyfits Column from `array` (also kept as self.col1)."""
        self.name=name
        self.format=format
        self.array=array
        a1=numpy.array([self.array],dtype=numpy.float32)
        self.col1 = pyfits.Column(name=self.name, format=self.format, array=a1)
        return self.col1

#     def setColP(self,name,format,data):
#         self.name=name
#         self.format=format
#         self.data=data
#         a2=numpy.array([self.data],dtype=numpy.float32)
#         self.col2 = pyfits.Column(name=self.name, format=self.format, array=a2)
#         return self.col2


    def writeData(self,name,format,data):
        """Build and return a float32 pyfits Column from `data` (also kept as self.col2)."""
        self.name=name
        self.format=format
        self.data=data
        a2=numpy.array([self.data],dtype=numpy.float32)
        self.col2 = pyfits.Column(name=self.name, format=self.format, array=a2)
        return self.col2

    def cFImage(self,idblock,year,month,day,hour,minute,second):
        """Build and return the primary HDU holding the block id and its date/time headers."""
        self.hdu= pyfits.PrimaryHDU(idblock)
        self.hdu.header.set("Year",year)
        self.hdu.header.set("Month",month)
        self.hdu.header.set("Day",day)
        self.hdu.header.set("Hour",hour)
        self.hdu.header.set("Minute",minute)
        self.hdu.header.set("Second",second)
        return self.hdu


    def Ctable(self,colList):
        """Build and return a binary-table HDU from the given pyfits columns."""
        self.cols=pyfits.ColDefs(colList)
        self.tbhdu = pyfits.new_table(self.cols)
        return self.tbhdu


    def CFile(self,hdu,tbhdu):
        """Assemble the HDU list (primary + table) to be written by wFile()."""
        self.thdulist=pyfits.HDUList([hdu,tbhdu])

    def wFile(self,filename):
        """Write the assembled HDU list to `filename`, replacing any existing file."""
        if os.path.isfile(filename):
            os.remove(filename)
        self.thdulist.writeto(filename)
2804 2831
2805 2832
class ParameterConf:
    """Holds one <Parameter name="..." value="..."/> entry read from a metadata XML file."""

    ELEMENTNAME = 'Parameter'

    def __init__(self):
        self.name = ''
        self.value = ''

    def readXml(self, parmElement):
        """Load the name/value pair from the XML element's attributes."""
        for attribute in ('name', 'value'):
            setattr(self, attribute, parmElement.get(attribute))

    def getElementName(self):
        """Return the XML tag name this class maps to."""
        return self.ELEMENTNAME
2818 2845
class Metadata:
    """
    Parse a metadata XML file and collect every <Parameter> element found in
    it as ParameterConf objects (available in self.parmConfObjList).
    """

    def __init__(self, filename):
        # parameters are accumulated here by readXml()
        self.parmConfObjList = []
        self.readXml(filename)

    def readXml(self, filename):
        """
        Parse `filename` and fill self.parmConfObjList with one ParameterConf
        per <Parameter> element in the document.
        """
        self.projectElement = None
        self.procUnitConfObjDict = {}
        self.projectElement = ElementTree().parse(filename)
        self.project = self.projectElement.tag

        # Element.getiterator() is deprecated (and removed in Python 3.9);
        # Element.iter() is the supported equivalent since Python 2.7
        parmElementList = self.projectElement.iter(ParameterConf().getElementName())

        for parmElement in parmElementList:
            parmConfObj = ParameterConf()
            parmConfObj.readXml(parmElement)
            self.parmConfObjList.append(parmConfObj)
2837 2864
class FitsWriter(Operation):
    """
    Writes data blocks to FITS files: one primary HDU whose header comes from
    an XML metadata file, plus one image extension appended per data block.
    """

    def __init__(self):
        # writer state: configuration flag, per-file block counter and
        # output-file naming parameters
        self.isConfig = False
        self.dataBlocksPerFile = None
        self.blockIndex = 0
        self.flagIsNewFile = 1
        self.fitsObj = None
        self.optchar = 'P'
        self.ext = '.fits'
        self.setFile = 0

    def setFitsHeader(self, dataOut, metadatafile):
        """
        Create the primary HDU of self.filename, filling its header with the
        parameters read from `metadatafile`; the special values
        'fromdatadatetime', 'fromdataheights', 'fromdatachannel' and
        'fromdatasamples' are resolved from `dataOut`.
        """

        header_data = pyfits.PrimaryHDU()

        metadata4fits = Metadata(metadatafile)
        for parameter in metadata4fits.parmConfObjList:
            parm_name = parameter.name
            parm_value = parameter.value

            if parm_value == 'fromdatadatetime':
                value = time.strftime("%b %d %Y %H:%M:%S", dataOut.datatime.timetuple())
            elif parm_value == 'fromdataheights':
                value = dataOut.nHeights
            elif parm_value == 'fromdatachannel':
                value = dataOut.nChannels
            elif parm_value == 'fromdatasamples':
                value = dataOut.nFFTPoints
            else:
                value = parm_value

            header_data.header[parm_name] = value

        header_data.header['NBLOCK'] = self.blockIndex

        header_data.writeto(self.filename)


    def setup(self, dataOut, path, dataBlocksPerFile, metadatafile):
        # keep the references needed by putData()/setNextFile()
        self.path = path
        self.dataOut = dataOut
        self.metadatafile = metadatafile
        self.dataBlocksPerFile = dataBlocksPerFile

    def open(self):
        # reopen the current file so new extensions can be appended
        self.fitsObj = pyfits.open(self.filename, mode='update')


    def addData(self, data):
        """Append `data` as a new image extension and update the block counter on disk."""
        self.open()
        extension = pyfits.ImageHDU(data=data, name=self.fitsObj[0].header['DATA'])
        extension.header['UTCTIME'] = self.dataOut.utctime
        self.fitsObj.append(extension)
        self.blockIndex += 1
        self.fitsObj[0].header['NBLOCK'] = self.blockIndex

        self.write()

    def write(self):
        # flush pending changes and close the file
        self.fitsObj.flush(verbose=True)
        self.fitsObj.close()


    def setNextFile(self):
        """
        Build the next output filename (P<year><doy><set>.fits inside a
        d<year><doy> subfolder), write its primary header and reset the
        per-file block counter.

        Return: 1
        """
        ext = self.ext
        path = self.path

        timeTuple = time.localtime( self.dataOut.utctime)
        subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)

        fullpath = os.path.join( path, subfolder )
        if not( os.path.exists(fullpath) ):
            os.mkdir(fullpath)
            self.setFile = -1 #initialize the set counter
        else:
            filesList = os.listdir( fullpath )
            if len( filesList ) > 0:
                filesList = sorted( filesList, key=str.lower )
                filen = filesList[-1]

                if isNumber( filen[8:11] ):
                    self.setFile = int( filen[8:11] ) #continue from the set of the last file
                else:
                    self.setFile = -1
            else:
                self.setFile = -1 #initialize the set counter

        setFile = self.setFile
        setFile += 1

        file = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
                                        timeTuple.tm_year,
                                        timeTuple.tm_yday,
                                        setFile,
                                        ext )

        filename = os.path.join( path, subfolder, file )

        self.blockIndex = 0
        self.filename = filename
        self.setFile = setFile
        self.flagIsNewFile = 1

        print 'Writing the file: %s'%self.filename

        self.setFitsHeader(self.dataOut, self.metadatafile)

        return 1

    def writeBlock(self):
        # one block = the current self-spectra buffer
        self.addData(self.dataOut.data_spc)
        self.flagIsNewFile = 0


    def __setNewBlock(self):
        # decide whether the current file can take one more block or a new
        # file must be opened first
        if self.flagIsNewFile:
            return 1

        if self.blockIndex < self.dataBlocksPerFile:
            return 1

        if not( self.setNextFile() ):
            return 0

        return 1

    def writeNextBlock(self):
        """Write one block, opening a new file first when the current one is full."""
        if not( self.__setNewBlock() ):
            return 0
        self.writeBlock()
        return 1

    def putData(self):
        if self.flagIsNewFile:
            self.setNextFile()
        self.writeNextBlock()

    def run(self, dataOut, **kwargs):
        """Configure the writer on the first call, then write every incoming block."""
        if not(self.isConfig):
            self.setup(dataOut, **kwargs)
            self.isConfig = True
        self.putData()
2985 3012
2986 3013
2987 3014 class FitsReader(ProcessingUnit):
2988 3015
2989 3016 __TIMEZONE = time.timezone
2990 3017
2991 3018 expName = None
2992 3019 datetimestr = None
2993 3020 utc = None
2994 3021 nChannels = None
2995 3022 nSamples = None
2996 3023 dataBlocksPerFile = None
2997 3024 comments = None
2998 3025 lastUTTime = None
2999 3026 header_dict = None
3000 3027 data = None
3001 3028 data_header_dict = None
3002 3029
    def __init__(self):
        """Initialize reader state: flags, counters, file bookkeeping and the default output object."""
        self.isConfig = False
        self.ext = '.fits'
        self.setFile = 0
        self.flagNoMoreFiles = 0
        self.flagIsNewFile = 1
        self.flagTimeBlock = None
        self.fileIndex = None
        self.filename = None
        self.fileSize = None
        self.fitsObj = None
        self.nReadBlocks = 0
        self.nTotalBlocks = 0
        self.dataOut = self.createObjByDefault()
        self.maxTimeStep = 10 # should be defined by the user via setup()
        self.blockIndex = 1
3019 3046
3020 3047 def createObjByDefault(self):
3021 3048
3022 3049 dataObj = Fits()
3023 3050
3024 3051 return dataObj
3025 3052
3026 3053 def isFileinThisTime(self, filename, startTime, endTime, useLocalTime=False):
3027 3054 try:
3028 3055 fitsObj = pyfits.open(filename,'readonly')
3029 3056 except:
3030 3057 raise IOError, "The file %s can't be opened" %(filename)
3031 3058
3032 3059 header = fitsObj[0].header
3033 3060 struct_time = time.strptime(header['DATETIME'], "%b %d %Y %H:%M:%S")
3034 3061 utc = time.mktime(struct_time) - time.timezone #TIMEZONE debe ser un parametro del header FITS
3035 3062
3036 3063 ltc = utc
3037 3064 if useLocalTime:
3038 3065 ltc -= time.timezone
3039 3066 thisDatetime = datetime.datetime.utcfromtimestamp(ltc)
3040 3067 thisTime = thisDatetime.time()
3041 3068
3042 3069 if not ((startTime <= thisTime) and (endTime > thisTime)):
3043 3070 return None
3044 3071
3045 3072 return thisDatetime
3046 3073
3047 3074 def __setNextFileOnline(self):
3048 3075 raise ValueError, "No implemented"
3049 3076
3050 3077 def __setNextFileOffline(self):
3051 3078 idFile = self.fileIndex
3052 3079
3053 3080 while (True):
3054 3081 idFile += 1
3055 3082 if not(idFile < len(self.filenameList)):
3056 3083 self.flagNoMoreFiles = 1
3057 3084 print "No more Files"
3058 3085 return 0
3059 3086
3060 3087 filename = self.filenameList[idFile]
3061 3088
3062 3089 # if not(self.__verifyFile(filename)):
3063 3090 # continue
3064 3091
3065 3092 fileSize = os.path.getsize(filename)
3066 3093 fitsObj = pyfits.open(filename,'readonly')
3067 3094 break
3068 3095
3069 3096 self.flagIsNewFile = 1
3070 3097 self.fileIndex = idFile
3071 3098 self.filename = filename
3072 3099 self.fileSize = fileSize
3073 3100 self.fitsObj = fitsObj
3074 3101
3075 3102 print "Setting the file: %s"%self.filename
3076 3103
3077 3104 return 1
3078 3105
3079 3106 def readHeader(self):
3080 3107 headerObj = self.fitsObj[0]
3081 3108
3082 3109 self.header_dict = headerObj.header
3083 3110 self.expName = headerObj.header['EXPNAME']
3084 3111 self.datetimestr = headerObj.header['DATETIME']
3085 3112 struct_time = time.strptime(headerObj.header['DATETIME'], "%b %d %Y %H:%M:%S")
3086 3113 # self.utc = time.mktime(struct_time) - self.__TIMEZONE
3087 3114 self.nChannels = headerObj.header['NCHANNEL']
3088 3115 self.nSamples = headerObj.header['NSAMPLE']
3089 3116 self.dataBlocksPerFile = headerObj.header['NBLOCK']
3090 3117 self.comments = headerObj.header['COMMENT']
3091 3118
3092 3119
3093 3120 def setNextFile(self):
3094 3121
3095 3122 if self.online:
3096 3123 newFile = self.__setNextFileOnline()
3097 3124 else:
3098 3125 newFile = self.__setNextFileOffline()
3099 3126
3100 3127 if not(newFile):
3101 3128 return 0
3102 3129
3103 3130 self.readHeader()
3104 3131
3105 3132 self.nReadBlocks = 0
3106 3133 self.blockIndex = 1
3107 3134 return 1
3108 3135
3109 3136 def __searchFilesOffLine(self,
3110 3137 path,
3111 3138 startDate,
3112 3139 endDate,
3113 3140 startTime=datetime.time(0,0,0),
3114 3141 endTime=datetime.time(23,59,59),
3115 3142 set=None,
3116 3143 expLabel='',
3117 3144 ext='.fits',
3118 3145 walk=True):
3119 3146
3120 3147 pathList = []
3121 3148
3122 3149 if not walk:
3123 3150 pathList.append(path)
3124 3151
3125 3152 else:
3126 3153 dirList = []
3127 3154 for thisPath in os.listdir(path):
3128 3155 if not os.path.isdir(os.path.join(path,thisPath)):
3129 3156 continue
3130 3157 if not isDoyFolder(thisPath):
3131 3158 continue
3132 3159
3133 3160 dirList.append(thisPath)
3134 3161
3135 3162 if not(dirList):
3136 3163 return None, None
3137 3164
3138 3165 thisDate = startDate
3139 3166
3140 3167 while(thisDate <= endDate):
3141 3168 year = thisDate.timetuple().tm_year
3142 3169 doy = thisDate.timetuple().tm_yday
3143 3170
3144 3171 matchlist = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy) + '*')
3145 3172 if len(matchlist) == 0:
3146 3173 thisDate += datetime.timedelta(1)
3147 3174 continue
3148 3175 for match in matchlist:
3149 3176 pathList.append(os.path.join(path,match,expLabel))
3150 3177
3151 3178 thisDate += datetime.timedelta(1)
3152 3179
3153 3180 if pathList == []:
3154 3181 print "Any folder was found for the date range: %s-%s" %(startDate, endDate)
3155 3182 return None, None
3156 3183
3157 3184 print "%d folder(s) was(were) found for the date range: %s - %s" %(len(pathList), startDate, endDate)
3158 3185
3159 3186 filenameList = []
3160 3187 datetimeList = []
3161 3188
3162 3189 for i in range(len(pathList)):
3163 3190
3164 3191 thisPath = pathList[i]
3165 3192
3166 3193 fileList = glob.glob1(thisPath, "*%s" %ext)
3167 3194 fileList.sort()
3168 3195
3169 3196 for file in fileList:
3170 3197
3171 3198 filename = os.path.join(thisPath,file)
3172 3199 thisDatetime = self.isFileinThisTime(filename, startTime, endTime, useLocalTime=True)
3173 3200
3174 3201 if not(thisDatetime):
3175 3202 continue
3176 3203
3177 3204 filenameList.append(filename)
3178 3205 datetimeList.append(thisDatetime)
3179 3206
3180 3207 if not(filenameList):
3181 3208 print "Any file was found for the time range %s - %s" %(startTime, endTime)
3182 3209 return None, None
3183 3210
3184 3211 print "%d file(s) was(were) found for the time range: %s - %s" %(len(filenameList), startTime, endTime)
3185 3212 print
3186 3213
3187 3214 for i in range(len(filenameList)):
3188 3215 print "%s -> [%s]" %(filenameList[i], datetimeList[i].ctime())
3189 3216
3190 3217 self.filenameList = filenameList
3191 3218 self.datetimeList = datetimeList
3192 3219
3193 3220 return pathList, filenameList
3194 3221
3195 3222 def setup(self, path=None,
3196 3223 startDate=None,
3197 3224 endDate=None,
3198 3225 startTime=datetime.time(0,0,0),
3199 3226 endTime=datetime.time(23,59,59),
3200 3227 set=0,
3201 3228 expLabel = "",
3202 3229 ext = None,
3203 3230 online = False,
3204 3231 delay = 60,
3205 3232 walk = True):
3206 3233
3207 3234 if path == None:
3208 3235 raise ValueError, "The path is not valid"
3209 3236
3210 3237 if ext == None:
3211 3238 ext = self.ext
3212 3239
3213 3240 if not(online):
3214 3241 print "Searching files in offline mode ..."
3215 3242 pathList, filenameList = self.__searchFilesOffLine(path, startDate=startDate, endDate=endDate,
3216 3243 startTime=startTime, endTime=endTime,
3217 3244 set=set, expLabel=expLabel, ext=ext,
3218 3245 walk=walk)
3219 3246
3220 3247 if not(pathList):
3221 3248 print "No *%s files into the folder %s \nfor the range: %s - %s"%(ext, path,
3222 3249 datetime.datetime.combine(startDate,startTime).ctime(),
3223 3250 datetime.datetime.combine(endDate,endTime).ctime())
3224 3251
3225 3252 sys.exit(-1)
3226 3253
3227 3254 self.fileIndex = -1
3228 3255 self.pathList = pathList
3229 3256 self.filenameList = filenameList
3230 3257
3231 3258 self.online = online
3232 3259 self.delay = delay
3233 3260 ext = ext.lower()
3234 3261 self.ext = ext
3235 3262
3236 3263 if not(self.setNextFile()):
3237 3264 if (startDate!=None) and (endDate!=None):
3238 3265 print "No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
3239 3266 elif startDate != None:
3240 3267 print "No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
3241 3268 else:
3242 3269 print "No files"
3243 3270
3244 3271 sys.exit(-1)
3245 3272
3246 3273
3247 3274
3248 3275 def readBlock(self):
3249 3276 dataObj = self.fitsObj[self.blockIndex]
3250 3277
3251 3278 self.data = dataObj.data
3252 3279 self.data_header_dict = dataObj.header
3253 3280 self.utc = self.data_header_dict['UTCTIME']
3254 3281
3255 3282 self.flagIsNewFile = 0
3256 3283 self.blockIndex += 1
3257 3284 self.nTotalBlocks += 1
3258 3285 self.nReadBlocks += 1
3259 3286
3260 3287 return 1
3261 3288
3262 3289 def __jumpToLastBlock(self):
3263 3290 raise ValueError, "No implemented"
3264 3291
3265 3292 def __waitNewBlock(self):
3266 3293 """
3267 3294 Return 1 si se encontro un nuevo bloque de datos, 0 de otra forma.
3268 3295
3269 3296 Si el modo de lectura es OffLine siempre retorn 0
3270 3297 """
3271 3298 if not self.online:
3272 3299 return 0
3273 3300
3274 3301 if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
3275 3302 return 0
3276 3303
3277 3304 currentPointer = self.fp.tell()
3278 3305
3279 3306 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
3280 3307
3281 3308 for nTries in range( self.nTries ):
3282 3309
3283 3310 self.fp.close()
3284 3311 self.fp = open( self.filename, 'rb' )
3285 3312 self.fp.seek( currentPointer )
3286 3313
3287 3314 self.fileSize = os.path.getsize( self.filename )
3288 3315 currentSize = self.fileSize - currentPointer
3289 3316
3290 3317 if ( currentSize >= neededSize ):
3291 3318 self.__rdBasicHeader()
3292 3319 return 1
3293 3320
3294 3321 print "\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
3295 3322 time.sleep( self.delay )
3296 3323
3297 3324
3298 3325 return 0
3299 3326
3300 3327 def __setNewBlock(self):
3301 3328
3302 3329 if self.online:
3303 3330 self.__jumpToLastBlock()
3304 3331
3305 3332 if self.flagIsNewFile:
3306 3333 return 1
3307 3334
3308 3335 self.lastUTTime = self.utc
3309 3336
3310 3337 if self.online:
3311 3338 if self.__waitNewBlock():
3312 3339 return 1
3313 3340
3314 3341 if self.nReadBlocks < self.dataBlocksPerFile:
3315 3342 return 1
3316 3343
3317 3344 if not(self.setNextFile()):
3318 3345 return 0
3319 3346
3320 3347 deltaTime = self.utc - self.lastUTTime
3321 3348
3322 3349 self.flagTimeBlock = 0
3323 3350
3324 3351 if deltaTime > self.maxTimeStep:
3325 3352 self.flagTimeBlock = 1
3326 3353
3327 3354 return 1
3328 3355
3329 3356
3330 3357 def readNextBlock(self):
3331 3358 if not(self.__setNewBlock()):
3332 3359 return 0
3333 3360
3334 3361 if not(self.readBlock()):
3335 3362 return 0
3336 3363
3337 3364 return 1
3338 3365
3339 3366
3340 3367 def getData(self):
3341 3368
3342 3369 if self.flagNoMoreFiles:
3343 3370 self.dataOut.flagNoData = True
3344 3371 print 'Process finished'
3345 3372 return 0
3346 3373
3347 3374 self.flagTimeBlock = 0
3348 3375 self.flagIsNewBlock = 0
3349 3376
3350 3377 if not(self.readNextBlock()):
3351 3378 return 0
3352 3379
3353 3380 if self.data == None:
3354 3381 self.dataOut.flagNoData = True
3355 3382 return 0
3356 3383
3357 3384 self.dataOut.data = self.data
3358 3385 self.dataOut.data_header = self.data_header_dict
3359 3386 self.dataOut.utctime = self.utc
3360 3387
3361 3388 self.dataOut.header = self.header_dict
3362 3389 self.dataOut.expName = self.expName
3363 3390 self.dataOut.nChannels = self.nChannels
3364 3391 self.dataOut.nSamples = self.nSamples
3365 3392 self.dataOut.dataBlocksPerFile = self.dataBlocksPerFile
3366 3393 self.dataOut.comments = self.comments
3367 3394
3368 3395 self.dataOut.flagNoData = False
3369 3396
3370 3397 return self.dataOut.data
3371 3398
3372 3399 def run(self, **kwargs):
3373 3400
3374 3401 if not(self.isConfig):
3375 3402 self.setup(**kwargs)
3376 3403 self.isConfig = True
3377 3404
3378 3405 self.getData() No newline at end of file
General Comments 0
You need to be logged in to leave comments. Login now