##// END OF EJS Templates
En JRODataIO.py se agregan los cambios en searchFilesOffLine para la lectura de datos sobre multiples paths. Los datos se ordenan segun el nombre del archivo.
Daniel Valdez -
r449:336ad0c1a9b5
parent child
Show More
@@ -1,3394 +1,3413
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JRODataIO.py 169 2012-11-19 21:57:03Z murco $
5 5 '''
6 6
7 7 import os, sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import time, datetime
13 13 from xml.etree.ElementTree import Element, SubElement, ElementTree
14 14 try:
15 15 import pyfits
16 16 except:
17 17 print "pyfits module has not been imported, it should be installed to save files in fits format"
18 18
19 19 from jrodata import *
20 20 from jroheaderIO import *
21 21 from jroprocessing import *
22 22
23 23 LOCALTIME = True #-18000
24 24
def isNumber(str):
    """
    Check whether a string (or any value) can be converted to a number.

    Input:
        str : value to test (parameter name shadows the builtin; kept for
              backward compatibility with existing callers)

    Return:
        True  : the value converts cleanly to float
        False : it does not
    """
    try:
        float( str )
        return True
    except (TypeError, ValueError):
        # Only conversion failures mean "not a number"; the original bare
        # except also swallowed KeyboardInterrupt/SystemExit.
        return False
43 43
def isThisFileinRange(filename, startUTSeconds, endUTSeconds):
    """
    Return 1 if a Jicamarca data file contains data inside the given UTC range.

    Inputs:
        filename       : full path of a Jicamarca-format data file (.r)
        startUTSeconds : range start, seconds since 01/01/1970
        endUTSeconds   : range end, seconds since 01/01/1970

    Return:
        1 when startUTSeconds <= header utc < endUTSeconds, else 0
        (also 0 when the basic header cannot be read).

    Raises:
        IOError when the file cannot be opened.
    """
    basicHeaderObj = BasicHeader(LOCALTIME)

    try:
        fp = open(filename, 'rb')
    except:
        # call form of raise: same behavior on Python 2, valid on Python 3
        raise IOError("The file %s can't be opened" % (filename))

    sts = basicHeaderObj.read(fp)
    fp.close()

    if not(sts):
        print("Skipping the file %s because it has not a valid header" % (filename))
        return 0

    if not((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
        return 0

    return 1
83 83
def isFileinThisTime(filename, startTime, endTime):
    """
    Return the file's datetime when its data falls inside the given
    time-of-day range, otherwise None.

    Inputs:
        filename  : full path of a Jicamarca-format data file (.r)
        startTime : range start (datetime.time)
        endTime   : range end (datetime.time)

    Return:
        datetime of the basic header, or None (invalid header / out of range).

    Raises:
        IOError when the file cannot be opened.
    """
    try:
        fp = open(filename, 'rb')
    except:
        raise IOError("The file %s can't be opened" % (filename))

    basicHeaderObj = BasicHeader(LOCALTIME)
    sts = basicHeaderObj.read(fp)
    fp.close()

    # FIX: check the read status BEFORE touching header fields; the original
    # read basicHeaderObj.datatime first, which may be unset when sts is 0.
    if not(sts):
        print("Skipping the file %s because it has not a valid header" % (filename))
        return None

    thisDatetime = basicHeaderObj.datatime
    thisTime = thisDatetime.time()

    if not((startTime <= thisTime) and (endTime > thisTime)):
        return None

    return thisDatetime
126 126
def getFileFromSet(path, ext, set):
    """
    Return the file in `path` whose name matches xYYYYDDDSSS.ext for the
    requested set number; when that set does not exist, fall back to the
    last valid file; None when the folder holds no valid file at all.

    Inputs:
        path : folder to scan
        ext  : required file extension (case-insensitive)
        set  : set number SSS to look for (name kept for compatibility)
    """
    validFilelist = []
    fileList = os.listdir(path)

    # 0 1234 567 89A BCDE
    # H YYYY DDD SSS .ext
    year = None
    doy = None

    for file in fileList:
        try:
            year = int(file[1:5])
            doy = int(file[5:8])
        except:
            # name does not follow the convention: skip it
            continue

        if (os.path.splitext(file)[-1].lower() != ext.lower()):
            continue

        validFilelist.append(file)

    # FIX: with no valid file the original reached the fnmatch line with
    # year/doy unbound and raised NameError; bail out early instead.
    if not validFilelist:
        return None

    myfile = fnmatch.filter(validFilelist, '*%4.4d%3.3d%3.3d*' % (year, doy, set))

    if len(myfile) != 0:
        return myfile[0]

    filename = '*%4.4d%3.3d%3.3d%s' % (year, doy, set, ext.lower())
    print('the filename %s does not exist' % filename)
    print('...going to the last file: ')

    validFilelist = sorted(validFilelist, key=str.lower)
    return validFilelist[-1]
162 162
163 163
def getlastFileFromPath(path, ext):
    """
    Filter the listing of `path` down to names following the
    "PYYYYDDDSSS.ext" convention and return the last one in a
    case-insensitive sort.

    Input:
        path : folder holding the data files
        ext  : extension the files must carry

    Return:
        Name (without path) of the last valid file, or None when none match.
    """
    candidates = []

    # 0 1234 567 89A BCDE
    # H YYYY DDD SSS .ext
    for entry in os.listdir(path):
        # keep only names whose year and doy fields are numeric
        try:
            int(entry[1:5])
            int(entry[5:8])
        except:
            continue

        if os.path.splitext(entry)[-1].lower() != ext.lower():
            continue

        candidates.append(entry)

    if not candidates:
        return None

    candidates.sort(key=str.lower)
    return candidates[-1]
201 201
def checkForRealPath(path, foldercounter, year, doy, set, ext):
    """
    Linux is case sensitive, so probe every upper/lower-case combination of
    the doy-folder prefix (none, 'd', 'D') and the file prefix ('d'/'D' for
    .r voltage, 'p'/'P' for .pdata spectra) until an existing file
    xYYYYDDDSSS.ext is found.

    Example:
        for .../D2009307/P2009307367.ext it tries y/Y directly under `path`
        and inside x2009307 / X2009307 until the exact on-disk casing hits.

    Return:
        (fullfilename, filename) on success;
        (None, last filename tried) when no combination exists;
        (None, None) for an unsupported extension.
    """
    if ext.lower() == ".r":        # voltage
        filePrefixes = ['d', 'D']
    elif ext.lower() == ".pdata":  # spectra
        filePrefixes = ['p', 'P']
    else:
        return None, None

    filename = None
    for dirPrefix in [None, 'd', 'D']:
        if dirPrefix is None:
            # file may sit directly under path, with no doy subfolder
            searchPath = path
        elif foldercounter == 0:
            searchPath = os.path.join(path, "%s%04d%03d" % (dirPrefix, year, doy))
        else:
            # duplicated doy folders carry a _NN suffix
            searchPath = os.path.join(path, "%s%04d%03d_%02d" % (dirPrefix, year, doy, foldercounter))

        for filePrefix in filePrefixes:
            filename = "%s%04d%03d%03d%s" % (filePrefix, year, doy, set, ext)
            fullfilename = os.path.join(searchPath, filename)
            if os.path.exists(fullfilename):
                return fullfilename, filename

    return None, filename
260 260
def isDoyFolder(folder):
    """
    Return 1 when `folder` looks like a day-of-year folder (xYYYYDDD...):
    characters 1-4 (year) and 5-7 (doy) must both parse as integers.
    Return 0 otherwise.
    """
    for start, stop in ((1, 5), (5, 8)):
        try:
            int(folder[start:stop])
        except:
            return 0
    return 1
273 273
class JRODataIO:
    """
    Base class holding the state shared by every JRO reader/writer:
    the four header objects, the current file handle and the block/profile
    bookkeeping counters.

    NOTE(review): these are class-level (shared) attributes, as in the
    original code; instances overwrite them through normal assignment.
    """

    c = 3E8                    # speed of light [m/s], used to derive ippSeconds

    isConfig = False

    basicHeaderObj = BasicHeader(LOCALTIME)

    systemHeaderObj = SystemHeader()

    radarControllerHeaderObj = RadarControllerHeader()

    processingHeaderObj = ProcessingHeader()

    online = 0                 # 1 while reading files that are still growing

    dtype = None               # numpy dtype of the samples, set from the headers

    pathList = []

    filenameList = []

    filename = None

    ext = None

    flagIsNewFile = 1

    flagTimeBlock = 0          # 1 when a time gap > maxTimeStep was detected

    flagIsNewBlock = 0

    fp = None                  # current open file object

    firstHeaderSize = 0

    basicHeaderSize = 24       # fixed size in bytes of the per-block header

    versionFile = 1103

    fileSize = None

    ippSeconds = None

    fileSizeByHeader = None    # expected file size computed from the headers

    fileIndex = None

    profileIndex = None

    blockIndex = None

    nTotalBlocks = None

    maxTimeStep = 30           # seconds; larger gaps set flagTimeBlock

    lastUTTime = None

    datablock = None

    dataOut = None

    blocksize = None

    def __init__(self):
        # abstract: concrete readers/writers must implement it
        # (call form of raise: identical behavior in Python 2, valid in 3)
        raise ValueError("Not implemented")

    def run(self):
        # abstract: implemented by ProcessingUnit/Operation subclasses
        raise ValueError("Not implemented")

    def getOutput(self):
        """Return the data object produced by the last read/write."""
        return self.dataOut
349 349
class JRODataReader(JRODataIO, ProcessingUnit):
    # Base reader for JRO data files; concrete subclasses implement
    # readBlock/getData for voltage (.r) or spectra (.pdata).

    # blocks read so far from the current file
    nReadBlocks = 0

    delay = 10 #number of seconds waiting a new file

    nTries = 3 #quantity tries

    nFiles = 3 #number of files for searching

    path = None

    # suffix counter for duplicated doy folders (dYYYYDDD_NN)
    foldercounter = 0

    flagNoMoreFiles = 0

    datetimeList = []

    # online mode: stays 1 until the jump to the last block has been done
    __isFirstTimeOnline = 1

    # headers are printed only once; this flag gates printInfo()
    __printInfo = True

    profileIndex = None

    def __init__(self):

        """
        Abstract constructor: concrete reader subclasses must implement it.
        """

        raise ValueError, "This method has not been implemented"
381 381
382 382
383 383 def createObjByDefault(self):
384 384 """
385 385
386 386 """
387 387 raise ValueError, "This method has not been implemented"
388 388
389 389 def getBlockDimension(self):
390 390
391 391 raise ValueError, "No implemented"
392 392
393 393 def __searchFilesOffLine(self,
394 394 path,
395 395 startDate,
396 396 endDate,
397 397 startTime=datetime.time(0,0,0),
398 398 endTime=datetime.time(23,59,59),
399 399 set=None,
400 400 expLabel='',
401 401 ext='.r',
402 402 walk=True):
403 403
404 404 pathList = []
405 405
406 406 if not walk:
407 pathList.append(path)
407 #pathList.append(path)
408 multi_path = path.split(',')
409 for single_path in multi_path:
410 pathList.append(single_path)
408 411
409 412 else:
413 #dirList = []
414 multi_path = path.split(',')
415 for single_path in multi_path:
410 416 dirList = []
411 for thisPath in os.listdir(path):
412 if not os.path.isdir(os.path.join(path,thisPath)):
417 for thisPath in os.listdir(single_path):
418 if not os.path.isdir(os.path.join(single_path,thisPath)):
413 419 continue
414 420 if not isDoyFolder(thisPath):
415 421 continue
416 422
417 423 dirList.append(thisPath)
418 424
419 425 if not(dirList):
420 426 return None, None
421 427
422 428 thisDate = startDate
423 429
424 430 while(thisDate <= endDate):
425 431 year = thisDate.timetuple().tm_year
426 432 doy = thisDate.timetuple().tm_yday
427 433
428 434 matchlist = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy) + '*')
429 435 if len(matchlist) == 0:
430 436 thisDate += datetime.timedelta(1)
431 437 continue
432 438 for match in matchlist:
433 pathList.append(os.path.join(path,match,expLabel))
439 pathList.append(os.path.join(single_path,match,expLabel))
434 440
435 441 thisDate += datetime.timedelta(1)
436 442
437 443 if pathList == []:
438 444 print "Any folder was found for the date range: %s-%s" %(startDate, endDate)
439 445 return None, None
440 446
441 447 print "%d folder(s) was(were) found for the date range: %s - %s" %(len(pathList), startDate, endDate)
442 448
443 449 filenameList = []
444 450 datetimeList = []
451 pathDict = {}
452 filenameList_to_sort = []
445 453
446 454 for i in range(len(pathList)):
447 455
448 456 thisPath = pathList[i]
449 457
450 458 fileList = glob.glob1(thisPath, "*%s" %ext)
451 459 fileList.sort()
460 pathDict.setdefault(fileList[0])
461 pathDict[fileList[0]] = i
462 filenameList_to_sort.append(fileList[0])
463
464 filenameList_to_sort.sort()
465
466 for file in filenameList_to_sort:
467 thisPath = pathList[pathDict[file]]
468
469 fileList = glob.glob1(thisPath, "*%s" %ext)
470 fileList.sort()
452 471
453 472 for file in fileList:
454 473
455 474 filename = os.path.join(thisPath,file)
456 475 thisDatetime = isFileinThisTime(filename, startTime, endTime)
457 476
458 477 if not(thisDatetime):
459 478 continue
460 479
461 480 filenameList.append(filename)
462 481 datetimeList.append(thisDatetime)
463 482
464 483 if not(filenameList):
465 484 print "Any file was found for the time range %s - %s" %(startTime, endTime)
466 485 return None, None
467 486
468 487 print "%d file(s) was(were) found for the time range: %s - %s" %(len(filenameList), startTime, endTime)
469 488 print
470 489
471 490 for i in range(len(filenameList)):
472 491 print "%s -> [%s]" %(filenameList[i], datetimeList[i].ctime())
473 492
474 493 self.filenameList = filenameList
475 494 self.datetimeList = datetimeList
476 495
477 496 return pathList, filenameList
478 497
    def __searchFilesOnLine(self, path, expLabel = "", ext = None, walk=True, set=None):

        """
        Find the last file of the last doy folder (online mode) and return
        it together with its decoded name fields.

        Input:
            path     : folder containing the data folders/files
            expLabel : sub-experiment (subfolder) name
            ext      : file extension
            walk     : when False, do not descend into doy subfolders
            set      : when given, look for that specific set number instead
                       of the last file

        Return:
            (fullpath, foldercounter, filename, year, doy, set), or six
            Nones when no valid file is found.
        """
        dirList = []

        if not walk:
            fullpath = path
            foldercounter = 0
        else:
            # keep only subdirectories that look like doy folders
            for thisPath in os.listdir(path):
                if not os.path.isdir(os.path.join(path,thisPath)):
                    continue
                if not isDoyFolder(thisPath):
                    continue

                dirList.append(thisPath)

            if not(dirList):
                return None, None, None, None, None, None

            dirList = sorted( dirList, key=str.lower )

            # last doy folder; a "_NN" suffix marks duplicated folders
            doypath = dirList[-1]
            foldercounter = int(doypath.split('_')[1]) if len(doypath.split('_'))>1 else 0
            fullpath = os.path.join(path, doypath, expLabel)


        print "%s folder was found: " %(fullpath )

        if set == None:
            filename = getlastFileFromPath(fullpath, ext)
        else:
            filename = getFileFromSet(fullpath, ext, set)

        if not(filename):
            return None, None, None, None, None, None

        print "%s file was found" %(filename)

        # reject files without at least one complete data block
        if not(self.__verifyFile(os.path.join(fullpath, filename))):
            return None, None, None, None, None, None

        # decode xYYYYDDDSSS.ext
        year = int( filename[1:5] )
        doy = int( filename[5:8] )
        set = int( filename[8:11] )

        return fullpath, foldercounter, filename, year, doy, set
548 567
549 568 def __setNextFileOffline(self):
550 569
551 570 idFile = self.fileIndex
552 571
553 572 while (True):
554 573 idFile += 1
555 574 if not(idFile < len(self.filenameList)):
556 575 self.flagNoMoreFiles = 1
557 576 print "No more Files"
558 577 return 0
559 578
560 579 filename = self.filenameList[idFile]
561 580
562 581 if not(self.__verifyFile(filename)):
563 582 continue
564 583
565 584 fileSize = os.path.getsize(filename)
566 585 fp = open(filename,'rb')
567 586 break
568 587
569 588 self.flagIsNewFile = 1
570 589 self.fileIndex = idFile
571 590 self.filename = filename
572 591 self.fileSize = fileSize
573 592 self.fp = fp
574 593
575 594 print "Setting the file: %s"%self.filename
576 595
577 596 return 1
578 597
    def __setNextFileOnline(self):
        """
        Look for the next file with enough data to be read inside the
        current folder; when no valid file shows up, wait and retry over the
        next possible files/sets, finally rolling over to the next doy.

        Affected:
            self.flagIsNewFile
            self.filename
            self.fileSize
            self.fp
            self.set
            self.flagNoMoreFiles

        Return:
            0 : no valid next file could be found
            1 : a file was opened successfully and is ready to be read
        """
        nFiles = 0
        fileOk_flag = False
        firstTime_flag = True

        self.set += 1

        # sets wrap at 999; a wrap means a new duplicated folder (_NN)
        if self.set > 999:
            self.set = 0
            self.foldercounter += 1

        # look for the first available file
        fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
        if fullfilename:
            if self.__verifyFile(fullfilename, False):
                fileOk_flag = True

        # no file found: wait and search again over the next candidates
        if not(fileOk_flag):
            for nFiles in range(self.nFiles+1): #search the next self.nFiles+1 possible files

                if firstTime_flag: #on the first pass retry self.nTries times
                    tries = self.nTries
                else:
                    tries = 1 #afterwards only once per candidate

                for nTries in range( tries ):
                    if firstTime_flag:
                        print "\tWaiting %0.2f sec for the file \"%s\" , try %03d ..." % ( self.delay, filename, nTries+1 )
                        time.sleep( self.delay )
                    else:
                        # NOTE(review): self.optchar is defined by subclasses — confirm
                        print "\tSearching next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext)

                    fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
                    if fullfilename:
                        if self.__verifyFile(fullfilename):
                            fileOk_flag = True
                            break

                if fileOk_flag:
                    break

                firstTime_flag = False

                print "\tSkipping the file \"%s\" due to this file doesn't exist" % filename
                self.set += 1

                # candidate sets exhausted: move on to the next doy folder
                if nFiles == (self.nFiles-1):
                    self.set = 0
                    self.doy += 1
                    self.foldercounter = 0

        if fileOk_flag:
            self.fileSize = os.path.getsize( fullfilename )
            self.filename = fullfilename
            self.flagIsNewFile = 1
            if self.fp != None: self.fp.close()
            self.fp = open(fullfilename, 'rb')
            self.flagNoMoreFiles = 0
            print 'Setting the file: %s' % fullfilename
        else:
            self.fileSize = 0
            self.filename = None
            self.flagIsNewFile = 0
            self.fp = None
            self.flagNoMoreFiles = 1
            print 'No more Files'

        return fileOk_flag
668 687
669 688
670 689 def setNextFile(self):
671 690 if self.fp != None:
672 691 self.fp.close()
673 692
674 693 if self.online:
675 694 newFile = self.__setNextFileOnline()
676 695 else:
677 696 newFile = self.__setNextFileOffline()
678 697
679 698 if not(newFile):
680 699 return 0
681 700
682 701 self.__readFirstHeader()
683 702 self.nReadBlocks = 0
684 703 return 1
685 704
    def __waitNewBlock(self):
        """
        Return 1 when a new data block became available, 0 otherwise.

        In offline mode this always returns 0. Online, the file is reopened
        up to self.nTries times (sleeping self.delay seconds between tries)
        to see whether it grew enough to hold basic header + one block.
        """
        if not self.online:
            return 0

        if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
            # the announced number of blocks was already read
            return 0

        currentPointer = self.fp.tell()

        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        for nTries in range( self.nTries ):

            # reopen so size/content reflect data appended meanwhile
            self.fp.close()
            self.fp = open( self.filename, 'rb' )
            self.fp.seek( currentPointer )

            self.fileSize = os.path.getsize( self.filename )
            currentSize = self.fileSize - currentPointer

            if ( currentSize >= neededSize ):
                self.__rdBasicHeader()
                return 1

            if self.fileSize == self.fileSizeByHeader:
                # file reached its announced final size: nothing more coming
                # self.flagEoF = True
                return 0

            print "\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
            time.sleep( self.delay )


        return 0
724 743
725 744 def waitDataBlock(self,pointer_location):
726 745
727 746 currentPointer = pointer_location
728 747
729 748 neededSize = self.processingHeaderObj.blockSize #+ self.basicHeaderSize
730 749
731 750 for nTries in range( self.nTries ):
732 751 self.fp.close()
733 752 self.fp = open( self.filename, 'rb' )
734 753 self.fp.seek( currentPointer )
735 754
736 755 self.fileSize = os.path.getsize( self.filename )
737 756 currentSize = self.fileSize - currentPointer
738 757
739 758 if ( currentSize >= neededSize ):
740 759 return 1
741 760
742 761 print "\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
743 762 time.sleep( self.delay )
744 763
745 764 return 0
746 765
747 766
    def __jumpToLastBlock(self):
        # Online mode only: on the very first read, skip forward to the last
        # complete block already on disk so reading starts at "now".
        if not(self.__isFirstTimeOnline):
            return

        csize = self.fileSize - self.fp.tell()
        blocksize = self.processingHeaderObj.blockSize

        # skip the first data block (its basic header was already consumed
        # by __readFirstHeader); bail out if the file is that small
        if csize > self.processingHeaderObj.blockSize:
            self.fp.seek(self.fp.tell() + blocksize)
        else:
            return

        csize = self.fileSize - self.fp.tell()
        neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
        # step block-by-block to the end, then back up one block so the next
        # read lands on the last complete (header + data) pair
        while True:

            if self.fp.tell()<self.fileSize:
                self.fp.seek(self.fp.tell() + neededsize)
            else:
                self.fp.seek(self.fp.tell() - neededsize)
                break

        # csize = self.fileSize - self.fp.tell()
        # neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
        # factor = int(csize/neededsize)
        # if factor > 0:
        #     self.fp.seek(self.fp.tell() + factor*neededsize)

        self.flagIsNewFile = 0
        self.__isFirstTimeOnline = 0
780 799
781 800
    def __setNewBlock(self):
        # Position the file pointer on the next block: read its basic
        # header, wait for it (online), or move to the next file.
        if self.fp == None:
            return 0

        if self.online:
            self.__jumpToLastBlock()

        if self.flagIsNewFile:
            # the first header of a fresh file was just read: already
            # positioned on a block
            return 1

        # remember the time of the block we are leaving, to detect gaps
        self.lastUTTime = self.basicHeaderObj.utc
        currentSize = self.fileSize - self.fp.tell()
        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        if (currentSize >= neededSize):
            self.__rdBasicHeader()
            return 1

        # online: the block may still be arriving
        if self.__waitNewBlock():
            return 1

        # otherwise open the next file
        if not(self.setNextFile()):
            return 0

        deltaTime = self.basicHeaderObj.utc - self.lastUTTime #

        self.flagTimeBlock = 0

        # flag a discontinuity between consecutive files
        if deltaTime > self.maxTimeStep:
            self.flagTimeBlock = 1

        return 1
815 834
816 835
817 836 def readNextBlock(self):
818 837 if not(self.__setNewBlock()):
819 838 return 0
820 839
821 840 if not(self.readBlock()):
822 841 return 0
823 842
824 843 return 1
825 844
    def __rdProcessingHeader(self, fp=None):
        # Read the processing header from fp (current file when omitted).
        if fp == None:
            fp = self.fp

        self.processingHeaderObj.read(fp)

    def __rdRadarControllerHeader(self, fp=None):
        # Read the radar-controller header from fp (current file when omitted).
        if fp == None:
            fp = self.fp

        self.radarControllerHeaderObj.read(fp)

    def __rdSystemHeader(self, fp=None):
        # Read the system header from fp (current file when omitted).
        if fp == None:
            fp = self.fp

        self.systemHeaderObj.read(fp)

    def __rdBasicHeader(self, fp=None):
        # Read the basic (per-block) header from fp (current file when omitted).
        if fp == None:
            fp = self.fp

        self.basicHeaderObj.read(fp)
849 868
850 869
    def __readFirstHeader(self):
        # Read the four headers at the start of a file and derive the sample
        # dtype, ippSeconds and the expected total file size.
        self.__rdBasicHeader()
        self.__rdSystemHeader()
        self.__rdRadarControllerHeader()
        self.__rdProcessingHeader()

        self.firstHeaderSize = self.basicHeaderObj.size

        # datatype index 0..5 selects complex int8/int16/int32/int64/f32/f64
        datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
        if datatype == 0:
            datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
        elif datatype == 1:
            datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')])
        elif datatype == 2:
            datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')])
        elif datatype == 3:
            datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')])
        elif datatype == 4:
            datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')])
        elif datatype == 5:
            datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')])
        else:
            raise ValueError, 'Data type was not defined'

        self.dtype = datatype_str
        # ipp presumably in km; 2*1000*ipp/c gives the inter-pulse period in
        # seconds — TODO confirm units against RadarControllerHeader
        self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
        # first header + N blocks + (N-1) intermediate basic headers
        self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1)
        # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
        # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
        self.getBlockDimension()
881 900
882 901
    def __verifyFile(self, filename, msgFlag=True):
        """
        Check that `filename` can be opened and holds at least one complete
        data block (headers + blockSize bytes).

        Input:
            filename : full path of the file to verify
            msgFlag  : when True, print diagnostics

        Return:
            True when the file has enough data to be read, False otherwise.
        """
        msg = None
        try:
            fp = open(filename, 'rb')
            currentPosition = fp.tell()
        except:
            if msgFlag:
                print "The file %s can't be opened" % (filename)
            return False

        neededSize = self.processingHeaderObj.blockSize + self.firstHeaderSize

        if neededSize == 0:
            # headers not loaded yet (first file): parse them from this file
            basicHeaderObj = BasicHeader(LOCALTIME)
            systemHeaderObj = SystemHeader()
            radarControllerHeaderObj = RadarControllerHeader()
            processingHeaderObj = ProcessingHeader()

            try:
                if not( basicHeaderObj.read(fp) ): raise IOError
                if not( systemHeaderObj.read(fp) ): raise IOError
                if not( radarControllerHeaderObj.read(fp) ): raise IOError
                if not( processingHeaderObj.read(fp) ): raise IOError
                data_type = int(numpy.log2((processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))

                neededSize = processingHeaderObj.blockSize + basicHeaderObj.size

            except:
                if msgFlag:
                    print "\tThe file %s is empty or it hasn't enough data" % filename

                fp.close()
                return False
        else:
            msg = "\tSkipping the file %s due to it hasn't enough data" %filename

        fp.close()
        fileSize = os.path.getsize(filename)
        currentSize = fileSize - currentPosition
        if currentSize < neededSize:
            if msgFlag and (msg != None):
                print msg #print"\tSkipping the file %s due to it hasn't enough data" %filename
            return False

        return True
928 947
    def setup(self,
              path=None,
              startDate=None,
              endDate=None,
              startTime=datetime.time(0,0,0),
              endTime=datetime.time(23,59,59),
              set=None,
              expLabel = "",
              ext = None,
              online = False,
              delay = 60,
              walk = True):
        """
        Configure the reader: locate the files to process (online or
        offline) and open the first one.

        Input:
            path     : search root(s); offline mode accepts several roots
                       separated by commas
            startDate/endDate, startTime/endTime : offline search range
            set      : online mode, specific set number to start from
            expLabel : sub-experiment folder
            ext      : file extension (defaults to the subclass extension)
            online   : True to follow files still being written
            delay    : seconds between online retries
            walk     : descend into doy subfolders

        Return:
            self.dataOut on success; calls sys.exit(-1) when no file is
            found offline.
        """
        if path == None:
            raise ValueError, "The path is not valid"

        if ext == None:
            ext = self.ext

        if online:
            print "Searching files in online mode..."

            for nTries in range( self.nTries ):
                fullpath, foldercounter, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext, walk=walk, set=set)

                if fullpath:
                    break

                print '\tWaiting %0.2f sec for an valid file in %s: try %02d ...' % (self.delay, path, nTries+1)
                time.sleep( self.delay )

            if not(fullpath):
                print "There 'isn't valied files in %s" % path
                return None

            self.year = year
            self.doy = doy
            self.set = set - 1  # __setNextFileOnline increments it back
            self.path = path
            self.foldercounter = foldercounter

        else:
            print "Searching files in offline mode ..."
            pathList, filenameList = self.__searchFilesOffLine(path, startDate=startDate, endDate=endDate,
                                                               startTime=startTime, endTime=endTime,
                                                               set=set, expLabel=expLabel, ext=ext,
                                                               walk=walk)

            if not(pathList):
                print "No *%s files into the folder %s \nfor the range: %s - %s"%(ext, path,
                                                                                  datetime.datetime.combine(startDate,startTime).ctime(),
                                                                                  datetime.datetime.combine(endDate,endTime).ctime())

                sys.exit(-1)


        self.fileIndex = -1
        # NOTE(review): in online mode pathList/filenameList are never
        # assigned, so the next two lines would raise NameError — verify
        # the online code path
        self.pathList = pathList
        self.filenameList = filenameList

        self.online = online
        self.delay = delay
        ext = ext.lower()
        self.ext = ext

        if not(self.setNextFile()):
            if (startDate!=None) and (endDate!=None):
                print "No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
            elif startDate != None:
                print "No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
            else:
                print "No files"

            sys.exit(-1)

        # self.updateDataHeader()

        return self.dataOut
1007 1026
1008 1027 def getBasicHeader(self):
1009 1028
1010 1029 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000. + self.profileIndex * self.ippSeconds
1011 1030
1012 1031 self.dataOut.flagTimeBlock = self.flagTimeBlock
1013 1032
1014 1033 self.dataOut.timeZone = self.basicHeaderObj.timeZone
1015 1034
1016 1035 self.dataOut.dstFlag = self.basicHeaderObj.dstFlag
1017 1036
1018 1037 self.dataOut.errorCount = self.basicHeaderObj.errorCount
1019 1038
1020 1039 self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime
1021 1040
1022 1041 def getFirstHeader(self):
1023 1042
1024 1043 raise ValueError, "This method has not been implemented"
1025 1044
1026 1045 def getData():
1027 1046
1028 1047 raise ValueError, "This method has not been implemented"
1029 1048
1030 1049 def hasNotDataInBuffer():
1031 1050
1032 1051 raise ValueError, "This method has not been implemented"
1033 1052
1034 1053 def readBlock():
1035 1054
1036 1055 raise ValueError, "This method has not been implemented"
1037 1056
1038 1057 def isEndProcess(self):
1039 1058
1040 1059 return self.flagNoMoreFiles
1041 1060
1042 1061 def printReadBlocks(self):
1043 1062
1044 1063 print "Number of read blocks per file %04d" %self.nReadBlocks
1045 1064
1046 1065 def printTotalBlocks(self):
1047 1066
1048 1067 print "Number of read blocks %04d" %self.nTotalBlocks
1049 1068
1050 1069 def printNumberOfBlock(self):
1051 1070
1052 1071 if self.flagIsNewBlock:
1053 1072 print "Block No. %04d, Total blocks %04d -> %s" %(self.basicHeaderObj.dataBlock, self.nTotalBlocks, self.dataOut.datatime.ctime())
1054 1073
    def printInfo(self):
        """Print every header (basic, system, radar controller, processing)
        once; afterwards the guard flag suppresses further printing until
        something re-enables it.
        """

        # NOTE: ``__printInfo`` is name-mangled to the class that defines this
        # method, so only attributes set from that same class are seen here.
        if self.__printInfo == False:
            return

        self.basicHeaderObj.printInfo()
        self.systemHeaderObj.printInfo()
        self.radarControllerHeaderObj.printInfo()
        self.processingHeaderObj.printInfo()

        # One-shot: disable subsequent calls.
        self.__printInfo = False
1066 1085
1067 1086
1068 1087 def run(self, **kwargs):
1069 1088
1070 1089 if not(self.isConfig):
1071 1090
1072 1091 # self.dataOut = dataOut
1073 1092 self.setup(**kwargs)
1074 1093 self.isConfig = True
1075 1094
1076 1095 self.getData()
1077 1096
class JRODataWriter(JRODataIO, Operation):

    """
    Base class for writing processed data files (.r or .pdata). Data is
    always written to disk one block at a time; concrete writers supply the
    block layout, the serialization and the format-specific first header.
    """

    # Index of the block being written inside the current file.
    blockIndex = 0

    # Destination directory for the generated files.
    path = None

    # "Set" counter used to build unique file names within one day folder.
    setFile = None

    profilesPerBlock = None

    blocksPerFile = None

    nWriteBlocks = 0

    def __init__(self, dataOut=None):
        # Abstract: concrete writers must define their own constructor.
        raise ValueError, "Not implemented"


    def hasAllDataInBuffer(self):
        # Abstract: return 1 when the internal buffer holds a full block.
        raise ValueError, "Not implemented"


    def setBlockDimension(self):
        # Abstract: allocate/shape the output buffers for one block.
        raise ValueError, "Not implemented"


    def writeBlock(self):
        # Abstract: serialize the buffered block into the open file.
        raise ValueError, "No implemented"


    def putData(self):
        # Abstract: push one dataOut unit into the buffer, flushing as needed.
        raise ValueError, "No implemented"


    def setBasicHeader(self):
        """Fill the basic header with the metadata of the block about to be
        written: size, version, block counter, and the utc/millisecond split
        of the dataOut timestamp.
        """
        self.basicHeaderObj.size = self.basicHeaderSize #bytes
        self.basicHeaderObj.version = self.versionFile
        self.basicHeaderObj.dataBlock = self.nTotalBlocks

        # Split the float timestamp into whole seconds + milliseconds.
        utc = numpy.floor(self.dataOut.utctime)
        milisecond = (self.dataOut.utctime - utc)* 1000.0

        self.basicHeaderObj.utc = utc
        self.basicHeaderObj.miliSecond = milisecond
        self.basicHeaderObj.timeZone = self.dataOut.timeZone
        self.basicHeaderObj.dstFlag = self.dataOut.dstFlag
        self.basicHeaderObj.errorCount = self.dataOut.errorCount

    def setFirstHeader(self):
        """
        Build the first header of the file from the current dataOut.

        Affected:
            self.basicHeaderObj
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.processingHeaderObj

        Return:
            None
        """

        raise ValueError, "No implemented"

    def __writeFirstHeader(self):
        """
        Write the first header of the file: the Basic header followed by the
        long header (SystemHeader, RadarControllerHeader, ProcessingHeader).

        Affected:
            __dataType

        Return:
            None
        """

        # Compute the total header size before writing.

        sizeLongHeader = self.systemHeaderObj.size + self.radarControllerHeaderObj.size + self.processingHeaderObj.size
        self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader

        self.basicHeaderObj.write(self.fp)
        self.systemHeaderObj.write(self.fp)
        self.radarControllerHeaderObj.write(self.fp)
        self.processingHeaderObj.write(self.fp)

        self.dtype = self.dataOut.dtype

    def __setNewBlock(self):
        """
        On a brand-new file the first header was already written; otherwise
        write only a Basic header, rolling over to a new file when the
        current one already holds dataBlocksPerFile blocks.

        Return:
            0 : nothing could be written
            1 : the Basic or the First header was written
        """
        if self.fp == None:
            self.setNextFile()

        if self.flagIsNewFile:
            return 1

        if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
            self.basicHeaderObj.write(self.fp)
            return 1

        if not( self.setNextFile() ):
            return 0

        return 1


    def writeNextBlock(self):
        """
        Prepare the next block slot (header write / file rollover) and then
        serialize the buffered data block into the file.

        Return:
            0 : the data block could not be written
            1 : the data block was written
        """
        if not( self.__setNewBlock() ):
            return 0

        self.writeBlock()

        return 1

    def setNextFile(self):
        """
        Determine and open the next file to be written, creating the daily
        subfolder ('dYYYYDDD') when needed and resuming the set counter from
        the last file already present in it.

        Affected:
            self.filename
            self.subfolder
            self.fp
            self.setFile
            self.flagIsNewFile

        Return:
            0 : the file could not be opened for writing
            1 : the file is ready to be written
        """
        ext = self.ext
        path = self.path

        if self.fp != None:
            self.fp.close()

        timeTuple = time.localtime( self.dataOut.utctime)
        subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)

        fullpath = os.path.join( path, subfolder )
        if not( os.path.exists(fullpath) ):
            os.mkdir(fullpath)
            self.setFile = -1 # initialize the set counter
        else:
            filesList = os.listdir( fullpath )
            if len( filesList ) > 0:
                filesList = sorted( filesList, key=str.lower )
                filen = filesList[-1]
                # the filename is expected to have the following layout
                # 0 1234 567 89A BCDE (hex)
                # x YYYY DDD SSS .ext
                if isNumber( filen[8:11] ):
                    self.setFile = int( filen[8:11] ) # resume the set counter from the last file's set number
                else:
                    self.setFile = -1
            else:
                self.setFile = -1 # initialize the set counter

        setFile = self.setFile
        setFile += 1

        # NOTE(review): ``file`` shadows the Python 2 builtin of the same name.
        file = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
                                        timeTuple.tm_year,
                                        timeTuple.tm_yday,
                                        setFile,
                                        ext )

        filename = os.path.join( path, subfolder, file )

        fp = open( filename,'wb' )

        self.blockIndex = 0

        # store the attributes
        self.filename = filename
        self.subfolder = subfolder
        self.fp = fp
        self.setFile = setFile
        self.flagIsNewFile = 1

        self.setFirstHeader()

        print 'Writing the file: %s'%self.filename

        self.__writeFirstHeader()

        return 1

    def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=64, set=0, ext=None):
        """
        Configure the output destination/format and open the first file,
        writing its First Header.

        Inputs:
            path : destination directory where the files will be created
            ext : extension (and therefore format) of the files to save;
                  defaults to the class attribute when None
            set : starting set number for the file names

        Return:
            0 : setup failed (no file could be opened)
            1 : setup succeeded
        """

        if ext == None:
            ext = self.ext

        ext = ext.lower()

        self.ext = ext

        self.path = path

        # setNextFile() increments before use, so start one below ``set``.
        self.setFile = set - 1

        self.blocksPerFile = blocksPerFile

        self.profilesPerBlock = profilesPerBlock

        self.dataOut = dataOut

        if not(self.setNextFile()):
            print "There isn't a next file"
            return 0

        self.setBlockDimension()

        return 1

    def run(self, dataOut, **kwargs):
        """Operation entry point: configure once, then buffer/write dataOut."""
        if not(self.isConfig):

            self.setup(dataOut, **kwargs)
            self.isConfig = True

        self.putData()
1331 1350
class VoltageReader(JRODataReader):
    """
    Reads voltage data from raw-data files (.r). Reading is always done by
    blocks; the data read (a 3-D array: profiles * heights * channels) is
    stored in the ``datablock`` buffer.

        profiles * heights * channels

    This class holds instances of BasicHeader, SystemHeader,
    RadarControllerHeader and Voltage. The first three store the data-header
    metadata, and the last one (Voltage) receives one profile of data taken
    from the buffer every time ``getData`` is executed.

    Example:

        dpath = "/home/myuser/data"

        startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)

        endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)

        readerObj = VoltageReader()

        readerObj.setup(dpath, startTime, endTime)

        while(True):

            #to get one profile
            profile = readerObj.getData()

            #print the profile
            print profile

            #If you want to see all datablock
            print readerObj.datablock

            if readerObj.flagNoMoreFiles:
                break

    """

    ext = ".r"

    optchar = "D"
    dataOut = None


    def __init__(self):
        """
        VoltageReader constructor.

        The internal ``dataOut`` (a Voltage object, created here by default)
        is used to hand out one profile of data on every ``getData``
        request; when the buffer is empty a new block is read from disk.

        Affected:
            self.dataOut

        Return:
            None
        """

        self.isConfig = False

        self.datablock = None

        self.utc = 0

        self.ext = ".r"

        self.optchar = "D"

        # Header containers filled from each file as it is read.
        self.basicHeaderObj = BasicHeader(LOCALTIME)

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()

        self.online = 0

        self.fp = None

        self.idFile = None

        self.dtype = None

        self.fileSizeByHeader = None

        self.filenameList = []

        self.filename = None

        self.fileSize = None

        self.firstHeaderSize = 0

        self.basicHeaderSize = 24

        self.pathList = []

        self.filenameList = []

        self.lastUTTime = 0

        self.maxTimeStep = 30

        self.flagNoMoreFiles = 0

        self.set = 0

        self.path = None

        # Start past any valid index so the first getData() forces a block read.
        self.profileIndex = 2**32-1

        self.delay = 3 #seconds

        self.nTries = 3 #quantity tries

        self.nFiles = 3 #number of files for searching

        self.nReadBlocks = 0

        self.flagIsNewFile = 1

        self.__isFirstTimeOnline = 1

        self.ippSeconds = 0

        self.flagTimeBlock = 0

        self.flagIsNewBlock = 0

        self.nTotalBlocks = 0

        self.blocksize = 0

        self.dataOut = self.createObjByDefault()

    def createObjByDefault(self):
        """Create the default output container (an empty Voltage object)."""
        dataObj = Voltage()

        return dataObj

    def __hasNotDataInBuffer(self):
        # Truthy once every profile of the current block has been consumed.
        if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
            return 1
        return 0


    def getBlockDimension(self):
        """
        Compute the number of points to read per data block.

        Affected:
            self.blocksize

        Return:
            None
        """
        pts2read = self.processingHeaderObj.profilesPerBlock * self.processingHeaderObj.nHeights * self.systemHeaderObj.nChannels
        self.blocksize = pts2read


    def readBlock(self):
        """
        Read one data block from the current position of the file pointer
        (self.fp) and update every block-related parameter (metadata +
        data). The data read is stored in the buffer and the buffer counter
        is reset to 0.

        Inputs:
            None

        Return:
            1 on success (on short reads the online retry path is tried first)

        Affected:
            self.profileIndex
            self.datablock
            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks

        Exceptions:
            If a read block is not a valid block
        """
        current_pointer_location = self.fp.tell()
        junk = numpy.fromfile( self.fp, self.dtype, self.blocksize )

        try:
            junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
        except:
            #print "The read block (%3d) has not enough data" %self.nReadBlocks

            # Short read: in online mode wait for the file to grow and retry
            # the read from the saved pointer position.
            if self.waitDataBlock(pointer_location=current_pointer_location):
                junk = numpy.fromfile( self.fp, self.dtype, self.blocksize )
                junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
            # return 0

        # Reorder to (channels, profiles, heights) and combine the structured
        # 'real'/'imag' fields into complex samples.
        junk = numpy.transpose(junk, (2,0,1))
        self.datablock = junk['real'] + junk['imag']*1j

        self.profileIndex = 0

        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1

        self.nTotalBlocks += 1
        self.nReadBlocks += 1

        return 1

    def getFirstHeader(self):
        # Propagate the metadata of the freshly read file headers into dataOut.

        self.dataOut.dtype = self.dtype

        self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock

        xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight

        self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)

        self.dataOut.channelList = range(self.systemHeaderObj.nChannels)

        self.dataOut.ippSeconds = self.ippSeconds

        self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt

        self.dataOut.nCohInt = self.processingHeaderObj.nCohInt

        self.dataOut.flagShiftFFT = False

        if self.radarControllerHeaderObj.code != None:

            self.dataOut.nCode = self.radarControllerHeaderObj.nCode

            self.dataOut.nBaud = self.radarControllerHeaderObj.nBaud

            self.dataOut.code = self.radarControllerHeaderObj.code

        self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()

        self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()

        self.dataOut.flagDecodeData = False # assume the data has not been decoded

        self.dataOut.flagDeflipData = False # assume the data has not been flipped

        self.dataOut.flagShiftFFT = False

    def getData(self):
        """
        Take one unit of data from the read buffer and copy it into the
        "Voltage" output object together with its metadata. When the buffer
        is empty a new data block is read first via ``readNextBlock``.

        The buffer counter is then advanced by 1.

        Return:
            data : one profile of voltages (heights * channels) copied from
                   the buffer. Returns 0 when there are no more files to read.

        Affected:
            self.dataOut
            self.profileIndex
            self.flagTimeBlock
            self.flagIsNewBlock
        """

        if self.flagNoMoreFiles:
            self.dataOut.flagNoData = True
            print 'Process finished'
            return 0

        self.flagTimeBlock = 0
        self.flagIsNewBlock = 0

        if self.__hasNotDataInBuffer():

            if not( self.readNextBlock() ):
                return 0

            self.getFirstHeader()

        # NOTE(review): comparing a numpy array to None with ``==`` relies on
        # old numpy semantics; ``is None`` would be the robust test — confirm.
        if self.datablock == None:
            self.dataOut.flagNoData = True
            return 0

        self.dataOut.data = self.datablock[:,self.profileIndex,:]

        self.dataOut.flagNoData = False

        self.getBasicHeader()

        self.profileIndex += 1

        self.dataOut.realtime = self.online

        return self.dataOut.data
1642 1661
1643 1662
class VoltageWriter(JRODataWriter):
    """
    Writes voltage data to processed files (.r). Data is always written to
    disk one block at a time.
    """

    ext = ".r"

    optchar = "D"

    # Shape (profiles, heights, channels) used to serialize one block.
    shapeBuffer = None


    def __init__(self):
        """
        VoltageWriter constructor.

        Affected:
            self.dataOut

        Return: None
        """

        self.nTotalBlocks = 0

        self.profileIndex = 0

        self.isConfig = False

        self.fp = None

        self.flagIsNewFile = 1

        self.nTotalBlocks = 0

        self.flagIsNewBlock = 0

        self.setFile = None

        self.dtype = None

        self.path = None

        self.filename = None

        # Header containers serialized at the start of every output file.
        self.basicHeaderObj = BasicHeader(LOCALTIME)

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()

    def hasAllDataInBuffer(self):
        # Truthy once the buffer holds a complete block of profiles.
        if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
            return 1
        return 0


    def setBlockDimension(self):
        """
        Compute the dimensional shapes of the sub-buffers that make up one
        block and allocate the (zeroed) complex data buffer.

        Affected:
            self.shapeBuffer
            self.datablock

        Return: None
        """
        self.shapeBuffer = (self.processingHeaderObj.profilesPerBlock,
                            self.processingHeaderObj.nHeights,
                            self.systemHeaderObj.nChannels)

        self.datablock = numpy.zeros((self.systemHeaderObj.nChannels,
                                      self.processingHeaderObj.profilesPerBlock,
                                      self.processingHeaderObj.nHeights),
                                     dtype=numpy.dtype('complex64'))


    def writeBlock(self):
        """
        Serialize the buffered block into the designated file as interleaved
        real/imag samples, then reset the buffer.

        Affected:
            self.profileIndex
            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
            self.blockIndex

        Return: None
        """
        data = numpy.zeros( self.shapeBuffer, self.dtype )

        # Back to on-disk order (profiles, heights, channels).
        junk = numpy.transpose(self.datablock, (1,2,0))

        data['real'] = junk.real
        data['imag'] = junk.imag

        data = data.reshape( (-1) )

        data.tofile( self.fp )

        self.datablock.fill(0)

        self.profileIndex = 0
        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1

        self.blockIndex += 1
        self.nTotalBlocks += 1

    def putData(self):
        """
        Append one dataOut profile to the block buffer, flushing the buffer
        to file once it is full.

        Affected:
            self.flagIsNewBlock
            self.profileIndex

        Return:
            0 : no data available, or no more files can be written
            1 : one block of data was written to a file
        """
        if self.dataOut.flagNoData:
            return 0

        self.flagIsNewBlock = 0

        # A time discontinuity restarts the buffer in a fresh file.
        if self.dataOut.flagTimeBlock:

            self.datablock.fill(0)
            self.profileIndex = 0
            self.setNextFile()

        if self.profileIndex == 0:
            self.setBasicHeader()

        self.datablock[:,self.profileIndex,:] = self.dataOut.data

        self.profileIndex += 1

        if self.hasAllDataInBuffer():
            #if self.flagIsNewFile:
            self.writeNextBlock()
            # self.setFirstHeader()

        return 1

    def __getProcessFlags(self):
        # Build the processing-flags bitmask from the dataOut dtype and the
        # decode/deflip/code/integration state.

        processFlags = 0

        dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
        dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
        dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
        dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
        dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
        dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]



        datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
                             PROCFLAG.DATATYPE_SHORT,
                             PROCFLAG.DATATYPE_LONG,
                             PROCFLAG.DATATYPE_INT64,
                             PROCFLAG.DATATYPE_FLOAT,
                             PROCFLAG.DATATYPE_DOUBLE]


        for index in range(len(dtypeList)):
            if self.dataOut.dtype == dtypeList[index]:
                dtypeValue = datatypeValueList[index]
                break

        processFlags += dtypeValue

        if self.dataOut.flagDecodeData:
            processFlags += PROCFLAG.DECODE_DATA

        if self.dataOut.flagDeflipData:
            processFlags += PROCFLAG.DEFLIP_DATA

        if self.dataOut.code != None:
            processFlags += PROCFLAG.DEFINE_PROCESS_CODE

        if self.dataOut.nCohInt > 1:
            processFlags += PROCFLAG.COHERENT_INTEGRATION

        return processFlags


    def __getBlockSize(self):
        '''
        Determine the number of bytes of one Voltage-type data block.
        '''

        dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
        dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
        dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
        dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
        dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
        dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
        datatypeValueList = [1,2,4,8,4,8]
        for index in range(len(dtypeList)):
            if self.dataOut.dtype == dtypeList[index]:
                datatypeValue = datatypeValueList[index]
                break

        # * 2: a complex sample is stored as a real/imag pair.
        blocksize = int(self.dataOut.nHeights * self.dataOut.nChannels * self.profilesPerBlock * datatypeValue * 2)

        return blocksize

    def setFirstHeader(self):

        """
        Build the First Header of the file from dataOut.

        Affected:
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.dtype

        Return:
            None
        """

        self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
        self.systemHeaderObj.nChannels = self.dataOut.nChannels
        self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()

        self.setBasicHeader()

        processingHeaderSize = 40 # bytes
        self.processingHeaderObj.dtype = 0 # Voltage
        self.processingHeaderObj.blockSize = self.__getBlockSize()
        self.processingHeaderObj.profilesPerBlock = self.profilesPerBlock
        self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
        self.processingHeaderObj.nWindows = 1 # could be 1 or self.dataOut.processingHeaderObj.nWindows
        self.processingHeaderObj.processFlags = self.__getProcessFlags()
        self.processingHeaderObj.nCohInt = self.dataOut.nCohInt
        self.processingHeaderObj.nIncohInt = 1 # when the source data is of Voltage type
        self.processingHeaderObj.totalSpectra = 0 # when the source data is of Voltage type

        # if self.dataOut.code != None:
        #     self.processingHeaderObj.code = self.dataOut.code
        #     self.processingHeaderObj.nCode = self.dataOut.nCode
        #     self.processingHeaderObj.nBaud = self.dataOut.nBaud
        #     codesize = int(8 + 4 * self.dataOut.nCode * self.dataOut.nBaud)
        #     processingHeaderSize += codesize

        if self.processingHeaderObj.nWindows != 0:
            self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
            self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
            self.processingHeaderObj.nHeights = self.dataOut.nHeights
            self.processingHeaderObj.samplesWin = self.dataOut.nHeights
            processingHeaderSize += 12

        self.processingHeaderObj.size = processingHeaderSize
1908 1927
1909 1928 class SpectraReader(JRODataReader):
1910 1929 """
1911 1930 Esta clase permite leer datos de espectros desde archivos procesados (.pdata). La lectura
1912 1931 de los datos siempre se realiza por bloques. Los datos leidos (array de 3 dimensiones)
1913 1932 son almacenados en tres buffer's para el Self Spectra, el Cross Spectra y el DC Channel.
1914 1933
1915 1934 paresCanalesIguales * alturas * perfiles (Self Spectra)
1916 1935 paresCanalesDiferentes * alturas * perfiles (Cross Spectra)
1917 1936 canales * alturas (DC Channels)
1918 1937
1919 1938 Esta clase contiene instancias (objetos) de las clases BasicHeader, SystemHeader,
1920 1939 RadarControllerHeader y Spectra. Los tres primeros se usan para almacenar informacion de la
1921 1940 cabecera de datos (metadata), y el cuarto (Spectra) para obtener y almacenar un bloque de
1922 1941 datos desde el "buffer" cada vez que se ejecute el metodo "getData".
1923 1942
1924 1943 Example:
1925 1944 dpath = "/home/myuser/data"
1926 1945
1927 1946 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
1928 1947
1929 1948 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
1930 1949
1931 1950 readerObj = SpectraReader()
1932 1951
1933 1952 readerObj.setup(dpath, startTime, endTime)
1934 1953
1935 1954 while(True):
1936 1955
1937 1956 readerObj.getData()
1938 1957
1939 1958 print readerObj.data_spc
1940 1959
1941 1960 print readerObj.data_cspc
1942 1961
1943 1962 print readerObj.data_dc
1944 1963
1945 1964 if readerObj.flagNoMoreFiles:
1946 1965 break
1947 1966
1948 1967 """
1949 1968
1950 1969 pts2read_SelfSpectra = 0
1951 1970
1952 1971 pts2read_CrossSpectra = 0
1953 1972
1954 1973 pts2read_DCchannels = 0
1955 1974
1956 1975 ext = ".pdata"
1957 1976
1958 1977 optchar = "P"
1959 1978
1960 1979 dataOut = None
1961 1980
1962 1981 nRdChannels = None
1963 1982
1964 1983 nRdPairs = None
1965 1984
1966 1985 rdPairList = []
1967 1986
1968 1987 def __init__(self):
1969 1988 """
1970 1989 Inicializador de la clase SpectraReader para la lectura de datos de espectros.
1971 1990
1972 1991 Inputs:
1973 1992 dataOut : Objeto de la clase Spectra. Este objeto sera utilizado para
1974 1993 almacenar un perfil de datos cada vez que se haga un requerimiento
1975 1994 (getData). El perfil sera obtenido a partir del buffer de datos,
1976 1995 si el buffer esta vacio se hara un nuevo proceso de lectura de un
1977 1996 bloque de datos.
1978 1997 Si este parametro no es pasado se creara uno internamente.
1979 1998
1980 1999 Affected:
1981 2000 self.dataOut
1982 2001
1983 2002 Return : None
1984 2003 """
1985 2004
1986 2005 self.isConfig = False
1987 2006
1988 2007 self.pts2read_SelfSpectra = 0
1989 2008
1990 2009 self.pts2read_CrossSpectra = 0
1991 2010
1992 2011 self.pts2read_DCchannels = 0
1993 2012
1994 2013 self.datablock = None
1995 2014
1996 2015 self.utc = None
1997 2016
1998 2017 self.ext = ".pdata"
1999 2018
2000 2019 self.optchar = "P"
2001 2020
2002 2021 self.basicHeaderObj = BasicHeader(LOCALTIME)
2003 2022
2004 2023 self.systemHeaderObj = SystemHeader()
2005 2024
2006 2025 self.radarControllerHeaderObj = RadarControllerHeader()
2007 2026
2008 2027 self.processingHeaderObj = ProcessingHeader()
2009 2028
2010 2029 self.online = 0
2011 2030
2012 2031 self.fp = None
2013 2032
2014 2033 self.idFile = None
2015 2034
2016 2035 self.dtype = None
2017 2036
2018 2037 self.fileSizeByHeader = None
2019 2038
2020 2039 self.filenameList = []
2021 2040
2022 2041 self.filename = None
2023 2042
2024 2043 self.fileSize = None
2025 2044
2026 2045 self.firstHeaderSize = 0
2027 2046
2028 2047 self.basicHeaderSize = 24
2029 2048
2030 2049 self.pathList = []
2031 2050
2032 2051 self.lastUTTime = 0
2033 2052
2034 2053 self.maxTimeStep = 30
2035 2054
2036 2055 self.flagNoMoreFiles = 0
2037 2056
2038 2057 self.set = 0
2039 2058
2040 2059 self.path = None
2041 2060
2042 2061 self.delay = 60 #seconds
2043 2062
2044 2063 self.nTries = 3 #quantity tries
2045 2064
2046 2065 self.nFiles = 3 #number of files for searching
2047 2066
2048 2067 self.nReadBlocks = 0
2049 2068
2050 2069 self.flagIsNewFile = 1
2051 2070
2052 2071 self.__isFirstTimeOnline = 1
2053 2072
2054 2073 self.ippSeconds = 0
2055 2074
2056 2075 self.flagTimeBlock = 0
2057 2076
2058 2077 self.flagIsNewBlock = 0
2059 2078
2060 2079 self.nTotalBlocks = 0
2061 2080
2062 2081 self.blocksize = 0
2063 2082
2064 2083 self.dataOut = self.createObjByDefault()
2065 2084
2066 2085 self.profileIndex = 1 #Always
2067 2086
2068 2087
2069 2088 def createObjByDefault(self):
2070 2089
2071 2090 dataObj = Spectra()
2072 2091
2073 2092 return dataObj
2074 2093
2075 2094 def __hasNotDataInBuffer(self):
2076 2095 return 1
2077 2096
2078 2097
2079 2098 def getBlockDimension(self):
2080 2099 """
2081 2100 Obtiene la cantidad de puntos a leer por cada bloque de datos
2082 2101
2083 2102 Affected:
2084 2103 self.nRdChannels
2085 2104 self.nRdPairs
2086 2105 self.pts2read_SelfSpectra
2087 2106 self.pts2read_CrossSpectra
2088 2107 self.pts2read_DCchannels
2089 2108 self.blocksize
2090 2109 self.dataOut.nChannels
2091 2110 self.dataOut.nPairs
2092 2111
2093 2112 Return:
2094 2113 None
2095 2114 """
2096 2115 self.nRdChannels = 0
2097 2116 self.nRdPairs = 0
2098 2117 self.rdPairList = []
2099 2118
2100 2119 for i in range(0, self.processingHeaderObj.totalSpectra*2, 2):
2101 2120 if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i+1]:
2102 2121 self.nRdChannels = self.nRdChannels + 1 #par de canales iguales
2103 2122 else:
2104 2123 self.nRdPairs = self.nRdPairs + 1 #par de canales diferentes
2105 2124 self.rdPairList.append((self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1]))
2106 2125
2107 2126 pts2read = self.processingHeaderObj.nHeights * self.processingHeaderObj.profilesPerBlock
2108 2127
2109 2128 self.pts2read_SelfSpectra = int(self.nRdChannels * pts2read)
2110 2129 self.blocksize = self.pts2read_SelfSpectra
2111 2130
2112 2131 if self.processingHeaderObj.flag_cspc:
2113 2132 self.pts2read_CrossSpectra = int(self.nRdPairs * pts2read)
2114 2133 self.blocksize += self.pts2read_CrossSpectra
2115 2134
2116 2135 if self.processingHeaderObj.flag_dc:
2117 2136 self.pts2read_DCchannels = int(self.systemHeaderObj.nChannels * self.processingHeaderObj.nHeights)
2118 2137 self.blocksize += self.pts2read_DCchannels
2119 2138
2120 2139 # self.blocksize = self.pts2read_SelfSpectra + self.pts2read_CrossSpectra + self.pts2read_DCchannels
2121 2140
2122 2141
    def readBlock(self):
        """
        Read one data block starting at the current position of the file
        pointer (self.fp) and update every attribute related to that block
        (metadata + data). The data read is stored in the buffer and the
        buffer counter is reset to 0.

        Return: 1 (this implementation always reports success)

        Affected:

            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
            self.data_spc
            self.data_cspc
            self.data_dc

        Exceptions:
            If the block read is not a valid data block
        """
        blockOk_flag = False
        fpointer = self.fp.tell()

        # self-spectra are stored as plain reals (first field of the dtype)
        spc = numpy.fromfile( self.fp, self.dtype[0], self.pts2read_SelfSpectra )
        spc = spc.reshape( (self.nRdChannels, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #reshape into a 3D array

        if self.processingHeaderObj.flag_cspc:
            # cross-spectra are complex: read with the structured (real, imag) dtype
            cspc = numpy.fromfile( self.fp, self.dtype, self.pts2read_CrossSpectra )
            cspc = cspc.reshape( (self.nRdPairs, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #reshape into a 3D array

        if self.processingHeaderObj.flag_dc:
            dc = numpy.fromfile( self.fp, self.dtype, self.pts2read_DCchannels ) #int(self.processingHeaderObj.nHeights*self.systemHeaderObj.nChannels) )
            dc = dc.reshape( (self.systemHeaderObj.nChannels, self.processingHeaderObj.nHeights) ) #reshape into a 2D array


        if not(self.processingHeaderObj.shif_fft):
            #roll the spectrum along axis 2 so the zero frequency lands in the middle
            shift = int(self.processingHeaderObj.profilesPerBlock/2)
            spc = numpy.roll( spc, shift , axis=2 )

            if self.processingHeaderObj.flag_cspc:
                #apply the same shift to the cross-spectra
                cspc = numpy.roll( cspc, shift, axis=2 )

#            self.processingHeaderObj.shif_fft = True

        # reorder axes from (channel, height, profile) to (channel, profile, height)
        spc = numpy.transpose( spc, (0,2,1) )
        self.data_spc = spc

        if self.processingHeaderObj.flag_cspc:
            cspc = numpy.transpose( cspc, (0,2,1) )
            # rebuild complex values from the structured (real, imag) fields
            self.data_cspc = cspc['real'] + cspc['imag']*1j
        else:
            self.data_cspc = None

        if self.processingHeaderObj.flag_dc:
            self.data_dc = dc['real'] + dc['imag']*1j
        else:
            self.data_dc = None

        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1

        self.nTotalBlocks += 1
        self.nReadBlocks += 1

        return 1
2191 2210
2192 2211 def getFirstHeader(self):
2193 2212
2194 2213 self.dataOut.dtype = self.dtype
2195 2214
2196 2215 self.dataOut.nPairs = self.nRdPairs
2197 2216
2198 2217 self.dataOut.pairsList = self.rdPairList
2199 2218
2200 2219 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
2201 2220
2202 2221 self.dataOut.nFFTPoints = self.processingHeaderObj.profilesPerBlock
2203 2222
2204 2223 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
2205 2224
2206 2225 self.dataOut.nIncohInt = self.processingHeaderObj.nIncohInt
2207 2226
2208 2227 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
2209 2228
2210 2229 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
2211 2230
2212 2231 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
2213 2232
2214 2233 self.dataOut.ippSeconds = self.ippSeconds
2215 2234
2216 2235 self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt * self.processingHeaderObj.nIncohInt * self.dataOut.nFFTPoints
2217 2236
2218 2237 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
2219 2238
2220 2239 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
2221 2240
2222 2241 self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft
2223 2242
2224 2243 self.dataOut.flagDecodeData = False #asumo q la data no esta decodificada
2225 2244
2226 2245 self.dataOut.flagDeflipData = True #asumo q la data no esta sin flip
2227 2246
2228 2247 if self.processingHeaderObj.code != None:
2229 2248
2230 2249 self.dataOut.nCode = self.processingHeaderObj.nCode
2231 2250
2232 2251 self.dataOut.nBaud = self.processingHeaderObj.nBaud
2233 2252
2234 2253 self.dataOut.code = self.processingHeaderObj.code
2235 2254
2236 2255 self.dataOut.flagDecodeData = True
2237 2256
2238 2257 def getData(self):
2239 2258 """
2240 2259 Copia el buffer de lectura a la clase "Spectra",
2241 2260 con todos los parametros asociados a este (metadata). cuando no hay datos en el buffer de
2242 2261 lectura es necesario hacer una nueva lectura de los bloques de datos usando "readNextBlock"
2243 2262
2244 2263 Return:
2245 2264 0 : Si no hay mas archivos disponibles
2246 2265 1 : Si hizo una buena copia del buffer
2247 2266
2248 2267 Affected:
2249 2268 self.dataOut
2250 2269
2251 2270 self.flagTimeBlock
2252 2271 self.flagIsNewBlock
2253 2272 """
2254 2273
2255 2274 if self.flagNoMoreFiles:
2256 2275 self.dataOut.flagNoData = True
2257 2276 print 'Process finished'
2258 2277 return 0
2259 2278
2260 2279 self.flagTimeBlock = 0
2261 2280 self.flagIsNewBlock = 0
2262 2281
2263 2282 if self.__hasNotDataInBuffer():
2264 2283
2265 2284 if not( self.readNextBlock() ):
2266 2285 self.dataOut.flagNoData = True
2267 2286 return 0
2268 2287
2269 2288 #data es un numpy array de 3 dmensiones (perfiles, alturas y canales)
2270 2289
2271 2290 if self.data_dc == None:
2272 2291 self.dataOut.flagNoData = True
2273 2292 return 0
2274 2293
2275 2294 self.getBasicHeader()
2276 2295
2277 2296 self.getFirstHeader()
2278 2297
2279 2298 self.dataOut.data_spc = self.data_spc
2280 2299
2281 2300 self.dataOut.data_cspc = self.data_cspc
2282 2301
2283 2302 self.dataOut.data_dc = self.data_dc
2284 2303
2285 2304 self.dataOut.flagNoData = False
2286 2305
2287 2306 self.dataOut.realtime = self.online
2288 2307
2289 2308 return self.dataOut.data_spc
2290 2309
2291 2310
class SpectraWriter(JRODataWriter):

    """
    Writes spectral data to processed data files (.pdata). Data is always
    written out in whole blocks.
    """

    ext = ".pdata"

    optchar = "P"

    shape_spc_Buffer = None     # expected shape of the self-spectra buffer

    shape_cspc_Buffer = None    # expected shape of the cross-spectra buffer

    shape_dc_Buffer = None      # expected shape of the DC-channels buffer

    data_spc = None

    data_cspc = None

    data_dc = None

#    dataOut = None

    def __init__(self):
        """
        Initialize the SpectraWriter for writing spectra data files.

        Affected:
            self.dataOut
            self.basicHeaderObj
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.processingHeaderObj

        Return: None
        """

        self.isConfig = False

        # NOTE: the original assigned nTotalBlocks twice; the duplicate was removed
        self.nTotalBlocks = 0

        self.data_spc = None

        self.data_cspc = None

        self.data_dc = None

        self.fp = None

        self.flagIsNewFile = 1

        self.flagIsNewBlock = 0

        self.setFile = None

        self.dtype = None

        self.path = None

        self.noMoreFiles = 0

        self.filename = None

        self.basicHeaderObj = BasicHeader(LOCALTIME)

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()


    def hasAllDataInBuffer(self):
        # Exactly one block is staged per putData() call, so the buffer is
        # always ready to be flushed.
        return 1


    def setBlockDimension(self):
        """
        Compute the dimensional shapes of the sub-blocks that make up a block.

        Affected:
            self.shape_spc_Buffer
            self.shape_cspc_Buffer
            self.shape_dc_Buffer

        Return: None
        """
        self.shape_spc_Buffer = (self.dataOut.nChannels,
                                 self.processingHeaderObj.nHeights,
                                 self.processingHeaderObj.profilesPerBlock)

        self.shape_cspc_Buffer = (self.dataOut.nPairs,
                                  self.processingHeaderObj.nHeights,
                                  self.processingHeaderObj.profilesPerBlock)

        self.shape_dc_Buffer = (self.dataOut.nChannels,
                                self.processingHeaderObj.nHeights)


    def writeBlock(self):
        """
        Write the staged buffers to the currently open file.

        Affected:
            self.data_spc
            self.data_cspc
            self.data_dc
            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
            self.nWriteBlocks

        Return: None
        """

        spc = numpy.transpose( self.data_spc, (0,2,1) )
        if not( self.processingHeaderObj.shif_fft ):
            # FIX: integer division (//) -- identical on Python 2, avoids a
            # float shift on Python 3
            spc = numpy.roll( spc, self.processingHeaderObj.profilesPerBlock//2, axis=2 ) #undo the zero-frequency-centering shift
        data = spc.reshape((-1))
        data = data.astype(self.dtype[0])
        data.tofile(self.fp)

        # FIX throughout this method: "is (not) None" instead of "== / != None"
        # because the buffers are numpy arrays and "==" compares elementwise.
        if self.data_cspc is not None:
            data = numpy.zeros( self.shape_cspc_Buffer, self.dtype )
            cspc = numpy.transpose( self.data_cspc, (0,2,1) )
            if not( self.processingHeaderObj.shif_fft ):
                cspc = numpy.roll( cspc, self.processingHeaderObj.profilesPerBlock//2, axis=2 ) #undo the zero-frequency-centering shift
            data['real'] = cspc.real
            data['imag'] = cspc.imag
            data = data.reshape((-1))
            data.tofile(self.fp)

        if self.data_dc is not None:
            data = numpy.zeros( self.shape_dc_Buffer, self.dtype )
            dc = self.data_dc
            data['real'] = dc.real
            data['imag'] = dc.imag
            data = data.reshape((-1))
            data.tofile(self.fp)

        self.data_spc.fill(0)

        if self.data_dc is not None:
            self.data_dc.fill(0)

        if self.data_cspc is not None:
            self.data_cspc.fill(0)

        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1
        self.nTotalBlocks += 1
        self.nWriteBlocks += 1
        self.blockIndex += 1


    def putData(self):
        """
        Stage one block of data and write it out to a file.

        Affected:
            self.data_spc
            self.data_cspc
            self.data_dc

        Return:
            0 : no data, or no more files can be written
            1 : a block of data was written to a file
        """

        if self.dataOut.flagNoData:
            return 0

        self.flagIsNewBlock = 0

        if self.dataOut.flagTimeBlock:
            self.data_spc.fill(0)
            self.data_cspc.fill(0)
            self.data_dc.fill(0)
            self.setNextFile()

        if self.flagIsNewFile == 0:
            self.setBasicHeader()

        self.data_spc = self.dataOut.data_spc.copy()
        # FIX: "is not None" -- data_cspc may be a numpy array
        if self.dataOut.data_cspc is not None:
            self.data_cspc = self.dataOut.data_cspc.copy()
        self.data_dc = self.dataOut.data_dc.copy()

        # #self.processingHeaderObj.dataBlocksPerFile)
        if self.hasAllDataInBuffer():
#            self.setFirstHeader()
            self.writeNextBlock()

        return 1


    def __getProcessFlags(self):
        """
        Pack the data type and processing options of self.dataOut into a
        single PROCFLAG bit mask.

        Return: the combined process-flags integer
        """

        processFlags = 0

        dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
        dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
        dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
        dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
        dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
        dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]

        datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
                             PROCFLAG.DATATYPE_SHORT,
                             PROCFLAG.DATATYPE_LONG,
                             PROCFLAG.DATATYPE_INT64,
                             PROCFLAG.DATATYPE_FLOAT,
                             PROCFLAG.DATATYPE_DOUBLE]

        for index in range(len(dtypeList)):
            if self.dataOut.dtype == dtypeList[index]:
                dtypeValue = datatypeValueList[index]
                break

        processFlags += dtypeValue

        if self.dataOut.flagDecodeData:
            processFlags += PROCFLAG.DECODE_DATA

        if self.dataOut.flagDeflipData:
            processFlags += PROCFLAG.DEFLIP_DATA

        # FIX: "is not None" -- code may be a numpy array
        if self.dataOut.code is not None:
            processFlags += PROCFLAG.DEFINE_PROCESS_CODE

        if self.dataOut.nIncohInt > 1:
            processFlags += PROCFLAG.INCOHERENT_INTEGRATION

        if self.dataOut.data_dc is not None:
            processFlags += PROCFLAG.SAVE_CHANNELS_DC

        return processFlags


    def __getBlockSize(self):
        '''
        Compute the number of bytes of one Spectra data block.
        '''

        dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
        dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
        dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
        dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
        dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
        dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
        datatypeValueList = [1,2,4,8,4,8]   # bytes per scalar for each dtype
        for index in range(len(dtypeList)):
            if self.dataOut.dtype == dtypeList[index]:
                datatypeValue = datatypeValueList[index]
                break


        pts2write = self.dataOut.nHeights * self.dataOut.nFFTPoints

        pts2write_SelfSpectra = int(self.dataOut.nChannels * pts2write)
        blocksize = (pts2write_SelfSpectra*datatypeValue)

        # FIX: "is not None" -- these attributes hold numpy arrays
        if self.dataOut.data_cspc is not None:
            pts2write_CrossSpectra = int(self.dataOut.nPairs * pts2write)
            blocksize += (pts2write_CrossSpectra*datatypeValue*2) #complex: real + imag

        if self.dataOut.data_dc is not None:
            pts2write_DCchannels = int(self.dataOut.nChannels * self.dataOut.nHeights)
            blocksize += (pts2write_DCchannels*datatypeValue*2) #complex: real + imag

        return blocksize

    def setFirstHeader(self):

        """
        Build the first (processing) header from self.dataOut.

        Affected:
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.processingHeaderObj

        Return:
            None
        """

        self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
        self.systemHeaderObj.nChannels = self.dataOut.nChannels
        self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()

        self.setBasicHeader()

        processingHeaderSize = 40 # bytes
        self.processingHeaderObj.dtype = 1 # Spectra
        self.processingHeaderObj.blockSize = self.__getBlockSize()
        self.processingHeaderObj.profilesPerBlock = self.dataOut.nFFTPoints
        self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
        self.processingHeaderObj.nWindows = 1 #could be 1 or self.dataOut.processingHeaderObj.nWindows
        self.processingHeaderObj.processFlags = self.__getProcessFlags()
        self.processingHeaderObj.nCohInt = self.dataOut.nCohInt# needed to derive timeInterval
        self.processingHeaderObj.nIncohInt = self.dataOut.nIncohInt
        self.processingHeaderObj.totalSpectra = self.dataOut.nPairs + self.dataOut.nChannels
        self.processingHeaderObj.shif_fft = self.dataOut.flagShiftFFT

        if self.processingHeaderObj.totalSpectra > 0:
            channelList = []
            # each self-spectrum is encoded as a (channel, channel) pair
            for channel in range(self.dataOut.nChannels):
                channelList.append(channel)
                channelList.append(channel)

            pairsList = []
            if self.dataOut.nPairs > 0:
                for pair in self.dataOut.pairsList:
                    pairsList.append(pair[0])
                    pairsList.append(pair[1])

            spectraComb = channelList + pairsList
            spectraComb = numpy.array(spectraComb,dtype="u1")
            self.processingHeaderObj.spectraComb = spectraComb
            sizeOfSpcComb = len(spectraComb)
            processingHeaderSize += sizeOfSpcComb

        # The processing header should not have information about code
#        if self.dataOut.code != None:
#            self.processingHeaderObj.code = self.dataOut.code
#            self.processingHeaderObj.nCode = self.dataOut.nCode
#            self.processingHeaderObj.nBaud = self.dataOut.nBaud
#            nCodeSize = 4 # bytes
#            nBaudSize = 4 # bytes
#            codeSize = 4 # bytes
#            sizeOfCode = int(nCodeSize + nBaudSize + codeSize * self.dataOut.nCode * self.dataOut.nBaud)
#            processingHeaderSize += sizeOfCode

        if self.processingHeaderObj.nWindows != 0:
            self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
            self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
            self.processingHeaderObj.nHeights = self.dataOut.nHeights
            self.processingHeaderObj.samplesWin = self.dataOut.nHeights
            sizeOfFirstHeight = 4
            sizeOfdeltaHeight = 4
            sizeOfnHeights = 4
            sizeOfWindows = (sizeOfFirstHeight + sizeOfdeltaHeight + sizeOfnHeights)*self.processingHeaderObj.nWindows
            processingHeaderSize += sizeOfWindows

        self.processingHeaderObj.size = processingHeaderSize
2650 2669
class SpectraHeisWriter(Operation):
    """
    Writes Heis spectra blocks to FITS files (one file per set) using the
    project FITS helper object.
    """
#    set = None
    setFile = None      # current set number within the day folder
    idblock = None      # running block counter across files
    doypath = None      # day-of-year output folder, created lazily
    subfolder = None

    def __init__(self):
        self.wrObj = FITS()
#        self.dataOut = dataOut
        self.nTotalBlocks = 0
#        self.set = None
        self.setFile = None
        self.idblock = 0
        self.wrpath = None
        self.doypath = None
        self.subfolder = None
        self.isConfig = False

    @staticmethod
    def isNumber(value):
        """
        Return True if *value* can be converted to a float, False otherwise.

        FIX: originally declared as an instance method whose only parameter
        captured `self`, so it could never be called correctly on an
        instance; now a staticmethod (it duplicates the module-level
        isNumber helper). The bare `except:` was narrowed to the exceptions
        float() actually raises.
        """
        try:
            float(value)
            return True
        except (TypeError, ValueError):
            return False

    def setup(self, dataOut, wrpath):
        """Remember the output path (creating it if needed) and the data source."""
        if not(os.path.exists(wrpath)):
            os.mkdir(wrpath)

        self.wrpath = wrpath
#        self.setFile = 0
        self.dataOut = dataOut

    def putData(self):
        """
        Write the current spectra block of self.dataOut to a new FITS file.

        Return:
            1 : the file was written
        """
        name = time.localtime( self.dataOut.utctime)
        ext = ".fits"

        # Create the day folder once per run (FIX: "is None" comparisons)
        if self.doypath is None:
            self.subfolder = 'F%4.4d%3.3d_%d' % (name.tm_year,name.tm_yday,time.mktime(datetime.datetime.now().timetuple()))
            self.doypath = os.path.join( self.wrpath, self.subfolder )
            os.mkdir(self.doypath)

        if self.setFile is None:
#            self.set = self.dataOut.set
            self.setFile = 0
#        if self.set != self.dataOut.set:
##            self.set = self.dataOut.set
#            self.setFile = 0

        #make the filename (local renamed from "file" to avoid shadowing the builtin)
        basename = 'D%4.4d%3.3d_%3.3d%s' % (name.tm_year,name.tm_yday,self.setFile,ext)

        filename = os.path.join(self.wrpath,self.subfolder, basename)

        idblock = numpy.array([self.idblock],dtype="int64")
        header = self.wrObj.cFImage(idblock=idblock,
                                    year=time.gmtime(self.dataOut.utctime).tm_year,
                                    month=time.gmtime(self.dataOut.utctime).tm_mon,
                                    day=time.gmtime(self.dataOut.utctime).tm_mday,
                                    hour=time.gmtime(self.dataOut.utctime).tm_hour,
                                    minute=time.gmtime(self.dataOut.utctime).tm_min,
                                    second=time.gmtime(self.dataOut.utctime).tm_sec)

        c = 3E8  # speed of light, used to derive the frequency axis
        deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
        freq = numpy.arange(-1*self.dataOut.nHeights/2.,self.dataOut.nHeights/2.)*(c/(2*deltaHeight*1000))

        colList = []

        colFreq = self.wrObj.setColF(name="freq", format=str(self.dataOut.nFFTPoints)+'E', array=freq)

        colList.append(colFreq)

        nchannel = self.dataOut.nChannels

        # one power column (in dB) per channel
        for i in range(nchannel):
            col = self.wrObj.writeData(name="PCh"+str(i+1),
                                       format=str(self.dataOut.nFFTPoints)+'E',
                                       data=10*numpy.log10(self.dataOut.data_spc[i,:]))

            colList.append(col)

        data = self.wrObj.Ctable(colList=colList)

        self.wrObj.CFile(header,data)

        self.wrObj.wFile(filename)

        #update the setFile
        self.setFile += 1
        self.idblock += 1

        return 1

    def run(self, dataOut, **kwargs):
        """Operation entry point: lazy one-time setup, then write the block."""
        if not(self.isConfig):

            self.setup(dataOut, **kwargs)
            self.isConfig = True

        self.putData()
2767 2786
2768 2787
2769 2788
class ParameterConf:
    """Represents one <Parameter name="..." value="..."/> element of a metadata XML file."""

    ELEMENTNAME = 'Parameter'

    def __init__(self):
        # both attributes are filled in by readXml()
        self.name = ''
        self.value = ''

    def readXml(self, parmElement):
        """Load name/value straight from the XML element's attributes."""
        self.name, self.value = parmElement.get('name'), parmElement.get('value')

    def getElementName(self):
        """Return the XML tag this class maps to."""
        return self.ELEMENTNAME
2782 2801
class Metadata:
    """
    Parses a metadata XML file and collects every <Parameter> element it
    contains into self.parmConfObjList (as ParameterConf instances).
    """

    def __init__(self, filename):
        self.parmConfObjList = []
        self.readXml(filename)

    def readXml(self, filename):
        """Parse *filename* and append its <Parameter> elements to the list."""
        self.projectElement = None
        self.procUnitConfObjDict = {}
        self.projectElement = ElementTree().parse(filename)
        self.project = self.projectElement.tag

        # FIX: iter() replaces getiterator(), which is deprecated and was
        # removed in Python 3.9 (iter() is available since Python 2.7).
        parmElementList = self.projectElement.iter(ParameterConf().getElementName())

        for parmElement in parmElementList:
            parmConfObj = ParameterConf()
            parmConfObj.readXml(parmElement)
            self.parmConfObjList.append(parmConfObj)
2801 2820
class FitsWriter(Operation):
    """
    Writes processed spectra to FITS files: a primary HDU built from a
    metadata XML file, plus one image extension per data block.
    """

    def __init__(self):
        self.isConfig = False
        self.dataBlocksPerFile = None   # blocks written before rolling to a new file
        self.blockIndex = 0
        self.flagIsNewFile = 1
        self.fitsObj = None
        self.optchar = 'P'
        self.ext = '.fits'
        self.setFile = 0

    def setFitsHeader(self, dataOut, metadatafile):
        """
        Create the primary HDU of self.filename from the metadata XML file
        plus the acquisition parameters of *dataOut*, then append the
        height list as a first extension.
        """

        header_data = pyfits.PrimaryHDU()

        # every <Parameter> from the metadata file becomes a header card
        metadata4fits = Metadata(metadatafile)
        for parameter in metadata4fits.parmConfObjList:
            parm_name = parameter.name
            parm_value = parameter.value

#            if parm_value == 'fromdatadatetime':
#                value = time.strftime("%b %d %Y %H:%M:%S", dataOut.datatime.timetuple())
#            elif parm_value == 'fromdataheights':
#                value = dataOut.nHeights
#            elif parm_value == 'fromdatachannel':
#                value = dataOut.nChannels
#            elif parm_value == 'fromdatasamples':
#                value = dataOut.nFFTPoints
#            else:
#                value = parm_value

            header_data.header[parm_name] = parm_value


        header_data.header['DATETIME'] = time.strftime("%b %d %Y %H:%M:%S", dataOut.datatime.timetuple())
        header_data.header['CHANNELLIST'] = str(dataOut.channelList)
        header_data.header['NCHANNELS'] = dataOut.nChannels
        #header_data.header['HEIGHTS'] = dataOut.heightList
        header_data.header['NHEIGHTS'] = dataOut.nHeights

        header_data.header['IPPSECONDS'] = dataOut.ippSeconds
        header_data.header['NCOHINT'] = dataOut.nCohInt
        header_data.header['NINCOHINT'] = dataOut.nIncohInt
        header_data.header['TIMEZONE'] = dataOut.timeZone
        header_data.header['NBLOCK'] = self.blockIndex

        header_data.writeto(self.filename)

        self.addExtension(dataOut.heightList,'HEIGHTLIST')


    def setup(self, dataOut, path, dataBlocksPerFile, metadatafile):
        """Store the writer configuration; called once from run()."""
        self.path = path
        self.dataOut = dataOut
        self.metadatafile = metadatafile
        self.dataBlocksPerFile = dataBlocksPerFile

    def open(self):
        # reopen the current file in update mode to append extensions
        self.fitsObj = pyfits.open(self.filename, mode='update')


    def addExtension(self, data, tagname):
        """Append *data* as a named image extension to the current file."""
        self.open()
        extension = pyfits.ImageHDU(data=data, name=tagname)
        #extension.header['TAG'] = tagname
        self.fitsObj.append(extension)
        self.write()

    def addData(self, data):
        """Append one data block as an extension and bump the block counter."""
        self.open()
        extension = pyfits.ImageHDU(data=data, name=self.fitsObj[0].header['DATATYPE'])
        extension.header['UTCTIME'] = self.dataOut.utctime
        self.fitsObj.append(extension)
        self.blockIndex += 1
        self.fitsObj[0].header['NBLOCK'] = self.blockIndex

        self.write()

    def write(self):
        # flush pending changes and release the file handle
        self.fitsObj.flush(verbose=True)
        self.fitsObj.close()


    def setNextFile(self):
        """
        Compute the next output filename (P<year><doy><set>.fits inside a
        d<year><doy> subfolder), resuming the set counter from any files
        already present, then write the new file's primary header.

        Return:
            1 : the new file was set up
        """

        ext = self.ext
        path = self.path

        timeTuple = time.localtime( self.dataOut.utctime)
        subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)

        fullpath = os.path.join( path, subfolder )
        if not( os.path.exists(fullpath) ):
            os.mkdir(fullpath)
            self.setFile = -1 #initialize the set counter
        else:
            filesList = os.listdir( fullpath )
            if len( filesList ) > 0:
                filesList = sorted( filesList, key=str.lower )
                filen = filesList[-1]

                if isNumber( filen[8:11] ):
                    self.setFile = int( filen[8:11] ) #resume from the last file's set number
                else:
                    self.setFile = -1
            else:
                self.setFile = -1 #initialize the set counter

        setFile = self.setFile
        setFile += 1

        # local renamed from "file" to avoid shadowing the builtin
        basename = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
                                            timeTuple.tm_year,
                                            timeTuple.tm_yday,
                                            setFile,
                                            ext )

        filename = os.path.join( path, subfolder, basename )

        self.blockIndex = 0
        self.filename = filename
        self.setFile = setFile
        self.flagIsNewFile = 1

        # FIX: print() call form -- same output on Python 2, valid on Python 3
        print('Writing the file: %s' % self.filename)

        self.setFitsHeader(self.dataOut, self.metadatafile)

        return 1

    def writeBlock(self):
        self.addData(self.dataOut.data_spc)
        self.flagIsNewFile = 0


    def __setNewBlock(self):
        """Return 1 if the current file can take another block, rolling files when full."""
        if self.flagIsNewFile:
            return 1

        if self.blockIndex < self.dataBlocksPerFile:
            return 1

        if not( self.setNextFile() ):
            return 0

        return 1

    def writeNextBlock(self):
        if not( self.__setNewBlock() ):
            return 0
        self.writeBlock()
        return 1

    def putData(self):
        if self.flagIsNewFile:
            self.setNextFile()
        self.writeNextBlock()

    def run(self, dataOut, **kwargs):
        """Operation entry point: lazy one-time setup, then write the block."""
        if not(self.isConfig):
            self.setup(dataOut, **kwargs)
            self.isConfig = True
        self.putData()
2969 2988
2970 2989
2971 2990 class FitsReader(ProcessingUnit):
2972 2991
2973 2992 # __TIMEZONE = time.timezone
2974 2993
2975 2994 expName = None
2976 2995 datetimestr = None
2977 2996 utc = None
2978 2997 nChannels = None
2979 2998 nSamples = None
2980 2999 dataBlocksPerFile = None
2981 3000 comments = None
2982 3001 lastUTTime = None
2983 3002 header_dict = None
2984 3003 data = None
2985 3004 data_header_dict = None
2986 3005
2987 3006 def __init__(self):
2988 3007 self.isConfig = False
2989 3008 self.ext = '.fits'
2990 3009 self.setFile = 0
2991 3010 self.flagNoMoreFiles = 0
2992 3011 self.flagIsNewFile = 1
2993 3012 self.flagTimeBlock = None
2994 3013 self.fileIndex = None
2995 3014 self.filename = None
2996 3015 self.fileSize = None
2997 3016 self.fitsObj = None
2998 3017 self.timeZone = None
2999 3018 self.nReadBlocks = 0
3000 3019 self.nTotalBlocks = 0
3001 3020 self.dataOut = self.createObjByDefault()
3002 3021 self.maxTimeStep = 10# deberia ser definido por el usuario usando el metodo setup()
3003 3022 self.blockIndex = 1
3004 3023
3005 3024 def createObjByDefault(self):
3006 3025
3007 3026 dataObj = Fits()
3008 3027
3009 3028 return dataObj
3010 3029
3011 3030 def isFileinThisTime(self, filename, startTime, endTime, useLocalTime=False):
3012 3031 try:
3013 3032 fitsObj = pyfits.open(filename,'readonly')
3014 3033 except:
3015 3034 raise IOError, "The file %s can't be opened" %(filename)
3016 3035
3017 3036 header = fitsObj[0].header
3018 3037 struct_time = time.strptime(header['DATETIME'], "%b %d %Y %H:%M:%S")
3019 3038 utc = time.mktime(struct_time) - time.timezone #TIMEZONE debe ser un parametro del header FITS
3020 3039
3021 3040 ltc = utc
3022 3041 if useLocalTime:
3023 3042 ltc -= time.timezone
3024 3043 thisDatetime = datetime.datetime.utcfromtimestamp(ltc)
3025 3044 thisTime = thisDatetime.time()
3026 3045
3027 3046 if not ((startTime <= thisTime) and (endTime > thisTime)):
3028 3047 return None
3029 3048
3030 3049 return thisDatetime
3031 3050
3032 3051 def __setNextFileOnline(self):
3033 3052 raise ValueError, "No implemented"
3034 3053
3035 3054 def __setNextFileOffline(self):
3036 3055 idFile = self.fileIndex
3037 3056
3038 3057 while (True):
3039 3058 idFile += 1
3040 3059 if not(idFile < len(self.filenameList)):
3041 3060 self.flagNoMoreFiles = 1
3042 3061 print "No more Files"
3043 3062 return 0
3044 3063
3045 3064 filename = self.filenameList[idFile]
3046 3065
3047 3066 # if not(self.__verifyFile(filename)):
3048 3067 # continue
3049 3068
3050 3069 fileSize = os.path.getsize(filename)
3051 3070 fitsObj = pyfits.open(filename,'readonly')
3052 3071 break
3053 3072
3054 3073 self.flagIsNewFile = 1
3055 3074 self.fileIndex = idFile
3056 3075 self.filename = filename
3057 3076 self.fileSize = fileSize
3058 3077 self.fitsObj = fitsObj
3059 3078 self.blockIndex = 0
3060 3079 print "Setting the file: %s"%self.filename
3061 3080
3062 3081 return 1
3063 3082
3064 3083 def readHeader(self):
3065 3084 headerObj = self.fitsObj[0]
3066 3085
3067 3086 self.header_dict = headerObj.header
3068 3087 if 'EXPNAME' in headerObj.header.keys():
3069 3088 self.expName = headerObj.header['EXPNAME']
3070 3089
3071 3090 if 'DATATYPE' in headerObj.header.keys():
3072 3091 self.dataType = headerObj.header['DATATYPE']
3073 3092
3074 3093 self.datetimestr = headerObj.header['DATETIME']
3075 3094 channelList = headerObj.header['CHANNELLIST']
3076 3095 channelList = channelList.split('[')
3077 3096 channelList = channelList[1].split(']')
3078 3097 channelList = channelList[0].split(',')
3079 3098 channelList = [int(ch) for ch in channelList]
3080 3099 self.channelList = channelList
3081 3100 self.nChannels = headerObj.header['NCHANNELS']
3082 3101 self.nHeights = headerObj.header['NHEIGHTS']
3083 3102 self.ippSeconds = headerObj.header['IPPSECONDS']
3084 3103 self.nCohInt = headerObj.header['NCOHINT']
3085 3104 self.nIncohInt = headerObj.header['NINCOHINT']
3086 3105 self.dataBlocksPerFile = headerObj.header['NBLOCK']
3087 3106 self.timeZone = headerObj.header['TIMEZONE']
3088 3107
3089 3108 self.timeInterval = self.ippSeconds * self.nCohInt * self.nIncohInt
3090 3109
3091 3110 if 'COMMENT' in headerObj.header.keys():
3092 3111 self.comments = headerObj.header['COMMENT']
3093 3112
3094 3113 self.readHeightList()
3095 3114
3096 3115 def readHeightList(self):
3097 3116 self.blockIndex = self.blockIndex + 1
3098 3117 obj = self.fitsObj[self.blockIndex]
3099 3118 self.heightList = obj.data
3100 3119 self.blockIndex = self.blockIndex + 1
3101 3120
3102 3121 def readExtension(self):
3103 3122 obj = self.fitsObj[self.blockIndex]
3104 3123 self.heightList = obj.data
3105 3124 self.blockIndex = self.blockIndex + 1
3106 3125
3107 3126 def setNextFile(self):
3108 3127
3109 3128 if self.online:
3110 3129 newFile = self.__setNextFileOnline()
3111 3130 else:
3112 3131 newFile = self.__setNextFileOffline()
3113 3132
3114 3133 if not(newFile):
3115 3134 return 0
3116 3135
3117 3136 self.readHeader()
3118 3137
3119 3138 self.nReadBlocks = 0
3120 3139 # self.blockIndex = 1
3121 3140 return 1
3122 3141
3123 3142 def __searchFilesOffLine(self,
3124 3143 path,
3125 3144 startDate,
3126 3145 endDate,
3127 3146 startTime=datetime.time(0,0,0),
3128 3147 endTime=datetime.time(23,59,59),
3129 3148 set=None,
3130 3149 expLabel='',
3131 3150 ext='.fits',
3132 3151 walk=True):
3133 3152
3134 3153 pathList = []
3135 3154
3136 3155 if not walk:
3137 3156 pathList.append(path)
3138 3157
3139 3158 else:
3140 3159 dirList = []
3141 3160 for thisPath in os.listdir(path):
3142 3161 if not os.path.isdir(os.path.join(path,thisPath)):
3143 3162 continue
3144 3163 if not isDoyFolder(thisPath):
3145 3164 continue
3146 3165
3147 3166 dirList.append(thisPath)
3148 3167
3149 3168 if not(dirList):
3150 3169 return None, None
3151 3170
3152 3171 thisDate = startDate
3153 3172
3154 3173 while(thisDate <= endDate):
3155 3174 year = thisDate.timetuple().tm_year
3156 3175 doy = thisDate.timetuple().tm_yday
3157 3176
3158 3177 matchlist = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy) + '*')
3159 3178 if len(matchlist) == 0:
3160 3179 thisDate += datetime.timedelta(1)
3161 3180 continue
3162 3181 for match in matchlist:
3163 3182 pathList.append(os.path.join(path,match,expLabel))
3164 3183
3165 3184 thisDate += datetime.timedelta(1)
3166 3185
3167 3186 if pathList == []:
3168 3187 print "Any folder was found for the date range: %s-%s" %(startDate, endDate)
3169 3188 return None, None
3170 3189
3171 3190 print "%d folder(s) was(were) found for the date range: %s - %s" %(len(pathList), startDate, endDate)
3172 3191
3173 3192 filenameList = []
3174 3193 datetimeList = []
3175 3194
3176 3195 for i in range(len(pathList)):
3177 3196
3178 3197 thisPath = pathList[i]
3179 3198
3180 3199 fileList = glob.glob1(thisPath, "*%s" %ext)
3181 3200 fileList.sort()
3182 3201
3183 3202 for file in fileList:
3184 3203
3185 3204 filename = os.path.join(thisPath,file)
3186 3205 thisDatetime = self.isFileinThisTime(filename, startTime, endTime)
3187 3206
3188 3207 if not(thisDatetime):
3189 3208 continue
3190 3209
3191 3210 filenameList.append(filename)
3192 3211 datetimeList.append(thisDatetime)
3193 3212
3194 3213 if not(filenameList):
3195 3214 print "Any file was found for the time range %s - %s" %(startTime, endTime)
3196 3215 return None, None
3197 3216
3198 3217 print "%d file(s) was(were) found for the time range: %s - %s" %(len(filenameList), startTime, endTime)
3199 3218 print
3200 3219
3201 3220 for i in range(len(filenameList)):
3202 3221 print "%s -> [%s]" %(filenameList[i], datetimeList[i].ctime())
3203 3222
3204 3223 self.filenameList = filenameList
3205 3224 self.datetimeList = datetimeList
3206 3225
3207 3226 return pathList, filenameList
3208 3227
3209 3228 def setup(self, path=None,
3210 3229 startDate=None,
3211 3230 endDate=None,
3212 3231 startTime=datetime.time(0,0,0),
3213 3232 endTime=datetime.time(23,59,59),
3214 3233 set=0,
3215 3234 expLabel = "",
3216 3235 ext = None,
3217 3236 online = False,
3218 3237 delay = 60,
3219 3238 walk = True):
3220 3239
3221 3240 if path == None:
3222 3241 raise ValueError, "The path is not valid"
3223 3242
3224 3243 if ext == None:
3225 3244 ext = self.ext
3226 3245
3227 3246 if not(online):
3228 3247 print "Searching files in offline mode ..."
3229 3248 pathList, filenameList = self.__searchFilesOffLine(path, startDate=startDate, endDate=endDate,
3230 3249 startTime=startTime, endTime=endTime,
3231 3250 set=set, expLabel=expLabel, ext=ext,
3232 3251 walk=walk)
3233 3252
3234 3253 if not(pathList):
3235 3254 print "No *%s files into the folder %s \nfor the range: %s - %s"%(ext, path,
3236 3255 datetime.datetime.combine(startDate,startTime).ctime(),
3237 3256 datetime.datetime.combine(endDate,endTime).ctime())
3238 3257
3239 3258 sys.exit(-1)
3240 3259
3241 3260 self.fileIndex = -1
3242 3261 self.pathList = pathList
3243 3262 self.filenameList = filenameList
3244 3263
3245 3264 self.online = online
3246 3265 self.delay = delay
3247 3266 ext = ext.lower()
3248 3267 self.ext = ext
3249 3268
3250 3269 if not(self.setNextFile()):
3251 3270 if (startDate!=None) and (endDate!=None):
3252 3271 print "No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
3253 3272 elif startDate != None:
3254 3273 print "No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
3255 3274 else:
3256 3275 print "No files"
3257 3276
3258 3277 sys.exit(-1)
3259 3278
3260 3279
3261 3280
3262 3281 def readBlock(self):
3263 3282 dataObj = self.fitsObj[self.blockIndex]
3264 3283
3265 3284 self.data = dataObj.data
3266 3285 self.data_header_dict = dataObj.header
3267 3286 self.utc = self.data_header_dict['UTCTIME']
3268 3287
3269 3288 self.flagIsNewFile = 0
3270 3289 self.blockIndex += 1
3271 3290 self.nTotalBlocks += 1
3272 3291 self.nReadBlocks += 1
3273 3292
3274 3293 return 1
3275 3294
3276 3295 def __jumpToLastBlock(self):
3277 3296 raise ValueError, "No implemented"
3278 3297
3279 3298 def __waitNewBlock(self):
3280 3299 """
3281 3300 Return 1 si se encontro un nuevo bloque de datos, 0 de otra forma.
3282 3301
3283 3302 Si el modo de lectura es OffLine siempre retorn 0
3284 3303 """
3285 3304 if not self.online:
3286 3305 return 0
3287 3306
3288 3307 if (self.nReadBlocks >= self.dataBlocksPerFile):
3289 3308 return 0
3290 3309
3291 3310 currentPointer = self.fp.tell()
3292 3311
3293 3312 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
3294 3313
3295 3314 for nTries in range( self.nTries ):
3296 3315
3297 3316 self.fp.close()
3298 3317 self.fp = open( self.filename, 'rb' )
3299 3318 self.fp.seek( currentPointer )
3300 3319
3301 3320 self.fileSize = os.path.getsize( self.filename )
3302 3321 currentSize = self.fileSize - currentPointer
3303 3322
3304 3323 if ( currentSize >= neededSize ):
3305 3324 self.__rdBasicHeader()
3306 3325 return 1
3307 3326
3308 3327 print "\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
3309 3328 time.sleep( self.delay )
3310 3329
3311 3330
3312 3331 return 0
3313 3332
3314 3333 def __setNewBlock(self):
3315 3334
3316 3335 if self.online:
3317 3336 self.__jumpToLastBlock()
3318 3337
3319 3338 if self.flagIsNewFile:
3320 3339 return 1
3321 3340
3322 3341 self.lastUTTime = self.utc
3323 3342
3324 3343 if self.online:
3325 3344 if self.__waitNewBlock():
3326 3345 return 1
3327 3346
3328 3347 if self.nReadBlocks < self.dataBlocksPerFile:
3329 3348 return 1
3330 3349
3331 3350 if not(self.setNextFile()):
3332 3351 return 0
3333 3352
3334 3353 deltaTime = self.utc - self.lastUTTime
3335 3354
3336 3355 self.flagTimeBlock = 0
3337 3356
3338 3357 if deltaTime > self.maxTimeStep:
3339 3358 self.flagTimeBlock = 1
3340 3359
3341 3360 return 1
3342 3361
3343 3362
3344 3363 def readNextBlock(self):
3345 3364 if not(self.__setNewBlock()):
3346 3365 return 0
3347 3366
3348 3367 if not(self.readBlock()):
3349 3368 return 0
3350 3369
3351 3370 return 1
3352 3371
3353 3372
3354 3373 def getData(self):
3355 3374
3356 3375 if self.flagNoMoreFiles:
3357 3376 self.dataOut.flagNoData = True
3358 3377 print 'Process finished'
3359 3378 return 0
3360 3379
3361 3380 self.flagTimeBlock = 0
3362 3381 self.flagIsNewBlock = 0
3363 3382
3364 3383 if not(self.readNextBlock()):
3365 3384 return 0
3366 3385
3367 3386 if self.data == None:
3368 3387 self.dataOut.flagNoData = True
3369 3388 return 0
3370 3389
3371 3390 self.dataOut.data = self.data
3372 3391 self.dataOut.data_header = self.data_header_dict
3373 3392 self.dataOut.utctime = self.utc
3374 3393
3375 3394 self.dataOut.header = self.header_dict
3376 3395 self.dataOut.expName = self.expName
3377 3396 self.dataOut.nChannels = self.nChannels
3378 3397 self.dataOut.timeZone = self.timeZone
3379 3398 self.dataOut.dataBlocksPerFile = self.dataBlocksPerFile
3380 3399 self.dataOut.comments = self.comments
3381 3400 self.dataOut.timeInterval = self.timeInterval
3382 3401 self.dataOut.channelList = self.channelList
3383 3402 self.dataOut.heightList = self.heightList
3384 3403 self.dataOut.flagNoData = False
3385 3404
3386 3405 return self.dataOut.data
3387 3406
3388 3407 def run(self, **kwargs):
3389 3408
3390 3409 if not(self.isConfig):
3391 3410 self.setup(**kwargs)
3392 3411 self.isConfig = True
3393 3412
3394 3413 self.getData() No newline at end of file
General Comments 0
You need to be logged in to leave comments. Login now