##// END OF EJS Templates
Se está obviando el guardado del código en el header de procesamiento...
Miguel Valdez -
r333:99678d91f38e
parent child
Show More
@@ -1,2659 +1,2660
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JRODataIO.py 169 2012-11-19 21:57:03Z murco $
5 5 '''
6 6
7 7 import os, sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import time, datetime
13 13
14 14 from jrodata import *
15 15 from jroheaderIO import *
16 16 from jroprocessing import *
17 17
18 18 LOCALTIME = -18000
19 19
def isNumber(str):
    """
    Return True if *str* can be converted to a float, False otherwise.

    Input:
        str : value to test for numeric convertibility

    Return:
        True  : the value is numeric
        False : the value is not numeric
    """
    # Catch only conversion failures; a bare except would also hide
    # unrelated errors (e.g. KeyboardInterrupt, SystemExit).
    try:
        float( str )
        return True
    except (ValueError, TypeError):
        return False
38 38
def isThisFileinRange(filename, startUTSeconds, endUTSeconds):
    """
    Return 1 if the data file contains data inside the given UTC range.

    Inputs:
        filename       : full path of a Jicamarca-format data file (.r)
        startUTSeconds : start of the range, in seconds since 01/01/1970
        endUTSeconds   : end of the range, in seconds since 01/01/1970

    Return:
        1 if the basic-header timestamp lies in [startUTSeconds, endUTSeconds),
        0 otherwise (including files whose header cannot be read).

    Exceptions:
        IOError if the file does not exist or cannot be opened.
    """
    basicHeaderObj = BasicHeader(LOCALTIME)

    try:
        fp = open(filename,'rb')
    except:
        raise IOError, "The file %s can't be opened" %(filename)

    sts = basicHeaderObj.read(fp)
    fp.close()

    # An unreadable header means the file cannot be classified; skip it.
    if not(sts):
        print "Skipping the file %s because it has not a valid header" %(filename)
        return 0

    # Half-open interval: start inclusive, end exclusive.
    if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
        return 0

    return 1
78 78
79 79 def isFileinThisTime(filename, startTime, endTime):
80 80 """
81 81 Retorna 1 si el archivo de datos se encuentra dentro del rango de horas especificado.
82 82
83 83 Inputs:
84 84 filename : nombre completo del archivo de datos en formato Jicamarca (.r)
85 85
86 86 startTime : tiempo inicial del rango seleccionado en formato datetime.time
87 87
88 88 endTime : tiempo final del rango seleccionado en formato datetime.time
89 89
90 90 Return:
91 91 Boolean : Retorna True si el archivo de datos contiene datos en el rango de
92 92 fecha especificado, de lo contrario retorna False.
93 93
94 94 Excepciones:
95 95 Si el archivo no existe o no puede ser abierto
96 96 Si la cabecera no puede ser leida.
97 97
98 98 """
99 99
100 100
101 101 try:
102 102 fp = open(filename,'rb')
103 103 except:
104 104 raise IOError, "The file %s can't be opened" %(filename)
105 105
106 106 basicHeaderObj = BasicHeader(LOCALTIME)
107 107 sts = basicHeaderObj.read(fp)
108 108 fp.close()
109 109
110 110 thisDatetime = basicHeaderObj.datatime
111 111 thisTime = basicHeaderObj.datatime.time()
112 112
113 113 if not(sts):
114 114 print "Skipping the file %s because it has not a valid header" %(filename)
115 115 return None
116 116
117 117 if not ((startTime <= thisTime) and (endTime > thisTime)):
118 118 return None
119 119
120 120 return thisDatetime
121 121
def getlastFileFromPath(path, ext):
    """
    Scan *path* for files matching the "PYYYYDDDSSS.ext" naming scheme
    and return the last one in case-insensitive order, or None.

    Input:
        path : folder containing the data files
        ext  : extension the files must have (case-insensitive)

    Return:
        The last matching file name (without path), or None when the
        folder holds no valid file.
    """
    candidates = []

    # Expected name layout:
    #   0 1234 567 89A BCDE
    #   H YYYY DDD SSS .ext
    for entry in os.listdir(path):
        try:
            int(entry[1:5])   # year field must be numeric
            int(entry[5:8])   # doy field must be numeric
        except:
            continue

        if os.path.splitext(entry)[-1].lower() != ext.lower():
            continue

        candidates.append(entry)

    if not candidates:
        return None

    return sorted(candidates, key=str.lower)[-1]
159 159
def checkForRealPath(path, year, doy, set, ext):
    """
    Linux is case sensitive, so try every upper/lower-case combination of
    the doy-folder prefix (none, 'd', 'D') and the file prefix ('d'/'D'
    for .r voltage files, 'p'/'P' for .pdata spectra files) until an
    existing file is found.

    Example, looking for .../.../D2009307/P2009307367.ext tries:
        .../.../y2009307367.ext
        .../.../Y2009307367.ext
        .../.../x2009307/y2009307367.ext
        .../.../x2009307/Y2009307367.ext
        .../.../X2009307/y2009307367.ext
        .../.../X2009307/Y2009307367.ext

    Return:
        (fullfilename, filename) when a combination exists on disk,
        (None, filename)         when none does (last name tried),
        (None, None)             for an unknown extension.
    """
    if ext.lower() == ".r":          # voltage
        filePrefixes = ['d', 'D']
    elif ext.lower() == ".pdata":    # spectra
        filePrefixes = ['p', 'P']
    else:
        return None, None

    filename = None

    for dirPrefix in [None, 'd', 'D']:
        if dirPrefix is None:
            searchPath = path
        else:
            # candidate subfolder xYYYYDDD (x = d or D)
            searchPath = os.path.join(path, "%s%04d%03d" % ( dirPrefix, year, doy ))

        for filePrefix in filePrefixes:
            # candidate file xYYYYDDDSSS.ext
            filename = "%s%04d%03d%03d%s" % ( filePrefix, year, doy, set, ext )
            fullfilename = os.path.join( searchPath, filename )

            if os.path.exists( fullfilename ):
                return fullfilename, filename

    return None, filename
216 216
def isDoyFolder(folder):
    """
    Return 1 if *folder* looks like a day-of-year folder name
    ("xYYYYDDD...": one prefix char, 4-digit year, 3-digit doy), else 0.
    """
    # Single narrow except instead of two bare ones: only conversion
    # failures mean "not a doy folder"; anything else should propagate.
    try:
        int(folder[1:5])   # year
        int(folder[5:8])   # day of year
    except (ValueError, TypeError):
        return 0

    return 1
229 229
class JRODataIO:
    """Base class holding the state shared by JRO data readers and writers."""

    # Speed of light [m/s]; used to derive ippSeconds from the IPP.
    c = 3E8

    isConfig = False

    # Jicamarca file headers (basic header + long-header components).
    basicHeaderObj = BasicHeader(LOCALTIME)

    systemHeaderObj = SystemHeader()

    radarControllerHeaderObj = RadarControllerHeader()

    processingHeaderObj = ProcessingHeader()

    # 1 when reading files as they are being written, 0 for offline mode.
    online = 0

    # numpy dtype of one raw sample; set after reading the first header.
    dtype = None

    pathList = []

    filenameList = []

    filename = None

    ext = None

    flagIsNewFile = 1

    # Set to 1 when the gap between consecutive blocks exceeds maxTimeStep.
    flagTimeBlock = 0

    flagIsNewBlock = 0

    # File object of the currently opened data file.
    fp = None

    firstHeaderSize = 0

    basicHeaderSize = 24

    versionFile = 1103

    fileSize = None

    ippSeconds = None

    # Expected file size computed from the processing header.
    fileSizeByHeader = None

    fileIndex = None

    profileIndex = None

    blockIndex = None

    nTotalBlocks = None

    # Maximum allowed time gap [s] between consecutive blocks.
    maxTimeStep = 30

    lastUTTime = None

    datablock = None

    dataOut = None

    blocksize = None

    def __init__(self):
        # Abstract base: subclasses must provide their own constructor.
        raise ValueError, "Not implemented"

    def run(self):
        # Abstract: implemented by reader/writer subclasses.
        raise ValueError, "Not implemented"

    def getOutput(self):
        # Return the data object produced/consumed by this unit.
        return self.dataOut
305 305
class JRODataReader(JRODataIO, ProcessingUnit):
    """Base reader for Jicamarca data files (offline folders or an online stream)."""

    nReadBlocks = 0

    # Seconds to wait before retrying for a new file/block in online mode.
    delay = 10

    # How many retries before giving up a search.
    nTries = 3

    # Number of consecutive file sets to probe when searching online.
    nFiles = 3

    flagNoMoreFiles = 0

    datetimeList = []

    __isFirstTimeOnline = 1

    __printInfo = True
323 323
    def __init__(self):

        """
        Abstract constructor: concrete readers must implement it.
        """

        raise ValueError, "This method has not been implemented"


    def createObjByDefault(self):
        """
        Abstract: create the default output object for this reader.
        """
        raise ValueError, "This method has not been implemented"

    def getBlockDimension(self):
        # Abstract: compute the shape of one data block from the headers.
        raise ValueError, "No implemented"
342 342
    def __searchFilesOffLine(self,
                            path,
                            startDate,
                            endDate,
                            startTime=datetime.time(0,0,0),
                            endTime=datetime.time(23,59,59),
                            set=None,
                            expLabel='',
                            ext='.r',
                            walk=True):
        """
        Build the list of data files under *path* whose doy folder lies
        between startDate and endDate and whose header time falls inside
        [startTime, endTime).

        Inputs:
            path     : root data folder
            startDate, endDate : date range (inclusive), datetime.date
            startTime, endTime : time-of-day range, datetime.time
            set      : unused here (kept for interface symmetry)
            expLabel : sub-experiment subfolder appended to each doy folder
            ext      : file extension to look for
            walk     : when True, search inside doy subfolders (xYYYYDDD)

        Affected:
            self.filenameList
            self.datetimeList

        Return:
            (pathList, filenameList), or (None, None) when nothing matched.
        """

        pathList = []

        if not walk:
            pathList.append(path)

        else:
            # Keep only subdirectories that look like doy folders (xYYYYDDD).
            dirList = []
            for thisPath in os.listdir(path):
                if not os.path.isdir(os.path.join(path,thisPath)):
                    continue
                if not isDoyFolder(thisPath):
                    continue

                dirList.append(thisPath)

            if not(dirList):
                return None, None

            thisDate = startDate

            # Walk the date range one day at a time, collecting the doy
            # folder (any prefix char) that matches each day.
            while(thisDate <= endDate):
                year = thisDate.timetuple().tm_year
                doy = thisDate.timetuple().tm_yday

                match = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy))
                if len(match) == 0:
                    thisDate += datetime.timedelta(1)
                    continue

                pathList.append(os.path.join(path,match[0],expLabel))

                thisDate += datetime.timedelta(1)

        if pathList == []:
            print "Any folder was found for the date range: %s-%s" %(startDate, endDate)
            return None, None

        print "%d folder(s) was(were) found for the date range: %s - %s" %(len(pathList), startDate, endDate)

        filenameList = []
        datetimeList = []

        for i in range(len(pathList)):

            thisPath = pathList[i]

            fileList = glob.glob1(thisPath, "*%s" %ext)
            fileList.sort()

            for file in fileList:

                filename = os.path.join(thisPath,file)
                # Keep only files whose header time lies inside the range.
                thisDatetime = isFileinThisTime(filename, startTime, endTime)

                if not(thisDatetime):
                    continue

                filenameList.append(filename)
                datetimeList.append(thisDatetime)

        if not(filenameList):
            print "Any file was found for the time range %s - %s" %(startTime, endTime)
            return None, None

        print "%d file(s) was(were) found for the time range: %s - %s" %(len(filenameList), startTime, endTime)
        print

        for i in range(len(filenameList)):
            print "%s -> [%s]" %(filenameList[i], datetimeList[i].ctime())

        self.filenameList = filenameList
        self.datetimeList = datetimeList

        return pathList, filenameList
428 428
    def __searchFilesOnLine(self, path, expLabel = "", ext = None, walk=True):

        """
        Look for the last file of the last doy folder under *path* and
        return it together with the fields decoded from its name.

        Input:
            path     : folder containing the data files
            expLabel : sub-experiment subfolder name
            ext      : extension of the data files
            walk     : when True, search inside doy subfolders (doypath)

        Return:
            directory : folder where the file was found
            filename  : last file of that folder
            year      : year decoded from the filename
            doy       : day of year decoded from the filename
            set       : set number decoded from the filename

            (None, None, None, None, None) when nothing valid was found.
        """
        dirList = []

        if not walk:
            fullpath = path

        else:
            # Keep only subdirectories that look like doy folders.
            for thisPath in os.listdir(path):
                if not os.path.isdir(os.path.join(path,thisPath)):
                    continue
                if not isDoyFolder(thisPath):
                    continue

                dirList.append(thisPath)

            if not(dirList):
                return None, None, None, None, None

            dirList = sorted( dirList, key=str.lower )

            # The newest folder is the last one in case-insensitive order.
            doypath = dirList[-1]
            fullpath = os.path.join(path, doypath, expLabel)


        print "%s folder was found: " %(fullpath )

        filename = getlastFileFromPath(fullpath, ext)

        if not(filename):
            return None, None, None, None, None

        print "%s file was found" %(filename)

        if not(self.__verifyFile(os.path.join(fullpath, filename))):
            return None, None, None, None, None

        # Filename layout: xYYYYDDDSSS.ext
        year = int( filename[1:5] )
        doy = int( filename[5:8] )
        set = int( filename[8:11] )

        return fullpath, filename, year, doy, set
494 494
    def __setNextFileOffline(self):
        """
        Open the next valid file from self.filenameList.

        Affected:
            self.flagIsNewFile, self.fileIndex, self.filename,
            self.fileSize, self.fp, self.flagNoMoreFiles

        Return:
            1 when a file was opened, 0 when the list is exhausted.
        """
        idFile = self.fileIndex

        while (True):
            idFile += 1
            if not(idFile < len(self.filenameList)):
                self.flagNoMoreFiles = 1
                print "No more Files"
                return 0

            filename = self.filenameList[idFile]

            # Skip files without enough data for at least one block.
            if not(self.__verifyFile(filename)):
                continue

            fileSize = os.path.getsize(filename)
            fp = open(filename,'rb')
            break

        self.flagIsNewFile = 1
        self.fileIndex = idFile
        self.filename = filename
        self.fileSize = fileSize
        self.fp = fp

        print "Setting the file: %s"%self.filename

        return 1
524 524
    def __setNextFileOnline(self):
        """
        Look for the next file with enough data to be read inside the
        current folder; when no valid file is found, wait a fixed delay
        and probe up to the next *nFiles* candidate file sets.

        Affected:
            self.flagIsNewFile
            self.filename
            self.fileSize
            self.fp
            self.set
            self.flagNoMoreFiles

        Return:
            0 : no valid file could be found after all retries
            1 : a file was opened successfully and is ready to be read

        Exceptions:
            If a given file cannot be opened
        """
        nFiles = 0
        fileOk_flag = False
        firstTime_flag = True

        self.set += 1

        # First candidate: the next set in the current folder.
        fullfilename, filename = checkForRealPath( self.path, self.year, self.doy, self.set, self.ext )
        if fullfilename:
            if self.__verifyFile(fullfilename, False):
                fileOk_flag = True

        # No file yet: wait and retry over the following candidates.
        if not(fileOk_flag):
            for nFiles in range(self.nFiles+1): # probe the next self.nFiles+1 candidate sets

                if firstTime_flag: # on the first pass retry self.nTries times
                    tries = self.nTries
                else:
                    tries = 1 # afterwards, a single try per candidate

                for nTries in range( tries ):
                    if firstTime_flag:
                        print "\tWaiting %0.2f sec for the file \"%s\" , try %03d ..." % ( self.delay, filename, nTries+1 )
                        time.sleep( self.delay )
                    else:
                        print "\tSearching next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext)

                    fullfilename, filename = checkForRealPath( self.path, self.year, self.doy, self.set, self.ext )
                    if fullfilename:
                        if self.__verifyFile(fullfilename):
                            fileOk_flag = True
                            break

                if fileOk_flag:
                    break

                firstTime_flag = False

                print "\tSkipping the file \"%s\" due to this file doesn't exist" % filename
                self.set += 1

                if nFiles == (self.nFiles-1): # candidates exhausted: move on to the next day's folder
                    self.set = 0
                    self.doy += 1

        if fileOk_flag:
            self.fileSize = os.path.getsize( fullfilename )
            self.filename = fullfilename
            self.flagIsNewFile = 1
            if self.fp != None: self.fp.close()
            self.fp = open(fullfilename, 'rb')
            self.flagNoMoreFiles = 0
            print 'Setting the file: %s' % fullfilename
        else:
            self.fileSize = 0
            self.filename = None
            self.flagIsNewFile = 0
            self.fp = None
            self.flagNoMoreFiles = 1
            print 'No more Files'

        return fileOk_flag
609 609
610 610
611 611 def setNextFile(self):
612 612 if self.fp != None:
613 613 self.fp.close()
614 614
615 615 if self.online:
616 616 newFile = self.__setNextFileOnline()
617 617 else:
618 618 newFile = self.__setNextFileOffline()
619 619
620 620 if not(newFile):
621 621 return 0
622 622
623 623 self.__readFirstHeader()
624 624 self.nReadBlocks = 0
625 625 return 1
626 626
    def __waitNewBlock(self):
        """
        Return 1 when a new data block showed up in the current file,
        0 otherwise.

        In offline mode this always returns 0.
        """
        if not self.online:
            return 0

        # The file is complete once all of its blocks have been read.
        if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
            return 0

        currentPointer = self.fp.tell()

        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        for nTries in range( self.nTries ):

            # Reopen to refresh the file contents, preserving the position.
            self.fp.close()
            self.fp = open( self.filename, 'rb' )
            self.fp.seek( currentPointer )

            self.fileSize = os.path.getsize( self.filename )
            currentSize = self.fileSize - currentPointer

            if ( currentSize >= neededSize ):
                self.__rdBasicHeader()
                return 1

            print "\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
            time.sleep( self.delay )


        return 0
661 661
    def __jumpToLastBlock(self):
        """
        Skip ahead to the last complete block of the file. Only runs on
        the first online read; afterwards it is a no-op.
        """
        if not(self.__isFirstTimeOnline):
            return

        csize = self.fileSize - self.fp.tell()

        # Skip the first data block (its basic header was already read).
        if csize > self.processingHeaderObj.blockSize:
            self.fp.seek(self.fp.tell() + self.processingHeaderObj.blockSize)
        else:
            return

        csize = self.fileSize - self.fp.tell()
        neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
        # Jump over every remaining complete (basic header + block) record.
        factor = int(csize/neededsize)
        if factor > 0:
            self.fp.seek(self.fp.tell() + factor*neededsize)

        self.flagIsNewFile = 0
        self.__isFirstTimeOnline = 0
683 683
684 684
    def __setNewBlock(self):
        """
        Position the file pointer on the next block, reading its basic
        header; moves to the next file when the current one is exhausted.

        Return:
            1 when a new block is ready, 0 when there are no more files.
        """
        if self.fp == None:
            return 0

        if self.online:
            self.__jumpToLastBlock()

        if self.flagIsNewFile:
            return 1

        # Remember the previous block's time to detect gaps below.
        self.lastUTTime = self.basicHeaderObj.utc
        currentSize = self.fileSize - self.fp.tell()
        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        if (currentSize >= neededSize):
            self.__rdBasicHeader()
            return 1

        # Online mode: the block may still be being written; wait for it.
        if self.__waitNewBlock():
            return 1

        if not(self.setNextFile()):
            return 0

        deltaTime = self.basicHeaderObj.utc - self.lastUTTime #

        self.flagTimeBlock = 0

        # Flag a time discontinuity when the inter-file gap is too large.
        if deltaTime > self.maxTimeStep:
            self.flagTimeBlock = 1

        return 1
718 718
719 719
720 720 def readNextBlock(self):
721 721 if not(self.__setNewBlock()):
722 722 return 0
723 723
724 724 if not(self.readBlock()):
725 725 return 0
726 726
727 727 return 1
728 728
729 729 def __rdProcessingHeader(self, fp=None):
730 730 if fp == None:
731 731 fp = self.fp
732 732
733 733 self.processingHeaderObj.read(fp)
734 734
735 735 def __rdRadarControllerHeader(self, fp=None):
736 736 if fp == None:
737 737 fp = self.fp
738 738
739 739 self.radarControllerHeaderObj.read(fp)
740 740
741 741 def __rdSystemHeader(self, fp=None):
742 742 if fp == None:
743 743 fp = self.fp
744 744
745 745 self.systemHeaderObj.read(fp)
746 746
747 747 def __rdBasicHeader(self, fp=None):
748 748 if fp == None:
749 749 fp = self.fp
750 750
751 751 self.basicHeaderObj.read(fp)
752 752
753 753
    def __readFirstHeader(self):
        """
        Read the full first header (basic + system + radar controller +
        processing) and derive dtype, ippSeconds and the expected file size.
        """
        self.__rdBasicHeader()
        self.__rdSystemHeader()
        self.__rdRadarControllerHeader()
        self.__rdProcessingHeader()

        self.firstHeaderSize = self.basicHeaderObj.size

        # Decode the sample dtype from the processing flags: the datatype
        # flag is a power of two starting at DATATYPE_CHAR, so the log2
        # difference yields an index 0..5.
        datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
        if datatype == 0:
            datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
        elif datatype == 1:
            datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')])
        elif datatype == 2:
            datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')])
        elif datatype == 3:
            datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')])
        elif datatype == 4:
            datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')])
        elif datatype == 5:
            datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')])
        else:
            raise ValueError, 'Data type was not defined'

        self.dtype = datatype_str
        # NOTE(review): 2*1000*ipp/c presumably converts an IPP stored in
        # km into seconds (round trip) -- confirm against jroheaderIO.
        self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
        # Expected size: first header + every block, each preceded by a
        # basic header except the first.
        self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1)
#        self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
#        self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
        self.getBlockDimension()
784 784
785 785
    def __verifyFile(self, filename, msgFlag=True):
        """
        Return True when *filename* can be opened and holds at least one
        full (headers + data block) record, False otherwise.

        When the block size is not known yet (first file), the headers are
        read from the file itself to compute the needed size.

        Input:
            filename : full path of the file to check
            msgFlag  : when True, print diagnostics for rejected files
        """
        msg = None
        try:
            fp = open(filename, 'rb')
            currentPosition = fp.tell()
        except:
            if msgFlag:
                print "The file %s can't be opened" % (filename)
            return False

        neededSize = self.processingHeaderObj.blockSize + self.firstHeaderSize

        if neededSize == 0:
            # Block size unknown: read this file's own headers to get it.
            basicHeaderObj = BasicHeader(LOCALTIME)
            systemHeaderObj = SystemHeader()
            radarControllerHeaderObj = RadarControllerHeader()
            processingHeaderObj = ProcessingHeader()

            try:
                if not( basicHeaderObj.read(fp) ): raise IOError
                if not( systemHeaderObj.read(fp) ): raise IOError
                if not( radarControllerHeaderObj.read(fp) ): raise IOError
                if not( processingHeaderObj.read(fp) ): raise IOError
                data_type = int(numpy.log2((processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))

                neededSize = processingHeaderObj.blockSize + basicHeaderObj.size

            except:
                if msgFlag:
                    print "\tThe file %s is empty or it hasn't enough data" % filename

                fp.close()
                return False
        else:
            msg = "\tSkipping the file %s due to it hasn't enough data" %filename

        fp.close()
        fileSize = os.path.getsize(filename)
        currentSize = fileSize - currentPosition
        if currentSize < neededSize:
            if msgFlag and (msg != None):
                print msg
            return False

        return True
831 831
    def setup(self,
              path=None,
              startDate=None,
              endDate=None,
              startTime=datetime.time(0,0,0),
              endTime=datetime.time(23,59,59),
              set=0,
              expLabel = "",
              ext = None,
              online = False,
              delay = 60,
              walk = True):
        """
        Configure the reader: locate the data files for the requested
        date/time range (offline) or the newest file (online), then open
        the first one.

        Exits the process (sys.exit) when no usable file can be found
        offline; returns None when the online search fails.

        Return:
            self.dataOut
        """
        if path == None:
            raise ValueError, "The path is not valid"

        if ext == None:
            ext = self.ext

        if online:
            print "Searching files in online mode..."

            # Retry a few times: the newest file may still be being created.
            for nTries in range( self.nTries ):
                fullpath, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext, walk=walk)

                if fullpath:
                    break

                print '\tWaiting %0.2f sec for an valid file in %s: try %02d ...' % (self.delay, path, nTries+1)
                time.sleep( self.delay )

            if not(fullpath):
                print "There 'isn't valied files in %s" % path
                return None

            self.year = year
            self.doy = doy
            # Start one set earlier: __setNextFileOnline pre-increments.
            self.set = set - 1
            self.path = path

        else:
            print "Searching files in offline mode ..."
            pathList, filenameList = self.__searchFilesOffLine(path, startDate=startDate, endDate=endDate,
                                                               startTime=startTime, endTime=endTime,
                                                               set=set, expLabel=expLabel, ext=ext,
                                                               walk=walk)

            if not(pathList):
                print "No *%s files into the folder %s \nfor the range: %s - %s"%(ext, path,
                                                                                  datetime.datetime.combine(startDate,startTime).ctime(),
                                                                                  datetime.datetime.combine(endDate,endTime).ctime())

                sys.exit(-1)


        self.fileIndex = -1
        # NOTE(review): in online mode pathList/filenameList are never
        # assigned before this point -- looks like a latent NameError;
        # confirm against the online code path.
        self.pathList = pathList
        self.filenameList = filenameList

        self.online = online
        self.delay = delay
        ext = ext.lower()
        self.ext = ext

        if not(self.setNextFile()):
            if (startDate!=None) and (endDate!=None):
                print "No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
            elif startDate != None:
                print "No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
            else:
                print "No files"

            sys.exit(-1)

#        self.updateDataHeader()

        return self.dataOut
909 909
910 910 def getData():
911 911
912 912 raise ValueError, "This method has not been implemented"
913 913
914 914 def hasNotDataInBuffer():
915 915
916 916 raise ValueError, "This method has not been implemented"
917 917
918 918 def readBlock():
919 919
920 920 raise ValueError, "This method has not been implemented"
921 921
922 922 def isEndProcess(self):
923 923
924 924 return self.flagNoMoreFiles
925 925
    def printReadBlocks(self):
        # Report how many blocks have been read from the current file.
        print "Number of read blocks per file %04d" %self.nReadBlocks

    def printTotalBlocks(self):
        # Report the total number of blocks read across all files.
        print "Number of read blocks %04d" %self.nTotalBlocks

    def printNumberOfBlock(self):
        # Print block progress, but only right after a fresh block read.
        if self.flagIsNewBlock:
            print "Block No. %04d, Total blocks %04d -> %s" %(self.basicHeaderObj.dataBlock, self.nTotalBlocks, self.dataOut.datatime.ctime())
938 938
939 939 def printInfo(self):
940 940
941 941 if self.__printInfo == False:
942 942 return
943 943
944 944 self.basicHeaderObj.printInfo()
945 945 self.systemHeaderObj.printInfo()
946 946 self.radarControllerHeaderObj.printInfo()
947 947 self.processingHeaderObj.printInfo()
948 948
949 949 self.__printInfo = False
950 950
951 951
952 952 def run(self, **kwargs):
953 953
954 954 if not(self.isConfig):
955 955
956 956 # self.dataOut = dataOut
957 957 self.setup(**kwargs)
958 958 self.isConfig = True
959 959
960 960 self.getData()
961 961
class JRODataWriter(JRODataIO, Operation):

    """
    Writer for processed JRO data files (.r or .pdata). Data is always
    written to disk one block at a time.
    """

    blockIndex = 0

    path = None

    # Set number (SSS field) of the file currently being written.
    setFile = None

    profilesPerBlock = None

    blocksPerFile = None

    nWriteBlocks = 0
980 980
    def __init__(self, dataOut=None):
        # Abstract constructor: concrete writers must implement it.
        raise ValueError, "Not implemented"


    def hasAllDataInBuffer(self):
        # Abstract: return whether a full block is buffered and ready.
        raise ValueError, "Not implemented"


    def setBlockDimension(self):
        # Abstract: define the shape of one output block.
        raise ValueError, "Not implemented"


    def writeBlock(self):
        # Abstract: write one buffered block to the current file.
        raise ValueError, "No implemented"


    def putData(self):
        # Abstract: push incoming data into the writer's buffer.
        raise ValueError, "No implemented"
999 999
    def getDataHeader(self):
        """
        Abstract: take a copy of the first header (basic, system, radar
        controller and processing headers) from the current data object.

        Affected:
            self.basicHeaderObj
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.processingHeaderObj

        Return:
            None
        """

        raise ValueError, "No implemented"
1016 1016
1017 1017 def getBasicHeader(self):
1018 1018
1019 1019 self.basicHeaderObj.size = self.basicHeaderSize #bytes
1020 1020 self.basicHeaderObj.version = self.versionFile
1021 1021 self.basicHeaderObj.dataBlock = self.nTotalBlocks
1022 1022
1023 1023 utc = numpy.floor(self.dataOut.utctime)
1024 1024 milisecond = (self.dataOut.utctime - utc)* 1000.0
1025 1025
1026 1026 self.basicHeaderObj.utc = utc
1027 1027 self.basicHeaderObj.miliSecond = milisecond
1028 1028 self.basicHeaderObj.timeZone = 0
1029 1029 self.basicHeaderObj.dstFlag = 0
1030 1030 self.basicHeaderObj.errorCount = 0
1031 1031
    def __writeFirstHeader(self):
        """
        Write the first header of the file: the Basic header followed by
        the long header (SystemHeader, RadarControllerHeader,
        ProcessingHeader).

        Affected:
            __dataType

        Return:
            None
        """

        # The basic header's size field must account for the long header
        # that follows it.
        sizeLongHeader = self.systemHeaderObj.size + self.radarControllerHeaderObj.size + self.processingHeaderObj.size
        self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader

        self.basicHeaderObj.write(self.fp)
        self.systemHeaderObj.write(self.fp)
        self.radarControllerHeaderObj.write(self.fp)
        self.processingHeaderObj.write(self.fp)

        self.dtype = self.dataOut.dtype
1054 1054
    def __setNewBlock(self):
        """
        Write the First Header when starting a new file; otherwise write
        only the Basic header of the next block.

        Return:
            0 : nothing could be written
            1 : the Basic or the First header was written
        """
        if self.fp == None:
            self.setNextFile()

        if self.flagIsNewFile:
            return 1

        # Current file still has room for more blocks.
        if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
            self.basicHeaderObj.write(self.fp)
            return 1

        if not( self.setNextFile() ):
            return 0

        return 1
1077 1077
1078 1078
    def writeNextBlock(self):
        """
        Prepare the next block slot (headers/file rollover) and write the
        buffered data block to the current file.

        Return:
            0 : the data block could not be written
            1 : the data block was written successfully
        """
        if not( self.__setNewBlock() ):
            return 0

        self.writeBlock()

        return 1
1093 1093
1094 1094 def setNextFile(self):
1095 1095 """
1096 1096 Determina el siguiente file que sera escrito
1097 1097
1098 1098 Affected:
1099 1099 self.filename
1100 1100 self.subfolder
1101 1101 self.fp
1102 1102 self.setFile
1103 1103 self.flagIsNewFile
1104 1104
1105 1105 Return:
1106 1106 0 : Si el archivo no puede ser escrito
1107 1107 1 : Si el archivo esta listo para ser escrito
1108 1108 """
1109 1109 ext = self.ext
1110 1110 path = self.path
1111 1111
1112 1112 if self.fp != None:
1113 1113 self.fp.close()
1114 1114
1115 1115 timeTuple = time.localtime( self.dataOut.utctime)
1116 1116 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
1117 1117
1118 1118 fullpath = os.path.join( path, subfolder )
1119 1119 if not( os.path.exists(fullpath) ):
1120 1120 os.mkdir(fullpath)
1121 1121 self.setFile = -1 #inicializo mi contador de seteo
1122 1122 else:
1123 1123 filesList = os.listdir( fullpath )
1124 1124 if len( filesList ) > 0:
1125 1125 filesList = sorted( filesList, key=str.lower )
1126 1126 filen = filesList[-1]
1127 1127 # el filename debera tener el siguiente formato
1128 1128 # 0 1234 567 89A BCDE (hex)
1129 1129 # x YYYY DDD SSS .ext
1130 1130 if isNumber( filen[8:11] ):
1131 1131 self.setFile = int( filen[8:11] ) #inicializo mi contador de seteo al seteo del ultimo file
1132 1132 else:
1133 1133 self.setFile = -1
1134 1134 else:
1135 1135 self.setFile = -1 #inicializo mi contador de seteo
1136 1136
1137 1137 setFile = self.setFile
1138 1138 setFile += 1
1139 1139
1140 1140 file = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
1141 1141 timeTuple.tm_year,
1142 1142 timeTuple.tm_yday,
1143 1143 setFile,
1144 1144 ext )
1145 1145
1146 1146 filename = os.path.join( path, subfolder, file )
1147 1147
1148 1148 fp = open( filename,'wb' )
1149 1149
1150 1150 self.blockIndex = 0
1151 1151
1152 1152 #guardando atributos
1153 1153 self.filename = filename
1154 1154 self.subfolder = subfolder
1155 1155 self.fp = fp
1156 1156 self.setFile = setFile
1157 1157 self.flagIsNewFile = 1
1158 1158
1159 1159 self.getDataHeader()
1160 1160
1161 1161 print 'Writing the file: %s'%self.filename
1162 1162
1163 1163 self.__writeFirstHeader()
1164 1164
1165 1165 return 1
1166 1166
    def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=None, set=0, ext=None):
        """
        Configure the output format, open the first file and write its First
        Header.

        Inputs:
            dataOut : data object whose contents will be written out
            path : destination path where the output files will be created
            blocksPerFile : number of data blocks stored per file
            profilesPerBlock : number of profiles per data block
            set : starting file set number
            ext : extension (format) in which the files will be saved

        Return:
            0 : the setup did not succeed
            1 : the setup succeeded
        """

        if ext == None:
            ext = self.ext

        ext = ext.lower()

        self.ext = ext

        self.path = path

        # setNextFile() increments before use, so start one below 'set'
        self.setFile = set - 1

        self.blocksPerFile = blocksPerFile

        self.profilesPerBlock = profilesPerBlock

        self.dataOut = dataOut

        # opens the first output file and writes its headers
        if not(self.setNextFile()):
            print "There isn't a next file"
            return 0

        self.setBlockDimension()

        return 1
1205 1205
1206 1206 def run(self, dataOut, **kwargs):
1207 1207
1208 1208 if not(self.isConfig):
1209 1209
1210 1210 self.setup(dataOut, **kwargs)
1211 1211 self.isConfig = True
1212 1212
1213 1213 self.putData()
1214 1214
1215 1215 class VoltageReader(JRODataReader):
1216 1216 """
1217 1217 Esta clase permite leer datos de voltage desde archivos en formato rawdata (.r). La lectura
1218 1218 de los datos siempre se realiza por bloques. Los datos leidos (array de 3 dimensiones:
1219 1219 perfiles*alturas*canales) son almacenados en la variable "buffer".
1220 1220
1221 1221 perfiles * alturas * canales
1222 1222
1223 1223 Esta clase contiene instancias (objetos) de las clases BasicHeader, SystemHeader,
1224 1224 RadarControllerHeader y Voltage. Los tres primeros se usan para almacenar informacion de la
1225 1225 cabecera de datos (metadata), y el cuarto (Voltage) para obtener y almacenar un perfil de
1226 1226 datos desde el "buffer" cada vez que se ejecute el metodo "getData".
1227 1227
1228 1228 Example:
1229 1229
1230 1230 dpath = "/home/myuser/data"
1231 1231
1232 1232 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
1233 1233
1234 1234 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
1235 1235
1236 1236 readerObj = VoltageReader()
1237 1237
1238 1238 readerObj.setup(dpath, startTime, endTime)
1239 1239
1240 1240 while(True):
1241 1241
1242 1242 #to get one profile
1243 1243 profile = readerObj.getData()
1244 1244
1245 1245 #print the profile
1246 1246 print profile
1247 1247
1248 1248 #If you want to see all datablock
1249 1249 print readerObj.datablock
1250 1250
1251 1251 if readerObj.flagNoMoreFiles:
1252 1252 break
1253 1253
1254 1254 """
1255 1255
1256 1256 ext = ".r"
1257 1257
1258 1258 optchar = "D"
1259 1259 dataOut = None
1260 1260
1261 1261
1262 1262 def __init__(self):
1263 1263 """
1264 1264 Inicializador de la clase VoltageReader para la lectura de datos de voltage.
1265 1265
1266 1266 Input:
1267 1267 dataOut : Objeto de la clase Voltage. Este objeto sera utilizado para
1268 1268 almacenar un perfil de datos cada vez que se haga un requerimiento
1269 1269 (getData). El perfil sera obtenido a partir del buffer de datos,
1270 1270 si el buffer esta vacio se hara un nuevo proceso de lectura de un
1271 1271 bloque de datos.
1272 1272 Si este parametro no es pasado se creara uno internamente.
1273 1273
1274 1274 Variables afectadas:
1275 1275 self.dataOut
1276 1276
1277 1277 Return:
1278 1278 None
1279 1279 """
1280 1280
1281 1281 self.isConfig = False
1282 1282
1283 1283 self.datablock = None
1284 1284
1285 1285 self.utc = 0
1286 1286
1287 1287 self.ext = ".r"
1288 1288
1289 1289 self.optchar = "D"
1290 1290
1291 1291 self.basicHeaderObj = BasicHeader(LOCALTIME)
1292 1292
1293 1293 self.systemHeaderObj = SystemHeader()
1294 1294
1295 1295 self.radarControllerHeaderObj = RadarControllerHeader()
1296 1296
1297 1297 self.processingHeaderObj = ProcessingHeader()
1298 1298
1299 1299 self.online = 0
1300 1300
1301 1301 self.fp = None
1302 1302
1303 1303 self.idFile = None
1304 1304
1305 1305 self.dtype = None
1306 1306
1307 1307 self.fileSizeByHeader = None
1308 1308
1309 1309 self.filenameList = []
1310 1310
1311 1311 self.filename = None
1312 1312
1313 1313 self.fileSize = None
1314 1314
1315 1315 self.firstHeaderSize = 0
1316 1316
1317 1317 self.basicHeaderSize = 24
1318 1318
1319 1319 self.pathList = []
1320 1320
1321 1321 self.filenameList = []
1322 1322
1323 1323 self.lastUTTime = 0
1324 1324
1325 1325 self.maxTimeStep = 30
1326 1326
1327 1327 self.flagNoMoreFiles = 0
1328 1328
1329 1329 self.set = 0
1330 1330
1331 1331 self.path = None
1332 1332
1333 1333 self.profileIndex = 2**32-1
1334 1334
1335 1335 self.delay = 3 #seconds
1336 1336
1337 1337 self.nTries = 3 #quantity tries
1338 1338
1339 1339 self.nFiles = 3 #number of files for searching
1340 1340
1341 1341 self.nReadBlocks = 0
1342 1342
1343 1343 self.flagIsNewFile = 1
1344 1344
1345 1345 self.__isFirstTimeOnline = 1
1346 1346
1347 1347 self.ippSeconds = 0
1348 1348
1349 1349 self.flagTimeBlock = 0
1350 1350
1351 1351 self.flagIsNewBlock = 0
1352 1352
1353 1353 self.nTotalBlocks = 0
1354 1354
1355 1355 self.blocksize = 0
1356 1356
1357 1357 self.dataOut = self.createObjByDefault()
1358 1358
1359 1359 def createObjByDefault(self):
1360 1360
1361 1361 dataObj = Voltage()
1362 1362
1363 1363 return dataObj
1364 1364
1365 1365 def __hasNotDataInBuffer(self):
1366 1366 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
1367 1367 return 1
1368 1368 return 0
1369 1369
1370 1370
1371 1371 def getBlockDimension(self):
1372 1372 """
1373 1373 Obtiene la cantidad de puntos a leer por cada bloque de datos
1374 1374
1375 1375 Affected:
1376 1376 self.blocksize
1377 1377
1378 1378 Return:
1379 1379 None
1380 1380 """
1381 1381 pts2read = self.processingHeaderObj.profilesPerBlock * self.processingHeaderObj.nHeights * self.systemHeaderObj.nChannels
1382 1382 self.blocksize = pts2read
1383 1383
1384 1384
1385 1385 def readBlock(self):
1386 1386 """
1387 1387 readBlock lee el bloque de datos desde la posicion actual del puntero del archivo
1388 1388 (self.fp) y actualiza todos los parametros relacionados al bloque de datos
1389 1389 (metadata + data). La data leida es almacenada en el buffer y el contador del buffer
1390 1390 es seteado a 0
1391 1391
1392 1392 Inputs:
1393 1393 None
1394 1394
1395 1395 Return:
1396 1396 None
1397 1397
1398 1398 Affected:
1399 1399 self.profileIndex
1400 1400 self.datablock
1401 1401 self.flagIsNewFile
1402 1402 self.flagIsNewBlock
1403 1403 self.nTotalBlocks
1404 1404
1405 1405 Exceptions:
1406 1406 Si un bloque leido no es un bloque valido
1407 1407 """
1408 1408
1409 1409 junk = numpy.fromfile( self.fp, self.dtype, self.blocksize )
1410 1410
1411 1411 try:
1412 1412 junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
1413 1413 except:
1414 1414 print "The read block (%3d) has not enough data" %self.nReadBlocks
1415 1415 return 0
1416 1416
1417 1417 junk = numpy.transpose(junk, (2,0,1))
1418 1418 self.datablock = junk['real'] + junk['imag']*1j
1419 1419
1420 1420 self.profileIndex = 0
1421 1421
1422 1422 self.flagIsNewFile = 0
1423 1423 self.flagIsNewBlock = 1
1424 1424
1425 1425 self.nTotalBlocks += 1
1426 1426 self.nReadBlocks += 1
1427 1427
1428 1428 return 1
1429 1429
1430 1430
1431 1431 def getData(self):
1432 1432 """
1433 1433 getData obtiene una unidad de datos del buffer de lectura y la copia a la clase "Voltage"
1434 1434 con todos los parametros asociados a este (metadata). cuando no hay datos en el buffer de
1435 1435 lectura es necesario hacer una nueva lectura de los bloques de datos usando "readNextBlock"
1436 1436
1437 1437 Ademas incrementa el contador del buffer en 1.
1438 1438
1439 1439 Return:
1440 1440 data : retorna un perfil de voltages (alturas * canales) copiados desde el
1441 1441 buffer. Si no hay mas archivos a leer retorna None.
1442 1442
1443 1443 Variables afectadas:
1444 1444 self.dataOut
1445 1445 self.profileIndex
1446 1446
1447 1447 Affected:
1448 1448 self.dataOut
1449 1449 self.profileIndex
1450 1450 self.flagTimeBlock
1451 1451 self.flagIsNewBlock
1452 1452 """
1453 1453
1454 1454 if self.flagNoMoreFiles:
1455 1455 self.dataOut.flagNoData = True
1456 1456 print 'Process finished'
1457 1457 return 0
1458 1458
1459 1459 self.flagTimeBlock = 0
1460 1460 self.flagIsNewBlock = 0
1461 1461
1462 1462 if self.__hasNotDataInBuffer():
1463 1463
1464 1464 if not( self.readNextBlock() ):
1465 1465 return 0
1466 1466
1467 1467 self.dataOut.dtype = self.dtype
1468 1468
1469 1469 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
1470 1470
1471 1471 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
1472 1472
1473 1473 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
1474 1474
1475 1475 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
1476 1476
1477 1477 self.dataOut.flagTimeBlock = self.flagTimeBlock
1478 1478
1479 1479 self.dataOut.ippSeconds = self.ippSeconds
1480 1480
1481 1481 self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt
1482 1482
1483 1483 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
1484 1484
1485 1485 self.dataOut.flagShiftFFT = False
1486 1486
1487 1487 if self.radarControllerHeaderObj.code != None:
1488 1488
1489 1489 self.dataOut.nCode = self.radarControllerHeaderObj.nCode
1490 1490
1491 1491 self.dataOut.nBaud = self.radarControllerHeaderObj.nBaud
1492 1492
1493 1493 self.dataOut.code = self.radarControllerHeaderObj.code
1494 1494
1495 1495 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
1496 1496
1497 1497 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
1498 1498
1499 1499 self.dataOut.flagDecodeData = False #asumo q la data no esta decodificada
1500 1500
1501 1501 self.dataOut.flagDeflipData = False #asumo q la data no esta sin flip
1502 1502
1503 1503 self.dataOut.flagShiftFFT = False
1504 1504
1505 1505
1506 1506 # self.updateDataHeader()
1507 1507
1508 1508 #data es un numpy array de 3 dmensiones (perfiles, alturas y canales)
1509 1509
1510 1510 if self.datablock == None:
1511 1511 self.dataOut.flagNoData = True
1512 1512 return 0
1513 1513
1514 1514 self.dataOut.data = self.datablock[:,self.profileIndex,:]
1515 1515
1516 1516 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000. + self.profileIndex * self.ippSeconds
1517 1517
1518 1518 self.profileIndex += 1
1519 1519
1520 1520 self.dataOut.flagNoData = False
1521 1521
1522 1522 # print self.profileIndex, self.dataOut.utctime
1523 1523 # if self.profileIndex == 800:
1524 1524 # a=1
1525 1525
1526 1526
1527 1527 return self.dataOut.data
1528 1528
1529 1529
1530 1530 class VoltageWriter(JRODataWriter):
1531 1531 """
1532 1532 Esta clase permite escribir datos de voltajes a archivos procesados (.r). La escritura
1533 1533 de los datos siempre se realiza por bloques.
1534 1534 """
1535 1535
1536 1536 ext = ".r"
1537 1537
1538 1538 optchar = "D"
1539 1539
1540 1540 shapeBuffer = None
1541 1541
1542 1542
1543 1543 def __init__(self):
1544 1544 """
1545 1545 Inicializador de la clase VoltageWriter para la escritura de datos de espectros.
1546 1546
1547 1547 Affected:
1548 1548 self.dataOut
1549 1549
1550 1550 Return: None
1551 1551 """
1552 1552
1553 1553 self.nTotalBlocks = 0
1554 1554
1555 1555 self.profileIndex = 0
1556 1556
1557 1557 self.isConfig = False
1558 1558
1559 1559 self.fp = None
1560 1560
1561 1561 self.flagIsNewFile = 1
1562 1562
1563 1563 self.nTotalBlocks = 0
1564 1564
1565 1565 self.flagIsNewBlock = 0
1566 1566
1567 1567 self.setFile = None
1568 1568
1569 1569 self.dtype = None
1570 1570
1571 1571 self.path = None
1572 1572
1573 1573 self.filename = None
1574 1574
1575 1575 self.basicHeaderObj = BasicHeader(LOCALTIME)
1576 1576
1577 1577 self.systemHeaderObj = SystemHeader()
1578 1578
1579 1579 self.radarControllerHeaderObj = RadarControllerHeader()
1580 1580
1581 1581 self.processingHeaderObj = ProcessingHeader()
1582 1582
1583 1583 def hasAllDataInBuffer(self):
1584 1584 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
1585 1585 return 1
1586 1586 return 0
1587 1587
1588 1588
1589 1589 def setBlockDimension(self):
1590 1590 """
1591 1591 Obtiene las formas dimensionales del los subbloques de datos que componen un bloque
1592 1592
1593 1593 Affected:
1594 1594 self.shape_spc_Buffer
1595 1595 self.shape_cspc_Buffer
1596 1596 self.shape_dc_Buffer
1597 1597
1598 1598 Return: None
1599 1599 """
1600 1600 self.shapeBuffer = (self.processingHeaderObj.profilesPerBlock,
1601 1601 self.processingHeaderObj.nHeights,
1602 1602 self.systemHeaderObj.nChannels)
1603 1603
1604 1604 self.datablock = numpy.zeros((self.systemHeaderObj.nChannels,
1605 1605 self.processingHeaderObj.profilesPerBlock,
1606 1606 self.processingHeaderObj.nHeights),
1607 1607 dtype=numpy.dtype('complex64'))
1608 1608
1609 1609
1610 1610 def writeBlock(self):
1611 1611 """
1612 1612 Escribe el buffer en el file designado
1613 1613
1614 1614 Affected:
1615 1615 self.profileIndex
1616 1616 self.flagIsNewFile
1617 1617 self.flagIsNewBlock
1618 1618 self.nTotalBlocks
1619 1619 self.blockIndex
1620 1620
1621 1621 Return: None
1622 1622 """
1623 1623 data = numpy.zeros( self.shapeBuffer, self.dtype )
1624 1624
1625 1625 junk = numpy.transpose(self.datablock, (1,2,0))
1626 1626
1627 1627 data['real'] = junk.real
1628 1628 data['imag'] = junk.imag
1629 1629
1630 1630 data = data.reshape( (-1) )
1631 1631
1632 1632 data.tofile( self.fp )
1633 1633
1634 1634 self.datablock.fill(0)
1635 1635
1636 1636 self.profileIndex = 0
1637 1637 self.flagIsNewFile = 0
1638 1638 self.flagIsNewBlock = 1
1639 1639
1640 1640 self.blockIndex += 1
1641 1641 self.nTotalBlocks += 1
1642 1642
1643 1643 def putData(self):
1644 1644 """
1645 1645 Setea un bloque de datos y luego los escribe en un file
1646 1646
1647 1647 Affected:
1648 1648 self.flagIsNewBlock
1649 1649 self.profileIndex
1650 1650
1651 1651 Return:
1652 1652 0 : Si no hay data o no hay mas files que puedan escribirse
1653 1653 1 : Si se escribio la data de un bloque en un file
1654 1654 """
1655 1655 if self.dataOut.flagNoData:
1656 1656 return 0
1657 1657
1658 1658 self.flagIsNewBlock = 0
1659 1659
1660 1660 if self.dataOut.flagTimeBlock:
1661 1661
1662 1662 self.datablock.fill(0)
1663 1663 self.profileIndex = 0
1664 1664 self.setNextFile()
1665 1665
1666 1666 if self.profileIndex == 0:
1667 1667 self.getBasicHeader()
1668 1668
1669 1669 self.datablock[:,self.profileIndex,:] = self.dataOut.data
1670 1670
1671 1671 self.profileIndex += 1
1672 1672
1673 1673 if self.hasAllDataInBuffer():
1674 1674 #if self.flagIsNewFile:
1675 1675 self.writeNextBlock()
1676 1676 # self.getDataHeader()
1677 1677
1678 1678 return 1
1679 1679
1680 1680 def __getProcessFlags(self):
1681 1681
1682 1682 processFlags = 0
1683 1683
1684 1684 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
1685 1685 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
1686 1686 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
1687 1687 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
1688 1688 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
1689 1689 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
1690 1690
1691 1691 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
1692 1692
1693 1693
1694 1694
1695 1695 datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
1696 1696 PROCFLAG.DATATYPE_SHORT,
1697 1697 PROCFLAG.DATATYPE_LONG,
1698 1698 PROCFLAG.DATATYPE_INT64,
1699 1699 PROCFLAG.DATATYPE_FLOAT,
1700 1700 PROCFLAG.DATATYPE_DOUBLE]
1701 1701
1702 1702
1703 1703 for index in range(len(dtypeList)):
1704 1704 if self.dataOut.dtype == dtypeList[index]:
1705 1705 dtypeValue = datatypeValueList[index]
1706 1706 break
1707 1707
1708 1708 processFlags += dtypeValue
1709 1709
1710 1710 if self.dataOut.flagDecodeData:
1711 1711 processFlags += PROCFLAG.DECODE_DATA
1712 1712
1713 1713 if self.dataOut.flagDeflipData:
1714 1714 processFlags += PROCFLAG.DEFLIP_DATA
1715 1715
1716 1716 if self.dataOut.code != None:
1717 1717 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
1718 1718
1719 1719 if self.dataOut.nCohInt > 1:
1720 1720 processFlags += PROCFLAG.COHERENT_INTEGRATION
1721 1721
1722 1722 return processFlags
1723 1723
1724 1724
1725 1725 def __getBlockSize(self):
1726 1726 '''
1727 1727 Este metodos determina el cantidad de bytes para un bloque de datos de tipo Voltage
1728 1728 '''
1729 1729
1730 1730 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
1731 1731 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
1732 1732 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
1733 1733 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
1734 1734 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
1735 1735 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
1736 1736
1737 1737 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
1738 1738 datatypeValueList = [1,2,4,8,4,8]
1739 1739 for index in range(len(dtypeList)):
1740 1740 if self.dataOut.dtype == dtypeList[index]:
1741 1741 datatypeValue = datatypeValueList[index]
1742 1742 break
1743 1743
1744 1744 blocksize = int(self.dataOut.nHeights * self.dataOut.nChannels * self.dataOut.nProfiles * datatypeValue * 2)
1745 1745
1746 1746 return blocksize
1747 1747
1748 1748 def getDataHeader(self):
1749 1749
1750 1750 """
1751 1751 Obtiene una copia del First Header
1752 1752
1753 1753 Affected:
1754 1754 self.systemHeaderObj
1755 1755 self.radarControllerHeaderObj
1756 1756 self.dtype
1757 1757
1758 1758 Return:
1759 1759 None
1760 1760 """
1761 1761
1762 1762 self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
1763 1763 self.systemHeaderObj.nChannels = self.dataOut.nChannels
1764 1764 self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()
1765 1765
1766 1766 self.getBasicHeader()
1767 1767
1768 1768 processingHeaderSize = 40 # bytes
1769 1769 self.processingHeaderObj.dtype = 0 # Voltage
1770 1770 self.processingHeaderObj.blockSize = self.__getBlockSize()
1771 1771 self.processingHeaderObj.profilesPerBlock = self.profilesPerBlock
1772 1772 self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
1773 1773 self.processingHeaderObj.nWindows = 1 #podria ser 1 o self.dataOut.processingHeaderObj.nWindows
1774 1774 self.processingHeaderObj.processFlags = self.__getProcessFlags()
1775 1775 self.processingHeaderObj.nCohInt = self.dataOut.nCohInt
1776 1776 self.processingHeaderObj.nIncohInt = 1 # Cuando la data de origen es de tipo Voltage
1777 1777 self.processingHeaderObj.totalSpectra = 0 # Cuando la data de origen es de tipo Voltage
1778 1778
1779 1779 if self.dataOut.code != None:
1780 1780 self.processingHeaderObj.code = self.dataOut.code
1781 1781 self.processingHeaderObj.nCode = self.dataOut.nCode
1782 1782 self.processingHeaderObj.nBaud = self.dataOut.nBaud
1783 1783 codesize = int(8 + 4 * self.dataOut.nCode * self.dataOut.nBaud)
1784 1784 processingHeaderSize += codesize
1785 1785
1786 1786 if self.processingHeaderObj.nWindows != 0:
1787 1787 self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
1788 1788 self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
1789 1789 self.processingHeaderObj.nHeights = self.dataOut.nHeights
1790 1790 self.processingHeaderObj.samplesWin = self.dataOut.nHeights
1791 1791 processingHeaderSize += 12
1792 1792
1793 1793 self.processingHeaderObj.size = processingHeaderSize
1794 1794
1795 1795 class SpectraReader(JRODataReader):
1796 1796 """
1797 1797 Esta clase permite leer datos de espectros desde archivos procesados (.pdata). La lectura
1798 1798 de los datos siempre se realiza por bloques. Los datos leidos (array de 3 dimensiones)
1799 1799 son almacenados en tres buffer's para el Self Spectra, el Cross Spectra y el DC Channel.
1800 1800
1801 1801 paresCanalesIguales * alturas * perfiles (Self Spectra)
1802 1802 paresCanalesDiferentes * alturas * perfiles (Cross Spectra)
1803 1803 canales * alturas (DC Channels)
1804 1804
1805 1805 Esta clase contiene instancias (objetos) de las clases BasicHeader, SystemHeader,
1806 1806 RadarControllerHeader y Spectra. Los tres primeros se usan para almacenar informacion de la
1807 1807 cabecera de datos (metadata), y el cuarto (Spectra) para obtener y almacenar un bloque de
1808 1808 datos desde el "buffer" cada vez que se ejecute el metodo "getData".
1809 1809
1810 1810 Example:
1811 1811 dpath = "/home/myuser/data"
1812 1812
1813 1813 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
1814 1814
1815 1815 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
1816 1816
1817 1817 readerObj = SpectraReader()
1818 1818
1819 1819 readerObj.setup(dpath, startTime, endTime)
1820 1820
1821 1821 while(True):
1822 1822
1823 1823 readerObj.getData()
1824 1824
1825 1825 print readerObj.data_spc
1826 1826
1827 1827 print readerObj.data_cspc
1828 1828
1829 1829 print readerObj.data_dc
1830 1830
1831 1831 if readerObj.flagNoMoreFiles:
1832 1832 break
1833 1833
1834 1834 """
1835 1835
1836 1836 pts2read_SelfSpectra = 0
1837 1837
1838 1838 pts2read_CrossSpectra = 0
1839 1839
1840 1840 pts2read_DCchannels = 0
1841 1841
1842 1842 ext = ".pdata"
1843 1843
1844 1844 optchar = "P"
1845 1845
1846 1846 dataOut = None
1847 1847
1848 1848 nRdChannels = None
1849 1849
1850 1850 nRdPairs = None
1851 1851
1852 1852 rdPairList = []
1853 1853
1854 1854
    def __init__(self):
        """
        Initialize the SpectraReader for reading spectra data.

        A default Spectra object is created internally (self.dataOut) to
        receive the data plus its metadata on every getData() request; a new
        data block is read from file whenever the buffer is empty.

        Affected:
            self.dataOut

        Return : None
        """

        self.isConfig = False

        self.pts2read_SelfSpectra = 0

        self.pts2read_CrossSpectra = 0

        self.pts2read_DCchannels = 0

        self.datablock = None

        self.utc = None

        self.ext = ".pdata"

        self.optchar = "P"

        self.basicHeaderObj = BasicHeader(LOCALTIME)

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()

        self.online = 0

        self.fp = None

        self.idFile = None

        self.dtype = None

        self.fileSizeByHeader = None

        self.filenameList = []

        self.filename = None

        self.fileSize = None

        self.firstHeaderSize = 0

        self.basicHeaderSize = 24

        self.pathList = []

        self.lastUTTime = 0

        self.maxTimeStep = 30

        self.flagNoMoreFiles = 0

        self.set = 0

        self.path = None

        self.delay = 60 #seconds

        self.nTries = 3 #quantity tries

        self.nFiles = 3 #number of files for searching

        self.nReadBlocks = 0

        self.flagIsNewFile = 1

        self.__isFirstTimeOnline = 1

        self.ippSeconds = 0

        self.flagTimeBlock = 0

        self.flagIsNewBlock = 0

        self.nTotalBlocks = 0

        self.blocksize = 0

        self.dataOut = self.createObjByDefault()
1952 1952
1953 1953
1954 1954 def createObjByDefault(self):
1955 1955
1956 1956 dataObj = Spectra()
1957 1957
1958 1958 return dataObj
1959 1959
    def __hasNotDataInBuffer(self):
        # Spectra data is consumed one whole block at a time, so the buffer
        # is always reported as empty to force a fresh block read in getData().
        return 1
1962 1962
1963 1963
    def getBlockDimension(self):
        """
        Compute the number of points to read for each section of a data block
        (self spectra, cross spectra and DC channels).

        Affected:
            self.nRdChannels
            self.nRdPairs
            self.pts2read_SelfSpectra
            self.pts2read_CrossSpectra
            self.pts2read_DCchannels
            self.blocksize
            self.dataOut.nChannels
            self.dataOut.nPairs

        Return:
            None
        """
        self.nRdChannels = 0
        self.nRdPairs = 0
        self.rdPairList = []

        # spectraComb is a flat list of channel pairs: (a0,b0, a1,b1, ...)
        for i in range(0, self.processingHeaderObj.totalSpectra*2, 2):
            if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i+1]:
                self.nRdChannels = self.nRdChannels + 1 #pair of equal channels (self spectra)
            else:
                self.nRdPairs = self.nRdPairs + 1 #pair of different channels (cross spectra)
                self.rdPairList.append((self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1]))

        pts2read = self.processingHeaderObj.nHeights * self.processingHeaderObj.profilesPerBlock

        self.pts2read_SelfSpectra = int(self.nRdChannels * pts2read)
        self.blocksize = self.pts2read_SelfSpectra

        if self.processingHeaderObj.flag_cspc:
            self.pts2read_CrossSpectra = int(self.nRdPairs * pts2read)
            self.blocksize += self.pts2read_CrossSpectra

        if self.processingHeaderObj.flag_dc:
            self.pts2read_DCchannels = int(self.systemHeaderObj.nChannels * self.processingHeaderObj.nHeights)
            self.blocksize += self.pts2read_DCchannels
2006 2006
2007 2007
    def readBlock(self):
        """
        Read one data block from the current position of the file pointer
        (self.fp) and update every block-related parameter (metadata + data).
        The data is stored in the buffers and the buffer counter is reset to 0.

        Return: 1 on success

        Affected:

            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
            self.data_spc
            self.data_cspc
            self.data_dc
        """
        blockOk_flag = False
        fpointer = self.fp.tell()

        # NOTE(review): self spectra are read with self.dtype[0] (single
        # component), while cross spectra / DC use the full paired dtype --
        # looks intentional (self spectra are real-valued), but confirm.
        spc = numpy.fromfile( self.fp, self.dtype[0], self.pts2read_SelfSpectra )
        spc = spc.reshape( (self.nRdChannels, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #reshape to a 3D array

        if self.processingHeaderObj.flag_cspc:
            cspc = numpy.fromfile( self.fp, self.dtype, self.pts2read_CrossSpectra )
            cspc = cspc.reshape( (self.nRdPairs, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #reshape to a 3D array

        if self.processingHeaderObj.flag_dc:
            dc = numpy.fromfile( self.fp, self.dtype, self.pts2read_DCchannels ) #int(self.processingHeaderObj.nHeights*self.systemHeaderObj.nChannels) )
            dc = dc.reshape( (self.systemHeaderObj.nChannels, self.processingHeaderObj.nHeights) ) #reshape to a 2D array


        if not(self.processingHeaderObj.shif_fft):
            #shift the spectra to the right along axis 2 by half a block
            shift = int(self.processingHeaderObj.profilesPerBlock/2)
            spc = numpy.roll( spc, shift , axis=2 )

            if self.processingHeaderObj.flag_cspc:
                #shift the cross spectra to the right along axis 2
                cspc = numpy.roll( cspc, shift, axis=2 )

        # reorder to (channels/pairs, profiles, heights)
        spc = numpy.transpose( spc, (0,2,1) )
        self.data_spc = spc

        if self.processingHeaderObj.flag_cspc:
            cspc = numpy.transpose( cspc, (0,2,1) )
            self.data_cspc = cspc['real'] + cspc['imag']*1j
        else:
            self.data_cspc = None

        if self.processingHeaderObj.flag_dc:
            self.data_dc = dc['real'] + dc['imag']*1j
        else:
            self.data_dc = None

        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1

        self.nTotalBlocks += 1
        self.nReadBlocks += 1

        return 1
2076 2076
2077 2077
2078 2078 def getData(self):
2079 2079 """
2080 2080 Copia el buffer de lectura a la clase "Spectra",
2081 2081 con todos los parametros asociados a este (metadata). cuando no hay datos en el buffer de
2082 2082 lectura es necesario hacer una nueva lectura de los bloques de datos usando "readNextBlock"
2083 2083
2084 2084 Return:
2085 2085 0 : Si no hay mas archivos disponibles
2086 2086 1 : Si hizo una buena copia del buffer
2087 2087
2088 2088 Affected:
2089 2089 self.dataOut
2090 2090
2091 2091 self.flagTimeBlock
2092 2092 self.flagIsNewBlock
2093 2093 """
2094 2094
2095 2095 if self.flagNoMoreFiles:
2096 2096 self.dataOut.flagNoData = True
2097 2097 print 'Process finished'
2098 2098 return 0
2099 2099
2100 2100 self.flagTimeBlock = 0
2101 2101 self.flagIsNewBlock = 0
2102 2102
2103 2103 if self.__hasNotDataInBuffer():
2104 2104
2105 2105 if not( self.readNextBlock() ):
2106 2106 self.dataOut.flagNoData = True
2107 2107 return 0
2108 2108
2109 2109 # self.updateDataHeader()
2110 2110
2111 2111 #data es un numpy array de 3 dmensiones (perfiles, alturas y canales)
2112 2112
2113 2113 if self.data_dc == None:
2114 2114 self.dataOut.flagNoData = True
2115 2115 return 0
2116 2116
2117 2117 self.dataOut.data_spc = self.data_spc
2118 2118
2119 2119 self.dataOut.data_cspc = self.data_cspc
2120 2120
2121 2121 self.dataOut.data_dc = self.data_dc
2122 2122
2123 2123 self.dataOut.flagTimeBlock = self.flagTimeBlock
2124 2124
2125 2125 self.dataOut.flagNoData = False
2126 2126
2127 2127 self.dataOut.dtype = self.dtype
2128 2128
2129 2129 # self.dataOut.nChannels = self.nRdChannels
2130 2130
2131 2131 self.dataOut.nPairs = self.nRdPairs
2132 2132
2133 2133 self.dataOut.pairsList = self.rdPairList
2134 2134
2135 2135 # self.dataOut.nHeights = self.processingHeaderObj.nHeights
2136 2136
2137 2137 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
2138 2138
2139 2139 self.dataOut.nFFTPoints = self.processingHeaderObj.profilesPerBlock
2140 2140
2141 2141 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
2142 2142
2143 2143 self.dataOut.nIncohInt = self.processingHeaderObj.nIncohInt
2144 2144
2145 2145 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
2146 2146
2147 2147 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
2148 2148
2149 2149 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
2150 2150
2151 2151 # self.dataOut.channelIndexList = range(self.systemHeaderObj.nChannels)
2152 2152
2153 2153 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000.#+ self.profileIndex * self.ippSeconds
2154 2154
2155 2155 self.dataOut.ippSeconds = self.ippSeconds
2156 2156
2157 2157 self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt * self.processingHeaderObj.nIncohInt * self.dataOut.nFFTPoints
2158 2158
2159 2159 # self.profileIndex += 1
2160 2160
2161 2161 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
2162 2162
2163 2163 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
2164 2164
2165 2165 self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft
2166 2166
2167 2167 self.dataOut.flagDecodeData = False #asumo q la data no esta decodificada
2168 2168
2169 2169 self.dataOut.flagDeflipData = True #asumo q la data no esta sin flip
2170 2170
2171 2171 if self.processingHeaderObj.code != None:
2172 2172
2173 2173 self.dataOut.nCode = self.processingHeaderObj.nCode
2174 2174
2175 2175 self.dataOut.nBaud = self.processingHeaderObj.nBaud
2176 2176
2177 2177 self.dataOut.code = self.processingHeaderObj.code
2178 2178
2179 2179 self.dataOut.flagDecodeData = True
2180 2180
2181 2181 return self.dataOut.data_spc
2182 2182
2183 2183
class SpectraWriter(JRODataWriter):
    
    """
    Writes processed spectra data to ".pdata" files. Data is always written
    to disk one whole block at a time.
    """
    
    ext = ".pdata"            # output file extension
    
    optchar = "P"             # option character embedded in output file names
    
    shape_spc_Buffer = None   # shape of the self-spectra buffer (channels, heights, profiles)
    
    shape_cspc_Buffer = None  # shape of the cross-spectra buffer (pairs, heights, profiles)
    
    shape_dc_Buffer = None    # shape of the DC-channels buffer (channels, heights)
    
    data_spc = None           # self-spectra block pending to be written
    
    data_cspc = None          # cross-spectra block pending to be written (may be None)
    
    data_dc = None            # DC-channels block pending to be written (may be None)
    
#    dataOut = None
2208 2208
2209 2209 def __init__(self):
2210 2210 """
2211 2211 Inicializador de la clase SpectraWriter para la escritura de datos de espectros.
2212 2212
2213 2213 Affected:
2214 2214 self.dataOut
2215 2215 self.basicHeaderObj
2216 2216 self.systemHeaderObj
2217 2217 self.radarControllerHeaderObj
2218 2218 self.processingHeaderObj
2219 2219
2220 2220 Return: None
2221 2221 """
2222 2222
2223 2223 self.isConfig = False
2224 2224
2225 2225 self.nTotalBlocks = 0
2226 2226
2227 2227 self.data_spc = None
2228 2228
2229 2229 self.data_cspc = None
2230 2230
2231 2231 self.data_dc = None
2232 2232
2233 2233 self.fp = None
2234 2234
2235 2235 self.flagIsNewFile = 1
2236 2236
2237 2237 self.nTotalBlocks = 0
2238 2238
2239 2239 self.flagIsNewBlock = 0
2240 2240
2241 2241 self.setFile = None
2242 2242
2243 2243 self.dtype = None
2244 2244
2245 2245 self.path = None
2246 2246
2247 2247 self.noMoreFiles = 0
2248 2248
2249 2249 self.filename = None
2250 2250
2251 2251 self.basicHeaderObj = BasicHeader(LOCALTIME)
2252 2252
2253 2253 self.systemHeaderObj = SystemHeader()
2254 2254
2255 2255 self.radarControllerHeaderObj = RadarControllerHeader()
2256 2256
2257 2257 self.processingHeaderObj = ProcessingHeader()
2258 2258
2259 2259
2260 2260 def hasAllDataInBuffer(self):
2261 2261 return 1
2262 2262
2263 2263
2264 2264 def setBlockDimension(self):
2265 2265 """
2266 2266 Obtiene las formas dimensionales del los subbloques de datos que componen un bloque
2267 2267
2268 2268 Affected:
2269 2269 self.shape_spc_Buffer
2270 2270 self.shape_cspc_Buffer
2271 2271 self.shape_dc_Buffer
2272 2272
2273 2273 Return: None
2274 2274 """
2275 2275 self.shape_spc_Buffer = (self.dataOut.nChannels,
2276 2276 self.processingHeaderObj.nHeights,
2277 2277 self.processingHeaderObj.profilesPerBlock)
2278 2278
2279 2279 self.shape_cspc_Buffer = (self.dataOut.nPairs,
2280 2280 self.processingHeaderObj.nHeights,
2281 2281 self.processingHeaderObj.profilesPerBlock)
2282 2282
2283 2283 self.shape_dc_Buffer = (self.dataOut.nChannels,
2284 2284 self.processingHeaderObj.nHeights)
2285 2285
2286 2286
2287 2287 def writeBlock(self):
2288 2288 """
2289 2289 Escribe el buffer en el file designado
2290 2290
2291 2291 Affected:
2292 2292 self.data_spc
2293 2293 self.data_cspc
2294 2294 self.data_dc
2295 2295 self.flagIsNewFile
2296 2296 self.flagIsNewBlock
2297 2297 self.nTotalBlocks
2298 2298 self.nWriteBlocks
2299 2299
2300 2300 Return: None
2301 2301 """
2302 2302
2303 2303 spc = numpy.transpose( self.data_spc, (0,2,1) )
2304 2304 if not( self.processingHeaderObj.shif_fft ):
2305 2305 spc = numpy.roll( spc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #desplaza a la derecha en el eje 2 determinadas posiciones
2306 2306 data = spc.reshape((-1))
2307 2307 data = data.astype(self.dtype[0])
2308 2308 data.tofile(self.fp)
2309 2309
2310 2310 if self.data_cspc != None:
2311 2311 data = numpy.zeros( self.shape_cspc_Buffer, self.dtype )
2312 2312 cspc = numpy.transpose( self.data_cspc, (0,2,1) )
2313 2313 if not( self.processingHeaderObj.shif_fft ):
2314 2314 cspc = numpy.roll( cspc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #desplaza a la derecha en el eje 2 determinadas posiciones
2315 2315 data['real'] = cspc.real
2316 2316 data['imag'] = cspc.imag
2317 2317 data = data.reshape((-1))
2318 2318 data.tofile(self.fp)
2319 2319
2320 2320 if self.data_dc != None:
2321 2321 data = numpy.zeros( self.shape_dc_Buffer, self.dtype )
2322 2322 dc = self.data_dc
2323 2323 data['real'] = dc.real
2324 2324 data['imag'] = dc.imag
2325 2325 data = data.reshape((-1))
2326 2326 data.tofile(self.fp)
2327 2327
2328 2328 self.data_spc.fill(0)
2329 2329 self.data_dc.fill(0)
2330 2330 if self.data_cspc != None:
2331 2331 self.data_cspc.fill(0)
2332 2332
2333 2333 self.flagIsNewFile = 0
2334 2334 self.flagIsNewBlock = 1
2335 2335 self.nTotalBlocks += 1
2336 2336 self.nWriteBlocks += 1
2337 2337 self.blockIndex += 1
2338 2338
2339 2339
2340 2340 def putData(self):
2341 2341 """
2342 2342 Setea un bloque de datos y luego los escribe en un file
2343 2343
2344 2344 Affected:
2345 2345 self.data_spc
2346 2346 self.data_cspc
2347 2347 self.data_dc
2348 2348
2349 2349 Return:
2350 2350 0 : Si no hay data o no hay mas files que puedan escribirse
2351 2351 1 : Si se escribio la data de un bloque en un file
2352 2352 """
2353 2353
2354 2354 if self.dataOut.flagNoData:
2355 2355 return 0
2356 2356
2357 2357 self.flagIsNewBlock = 0
2358 2358
2359 2359 if self.dataOut.flagTimeBlock:
2360 2360 self.data_spc.fill(0)
2361 2361 self.data_cspc.fill(0)
2362 2362 self.data_dc.fill(0)
2363 2363 self.setNextFile()
2364 2364
2365 2365 if self.flagIsNewFile == 0:
2366 2366 self.getBasicHeader()
2367 2367
2368 2368 self.data_spc = self.dataOut.data_spc.copy()
2369 2369 self.data_cspc = self.dataOut.data_cspc.copy()
2370 2370 self.data_dc = self.dataOut.data_dc.copy()
2371 2371
2372 2372 # #self.processingHeaderObj.dataBlocksPerFile)
2373 2373 if self.hasAllDataInBuffer():
2374 2374 # self.getDataHeader()
2375 2375 self.writeNextBlock()
2376 2376
2377 2377 return 1
2378 2378
2379 2379
2380 2380 def __getProcessFlags(self):
2381 2381
2382 2382 processFlags = 0
2383 2383
2384 2384 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
2385 2385 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
2386 2386 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
2387 2387 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
2388 2388 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
2389 2389 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
2390 2390
2391 2391 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
2392 2392
2393 2393
2394 2394
2395 2395 datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
2396 2396 PROCFLAG.DATATYPE_SHORT,
2397 2397 PROCFLAG.DATATYPE_LONG,
2398 2398 PROCFLAG.DATATYPE_INT64,
2399 2399 PROCFLAG.DATATYPE_FLOAT,
2400 2400 PROCFLAG.DATATYPE_DOUBLE]
2401 2401
2402 2402
2403 2403 for index in range(len(dtypeList)):
2404 2404 if self.dataOut.dtype == dtypeList[index]:
2405 2405 dtypeValue = datatypeValueList[index]
2406 2406 break
2407 2407
2408 2408 processFlags += dtypeValue
2409 2409
2410 2410 if self.dataOut.flagDecodeData:
2411 2411 processFlags += PROCFLAG.DECODE_DATA
2412 2412
2413 2413 if self.dataOut.flagDeflipData:
2414 2414 processFlags += PROCFLAG.DEFLIP_DATA
2415 2415
2416 2416 if self.dataOut.code != None:
2417 2417 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
2418 2418
2419 2419 if self.dataOut.nIncohInt > 1:
2420 2420 processFlags += PROCFLAG.INCOHERENT_INTEGRATION
2421 2421
2422 2422 if self.dataOut.data_dc != None:
2423 2423 processFlags += PROCFLAG.SAVE_CHANNELS_DC
2424 2424
2425 2425 return processFlags
2426 2426
2427 2427
2428 2428 def __getBlockSize(self):
2429 2429 '''
2430 2430 Este metodos determina el cantidad de bytes para un bloque de datos de tipo Spectra
2431 2431 '''
2432 2432
2433 2433 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
2434 2434 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
2435 2435 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
2436 2436 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
2437 2437 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
2438 2438 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
2439 2439
2440 2440 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
2441 2441 datatypeValueList = [1,2,4,8,4,8]
2442 2442 for index in range(len(dtypeList)):
2443 2443 if self.dataOut.dtype == dtypeList[index]:
2444 2444 datatypeValue = datatypeValueList[index]
2445 2445 break
2446 2446
2447 2447
2448 2448 pts2write = self.dataOut.nHeights * self.dataOut.nFFTPoints
2449 2449
2450 2450 pts2write_SelfSpectra = int(self.dataOut.nChannels * pts2write)
2451 2451 blocksize = (pts2write_SelfSpectra*datatypeValue)
2452 2452
2453 2453 if self.dataOut.data_cspc != None:
2454 2454 pts2write_CrossSpectra = int(self.dataOut.nPairs * pts2write)
2455 2455 blocksize += (pts2write_CrossSpectra*datatypeValue*2)
2456 2456
2457 2457 if self.dataOut.data_dc != None:
2458 2458 pts2write_DCchannels = int(self.dataOut.nChannels * self.dataOut.nHeights)
2459 2459 blocksize += (pts2write_DCchannels*datatypeValue*2)
2460 2460
2461 2461 blocksize = blocksize #* datatypeValue * 2 #CORREGIR ESTO
2462 2462
2463 2463 return blocksize
2464 2464
    def getDataHeader(self):
        
        """
        Build a copy of the "First Header" (system, radar controller and
        processing headers) from the metadata of self.dataOut, and compute
        the processing header size.

        Affected:
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.dtype

        Return:
            None
        """
        
        self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
        self.systemHeaderObj.nChannels = self.dataOut.nChannels
        self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()
        
        self.getBasicHeader()

        processingHeaderSize = 40 # bytes (fixed part of the processing header)
        self.processingHeaderObj.dtype = 1 # Spectra
        self.processingHeaderObj.blockSize = self.__getBlockSize()
        self.processingHeaderObj.profilesPerBlock = self.dataOut.nFFTPoints
        self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
        self.processingHeaderObj.nWindows = 1 # could be 1 or self.dataOut.processingHeaderObj.nWindows
        self.processingHeaderObj.processFlags = self.__getProcessFlags()
        self.processingHeaderObj.nCohInt = self.dataOut.nCohInt # needed to compute the value of timeInterval
        self.processingHeaderObj.nIncohInt = self.dataOut.nIncohInt
        self.processingHeaderObj.totalSpectra = self.dataOut.nPairs + self.dataOut.nChannels
        
        if self.processingHeaderObj.totalSpectra > 0:
            # self-spectra are encoded as (ch, ch) pairs; cross-spectra as
            # (ch_i, ch_j) pairs, flattened into spectraComb
            channelList = []
            for channel in range(self.dataOut.nChannels):
                channelList.append(channel)
                channelList.append(channel)
        
            pairsList = []
            for pair in self.dataOut.pairsList:
                pairsList.append(pair[0])
                pairsList.append(pair[1])
            spectraComb = channelList + pairsList
            spectraComb = numpy.array(spectraComb,dtype="u1")
            self.processingHeaderObj.spectraComb = spectraComb
            sizeOfSpcComb = len(spectraComb)
            processingHeaderSize += sizeOfSpcComb
        
        # NOTE: the processing header intentionally does NOT carry the code
        # information anymore. The previous version appended nCode/nBaud/code
        # (and their sizes) here when self.dataOut.code was set; that block
        # was removed on purpose.
        
        if self.processingHeaderObj.nWindows != 0:
            self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
            self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
            self.processingHeaderObj.nHeights = self.dataOut.nHeights
            self.processingHeaderObj.samplesWin = self.dataOut.nHeights
            sizeOfFirstHeight = 4
            sizeOfdeltaHeight = 4
            sizeOfnHeights = 4
            sizeOfWindows = (sizeOfFirstHeight + sizeOfdeltaHeight + sizeOfnHeights)*self.processingHeaderObj.nWindows
            processingHeaderSize += sizeOfWindows
        
        self.processingHeaderObj.size = processingHeaderSize
2534 2535
class SpectraHeisWriter():
    """
    Writes HEIS spectra (8 channels) to FITS files, one file per call to
    putData().
    """
    
    i=0  # counter (unused in the visible code)
    
    def __init__(self, dataOut):
        
        self.wrObj = FITS()
        self.dataOut = dataOut
        
    # NOTE(review): this method is missing the "self" parameter, shadows the
    # builtin "str" and duplicates the module-level isNumber(); it cannot be
    # used as an instance method and appears to be dead code.
    def isNumber(str):
        """
        Check whether the given string can be converted to a number.

        Input:
            str, the string to analyze

        Return:
            True  : the string is numeric
            False : it is not numeric
        """
        try:
            float( str )
            return True
        except:
            return False
        
    def setup(self, wrpath,):
        # create the output root directory if needed and reset the file counter
        if not(os.path.exists(wrpath)):
            os.mkdir(wrpath)
        
        self.wrpath = wrpath
        self.setFile = 0
        
    def putData(self):
#        self.wrObj.writeHeader(nChannels=self.dataOut.nChannels, nFFTPoints=self.dataOut.nFFTPoints)
        #name = self.dataOut.utctime
        name= time.localtime( self.dataOut.utctime)
        ext=".fits"
        #folder='D%4.4d%3.3d'%(name.tm_year,name.tm_yday)
        subfolder = 'D%4.4d%3.3d' % (name.tm_year,name.tm_yday)
        
        # one subfolder per day: D<year><day-of-year>
        fullpath = os.path.join( self.wrpath, subfolder )
        if not( os.path.exists(fullpath) ):
            os.mkdir(fullpath)
        self.setFile += 1
        file = 'D%4.4d%3.3d%3.3d%s' % (name.tm_year,name.tm_yday,self.setFile,ext)
        
        filename = os.path.join(self.wrpath,subfolder, file)
        
#        print self.dataOut.ippSeconds
        # NOTE(review): the frequency axis is built from nHeights while the
        # data columns use nFFTPoints -- confirm these always match.
        freq=numpy.arange(-1*self.dataOut.nHeights/2.,self.dataOut.nHeights/2.)/(2*self.dataOut.ippSeconds)
        
        # one column per channel, power in dB (10*log10)
        col1=self.wrObj.setColF(name="freq", format=str(self.dataOut.nFFTPoints)+'E', array=freq)
        col2=self.wrObj.writeData(name="P_Ch1",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[0,:]))
        col3=self.wrObj.writeData(name="P_Ch2",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[1,:]))
        col4=self.wrObj.writeData(name="P_Ch3",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[2,:]))
        col5=self.wrObj.writeData(name="P_Ch4",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[3,:]))
        col6=self.wrObj.writeData(name="P_Ch5",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[4,:]))
        col7=self.wrObj.writeData(name="P_Ch6",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[5,:]))
        col8=self.wrObj.writeData(name="P_Ch7",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[6,:]))
        col9=self.wrObj.writeData(name="P_Ch8",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[7,:]))
        #n=numpy.arange((100))
        n=self.dataOut.data_spc[6,:]
        a=self.wrObj.cFImage(n)
        b=self.wrObj.Ctable(col1,col2,col3,col4,col5,col6,col7,col8,col9)
        self.wrObj.CFile(a,b)
        self.wrObj.wFile(filename)
        return 1
2606 2607
class FITS:
    """Thin convenience wrapper around pyfits for building and writing FITS files."""

    name = None       # last column name used
    format = None     # last column format used
    array = None      # last float array passed to setColF
    data = None       # last data array passed to writeData
    thdulist = None   # assembled HDU list, ready to be written

    def __init__(self):
        # No state to initialize; attributes are filled by the builder methods.
        pass

    def setColF(self, name, format, array):
        """Create (and remember) a float32 column built from *array*."""
        self.name = name
        self.format = format
        self.array = array
        values = numpy.array([self.array], dtype=numpy.float32)
        self.col1 = pyfits.Column(name=self.name, format=self.format, array=values)
        return self.col1

    def writeHeader(self,):
        pass

    def writeData(self, name, format, data):
        """Create (and remember) a float32 data column built from *data*."""
        self.name = name
        self.format = format
        self.data = data
        values = numpy.array([self.data], dtype=numpy.float32)
        self.col2 = pyfits.Column(name=self.name, format=self.format, array=values)
        return self.col2

    def cFImage(self, n):
        """Build the primary HDU holding the image data *n*."""
        self.hdu = pyfits.PrimaryHDU(n)
        return self.hdu

    def Ctable(self, col1, col2, col3, col4, col5, col6, col7, col8, col9):
        """Build a binary-table HDU from the nine given columns."""
        self.cols = pyfits.ColDefs([col1, col2, col3, col4, col5, col6, col7, col8, col9])
        self.tbhdu = pyfits.new_table(self.cols)
        return self.tbhdu

    def CFile(self, hdu, tbhdu):
        """Assemble the HDU list (primary image + binary table)."""
        self.thdulist = pyfits.HDUList([hdu, tbhdu])

    def wFile(self, filename):
        """Write the assembled HDU list to *filename*."""
        self.thdulist.writeto(filename)
@@ -1,531 +1,530
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JROHeaderIO.py 151 2012-10-31 19:00:51Z murco $
5 5 '''
6 6 import sys
7 7 import numpy
8 8 import copy
9 9 import datetime
10 10
11 11 class Header:
12 12
13 13 def __init__(self):
14 14 raise
15 15
16 16 def copy(self):
17 17 return copy.deepcopy(self)
18 18
19 19 def read():
20 20 pass
21 21
22 22 def write():
23 23 pass
24 24
25 25 def printInfo(self):
26 26
27 27 print "#"*100
28 28 print self.__class__.__name__.upper()
29 29 print "#"*100
30 30 for key in self.__dict__.keys():
31 31 print "%s = %s" %(key, self.__dict__[key])
32 32
33 33 class BasicHeader(Header):
34 34
35 35 size = None
36 36 version = None
37 37 dataBlock = None
38 38 utc = None
39 39 miliSecond = None
40 40 timeZone = None
41 41 dstFlag = None
42 42 errorCount = None
43 43 struct = None
44 44 datatime = None
45 45
46 46 __LOCALTIME = None
47 47
48 48 def __init__(self, localtime=0):
49 49
50 50 self.size = 0
51 51 self.version = 0
52 52 self.dataBlock = 0
53 53 self.utc = 0
54 54 self.miliSecond = 0
55 55 self.timeZone = 0
56 56 self.dstFlag = 0
57 57 self.errorCount = 0
58 58 self.struct = numpy.dtype([
59 59 ('nSize','<u4'),
60 60 ('nVersion','<u2'),
61 61 ('nDataBlockId','<u4'),
62 62 ('nUtime','<u4'),
63 63 ('nMilsec','<u2'),
64 64 ('nTimezone','<i2'),
65 65 ('nDstflag','<i2'),
66 66 ('nErrorCount','<u4')
67 67 ])
68 68
69 69 self.__LOCALTIME = localtime
70 70
71 71 def read(self, fp):
72 72 try:
73 73 header = numpy.fromfile(fp, self.struct,1)
74 74 self.size = int(header['nSize'][0])
75 75 self.version = int(header['nVersion'][0])
76 76 self.dataBlock = int(header['nDataBlockId'][0])
77 77 self.utc = int(header['nUtime'][0])
78 78 self.miliSecond = int(header['nMilsec'][0])
79 79 self.timeZone = int(header['nTimezone'][0])
80 80 self.dstFlag = int(header['nDstflag'][0])
81 81 self.errorCount = int(header['nErrorCount'][0])
82 82
83 83 self.utc += self.__LOCALTIME
84 84
85 85 self.datatime = datetime.datetime.utcfromtimestamp(self.utc)
86 86
87 87 except Exception, e:
88 88 print "BasicHeader: "
89 89 print e
90 90 return 0
91 91
92 92 return 1
93 93
94 94 def write(self, fp):
95 95 self.utc -= self.__LOCALTIME
96 96 headerTuple = (self.size,self.version,self.dataBlock,self.utc,self.miliSecond,self.timeZone,self.dstFlag,self.errorCount)
97 97 header = numpy.array(headerTuple,self.struct)
98 98 header.tofile(fp)
99 99
100 100 return 1
101 101
102 102 class SystemHeader(Header):
103 103
104 104 size = None
105 105 nSamples = None
106 106 nProfiles = None
107 107 nChannels = None
108 108 adcResolution = None
109 109 pciDioBusWidth = None
110 110 struct = None
111 111
112 112 def __init__(self):
113 113 self.size = 0
114 114 self.nSamples = 0
115 115 self.nProfiles = 0
116 116 self.nChannels = 0
117 117 self.adcResolution = 0
118 118 self.pciDioBusWidth = 0
119 119 self.struct = numpy.dtype([
120 120 ('nSize','<u4'),
121 121 ('nNumSamples','<u4'),
122 122 ('nNumProfiles','<u4'),
123 123 ('nNumChannels','<u4'),
124 124 ('nADCResolution','<u4'),
125 125 ('nPCDIOBusWidth','<u4'),
126 126 ])
127 127
128 128
129 129 def read(self, fp):
130 130 try:
131 131 header = numpy.fromfile(fp,self.struct,1)
132 132 self.size = header['nSize'][0]
133 133 self.nSamples = header['nNumSamples'][0]
134 134 self.nProfiles = header['nNumProfiles'][0]
135 135 self.nChannels = header['nNumChannels'][0]
136 136 self.adcResolution = header['nADCResolution'][0]
137 137 self.pciDioBusWidth = header['nPCDIOBusWidth'][0]
138 138
139 139 except Exception, e:
140 140 print "SystemHeader: " + e
141 141 return 0
142 142
143 143 return 1
144 144
145 145 def write(self, fp):
146 146 headerTuple = (self.size,self.nSamples,self.nProfiles,self.nChannels,self.adcResolution,self.pciDioBusWidth)
147 147 header = numpy.array(headerTuple,self.struct)
148 148 header.tofile(fp)
149 149
150 150 return 1
151 151
152 152 class RadarControllerHeader(Header):
153 153
154 154 size = None
155 155 expType = None
156 156 nTx = None
157 157 ipp = None
158 158 txA = None
159 159 txB = None
160 160 nWindows = None
161 161 numTaus = None
162 162 codeType = None
163 163 line6Function = None
164 164 line5Function = None
165 165 fClock = None
166 166 prePulseBefore = None
167 167 prePulserAfter = None
168 168 rangeIpp = None
169 169 rangeTxA = None
170 170 rangeTxB = None
171 171 struct = None
172 172
173 173 def __init__(self):
174 174 self.size = 0
175 175 self.expType = 0
176 176 self.nTx = 0
177 177 self.ipp = 0
178 178 self.txA = 0
179 179 self.txB = 0
180 180 self.nWindows = 0
181 181 self.numTaus = 0
182 182 self.codeType = 0
183 183 self.line6Function = 0
184 184 self.line5Function = 0
185 185 self.fClock = 0
186 186 self.prePulseBefore = 0
187 187 self.prePulserAfter = 0
188 188 self.rangeIpp = 0
189 189 self.rangeTxA = 0
190 190 self.rangeTxB = 0
191 191 self.struct = numpy.dtype([
192 192 ('nSize','<u4'),
193 193 ('nExpType','<u4'),
194 194 ('nNTx','<u4'),
195 195 ('fIpp','<f4'),
196 196 ('fTxA','<f4'),
197 197 ('fTxB','<f4'),
198 198 ('nNumWindows','<u4'),
199 199 ('nNumTaus','<u4'),
200 200 ('nCodeType','<u4'),
201 201 ('nLine6Function','<u4'),
202 202 ('nLine5Function','<u4'),
203 203 ('fClock','<f4'),
204 204 ('nPrePulseBefore','<u4'),
205 205 ('nPrePulseAfter','<u4'),
206 206 ('sRangeIPP','<a20'),
207 207 ('sRangeTxA','<a20'),
208 208 ('sRangeTxB','<a20'),
209 209 ])
210 210
211 211 self.samplingWindowStruct = numpy.dtype([('h0','<f4'),('dh','<f4'),('nsa','<u4')])
212 212
213 213 self.samplingWindow = None
214 214 self.nHeights = None
215 215 self.firstHeight = None
216 216 self.deltaHeight = None
217 217 self.samplesWin = None
218 218
219 219 self.nCode = None
220 220 self.nBaud = None
221 221 self.code = None
222 222 self.flip1 = None
223 223 self.flip2 = None
224 224
225 225 self.dynamic = numpy.array([],numpy.dtype('byte'))
226 226
227 227
228 228 def read(self, fp):
229 229 try:
230 230 startFp = fp.tell()
231 231 header = numpy.fromfile(fp,self.struct,1)
232 232 self.size = int(header['nSize'][0])
233 233 self.expType = int(header['nExpType'][0])
234 234 self.nTx = int(header['nNTx'][0])
235 235 self.ipp = float(header['fIpp'][0])
236 236 self.txA = float(header['fTxA'][0])
237 237 self.txB = float(header['fTxB'][0])
238 238 self.nWindows = int(header['nNumWindows'][0])
239 239 self.numTaus = int(header['nNumTaus'][0])
240 240 self.codeType = int(header['nCodeType'][0])
241 241 self.line6Function = int(header['nLine6Function'][0])
242 242 self.line5Function = int(header['nLine5Function'][0])
243 243 self.fClock = float(header['fClock'][0])
244 244 self.prePulseBefore = int(header['nPrePulseBefore'][0])
245 245 self.prePulserAfter = int(header['nPrePulseAfter'][0])
246 246 self.rangeIpp = header['sRangeIPP'][0]
247 247 self.rangeTxA = header['sRangeTxA'][0]
248 248 self.rangeTxB = header['sRangeTxB'][0]
249 249 # jump Dynamic Radar Controller Header
250 250 jumpFp = self.size - 116
251 251 self.dynamic = numpy.fromfile(fp,numpy.dtype('byte'),jumpFp)
252 252 #pointer backward to dynamic header and read
253 253 backFp = fp.tell() - jumpFp
254 254 fp.seek(backFp)
255 255
256 256 self.samplingWindow = numpy.fromfile(fp,self.samplingWindowStruct,self.nWindows)
257 257 self.nHeights = int(numpy.sum(self.samplingWindow['nsa']))
258 258 self.firstHeight = self.samplingWindow['h0']
259 259 self.deltaHeight = self.samplingWindow['dh']
260 260 self.samplesWin = self.samplingWindow['nsa']
261 261
262 262 self.Taus = numpy.fromfile(fp,'<f4',self.numTaus)
263 263
264 264 if self.codeType != 0:
265 265 self.nCode = int(numpy.fromfile(fp,'<u4',1))
266 266 self.nBaud = int(numpy.fromfile(fp,'<u4',1))
267 267 self.code = numpy.empty([self.nCode,self.nBaud],dtype='u1')
268 268 tempList = []
269 269 for ic in range(self.nCode):
270 270 temp = numpy.fromfile(fp,'u1',4*int(numpy.ceil(self.nBaud/32.)))
271 271 tempList.append(temp)
272 272 self.code[ic] = numpy.unpackbits(temp[::-1])[-1*self.nBaud:]
273 273 self.code = 2.0*self.code - 1.0
274 274
275 275 if self.line5Function == RCfunction.FLIP:
276 276 self.flip1 = numpy.fromfile(fp,'<u4',1)
277 277
278 278 if self.line6Function == RCfunction.FLIP:
279 279 self.flip2 = numpy.fromfile(fp,'<u4',1)
280 280
281 281 endFp = self.size + startFp
282 282 jumpFp = endFp - fp.tell()
283 283 if jumpFp > 0:
284 284 fp.seek(jumpFp)
285 285
286 286 except Exception, e:
287 287 print "RadarControllerHeader: " + e
288 288 return 0
289 289
290 290 return 1
291 291
292 292 def write(self, fp):
293 293 headerTuple = (self.size,
294 294 self.expType,
295 295 self.nTx,
296 296 self.ipp,
297 297 self.txA,
298 298 self.txB,
299 299 self.nWindows,
300 300 self.numTaus,
301 301 self.codeType,
302 302 self.line6Function,
303 303 self.line5Function,
304 304 self.fClock,
305 305 self.prePulseBefore,
306 306 self.prePulserAfter,
307 307 self.rangeIpp,
308 308 self.rangeTxA,
309 309 self.rangeTxB)
310 310
311 311 header = numpy.array(headerTuple,self.struct)
312 312 header.tofile(fp)
313 313
314 314 dynamic = self.dynamic
315 315 dynamic.tofile(fp)
316 316
317 317 return 1
318 318
319 319
320 320
class ProcessingHeader(Header):
    """
    Processing header: describes how the raw data was processed before being
    stored (data type, block size, integrations, spectra combinations,
    sampling windows, ...).
    """
    
    size = None              # header size in bytes
    dtype = None             # stored data type id (1 = Spectra in this writer)
    blockSize = None         # size in bytes of one data block
    profilesPerBlock = None  # profiles (FFT points) per block
    dataBlocksPerFile = None # blocks stored in each file
    nWindows = None          # number of sampling windows
    processFlags = None      # PROCFLAG bitmask
    nCohInt = None           # coherent integrations
    nIncohInt = None         # incoherent integrations
    totalSpectra = None      # self-spectra + cross-spectra count
    struct = None            # numpy dtype of the fixed header part
    flag_dc = None           # True when DC channels were saved
    flag_cspc = None         # True when cross-spectra are present
336 336
    def __init__(self):
        # fixed part of the header, zero-initialized
        self.size = 0
        self.dtype = 0
        self.blockSize = 0
        self.profilesPerBlock = 0
        self.dataBlocksPerFile = 0
        self.nWindows = 0
        self.processFlags = 0
        self.nCohInt = 0
        self.nIncohInt = 0
        self.totalSpectra = 0
        # binary layout of the fixed header part on disk (little endian)
        self.struct = numpy.dtype([
                             ('nSize','<u4'),
                             ('nDataType','<u4'),
                             ('nSizeOfDataBlock','<u4'),
                             ('nProfilesperBlock','<u4'),
                             ('nDataBlocksperFile','<u4'),
                             ('nNumWindows','<u4'),
                             ('nProcessFlags','<u4'),
                             ('nCoherentIntegrations','<u4'),
                             ('nIncoherentIntegrations','<u4'),
                             ('nTotalSpectra','<u4')
                             ])
        # per-window sampling description: start height, delta, sample count
        self.samplingWindow = 0
        self.structSamplingWindow = numpy.dtype([('h0','<f4'),('dh','<f4'),('nsa','<u4')])
        self.nHeights = 0
        self.firstHeight = 0
        self.deltaHeight = 0
        self.samplesWin = 0
        self.spectraComb = 0
        # optional pulse-code table (present when DEFINE_PROCESS_CODE is set)
        self.nCode = None
        self.code = None
        self.nBaud = None
        # flags derived from processFlags on read
        self.shif_fft = False
        self.flag_dc = False
        self.flag_cspc = False
373 373
374 374 def read(self, fp):
375 375 try:
376 376 header = numpy.fromfile(fp,self.struct,1)
377 377 self.size = int(header['nSize'][0])
378 378 self.dtype = int(header['nDataType'][0])
379 379 self.blockSize = int(header['nSizeOfDataBlock'][0])
380 380 self.profilesPerBlock = int(header['nProfilesperBlock'][0])
381 381 self.dataBlocksPerFile = int(header['nDataBlocksperFile'][0])
382 382 self.nWindows = int(header['nNumWindows'][0])
383 383 self.processFlags = header['nProcessFlags']
384 384 self.nCohInt = int(header['nCoherentIntegrations'][0])
385 385 self.nIncohInt = int(header['nIncoherentIntegrations'][0])
386 386 self.totalSpectra = int(header['nTotalSpectra'][0])
387 387 self.samplingWindow = numpy.fromfile(fp,self.structSamplingWindow,self.nWindows)
388 388 self.nHeights = int(numpy.sum(self.samplingWindow['nsa']))
389 389 self.firstHeight = float(self.samplingWindow['h0'][0])
390 390 self.deltaHeight = float(self.samplingWindow['dh'][0])
391 391 self.samplesWin = self.samplingWindow['nsa']
392 392 self.spectraComb = numpy.fromfile(fp,'u1',2*self.totalSpectra)
393 393
394 394 if ((self.processFlags & PROCFLAG.DEFINE_PROCESS_CODE) == PROCFLAG.DEFINE_PROCESS_CODE):
395 395 self.nCode = int(numpy.fromfile(fp,'<u4',1))
396 396 self.nBaud = int(numpy.fromfile(fp,'<u4',1))
397 397 self.code = numpy.fromfile(fp,'<f4',self.nCode*self.nBaud).reshape(self.nCode,self.nBaud)
398 398
399 399 if ((self.processFlags & PROCFLAG.SHIFT_FFT_DATA) == PROCFLAG.SHIFT_FFT_DATA):
400 400 self.shif_fft = True
401 401 else:
402 402 self.shif_fft = False
403 403
404 404 if ((self.processFlags & PROCFLAG.SAVE_CHANNELS_DC) == PROCFLAG.SAVE_CHANNELS_DC):
405 405 self.flag_dc = True
406 406
407 407 nChannels = 0
408 408 nPairs = 0
409 409 pairList = []
410 410
411 411 for i in range( 0, self.totalSpectra*2, 2 ):
412 412 if self.spectraComb[i] == self.spectraComb[i+1]:
413 413 nChannels = nChannels + 1 #par de canales iguales
414 414 else:
415 415 nPairs = nPairs + 1 #par de canales diferentes
416 416 pairList.append( (self.spectraComb[i], self.spectraComb[i+1]) )
417 417
418 418 self.flag_cspc = False
419 419 if nPairs > 0:
420 420 self.flag_cspc = True
421 421
422 422 except Exception, e:
423 423 print "ProcessingHeader: " + e
424 424 return 0
425 425
426 426 return 1
427 427
428 428 def write(self, fp):
429 429 headerTuple = (self.size,
430 430 self.dtype,
431 431 self.blockSize,
432 432 self.profilesPerBlock,
433 433 self.dataBlocksPerFile,
434 434 self.nWindows,
435 435 self.processFlags,
436 436 self.nCohInt,
437 437 self.nIncohInt,
438 438 self.totalSpectra)
439 439
440 440 header = numpy.array(headerTuple,self.struct)
441 441 header.tofile(fp)
442 442
443 443 if self.nWindows != 0:
444 444 sampleWindowTuple = (self.firstHeight,self.deltaHeight,self.samplesWin)
445 445 samplingWindow = numpy.array(sampleWindowTuple,self.structSamplingWindow)
446 446 samplingWindow.tofile(fp)
447 447
448 448
449 449 if self.totalSpectra != 0:
450 450 spectraComb = numpy.array([],numpy.dtype('u1'))
451 451 spectraComb = self.spectraComb
452 452 spectraComb.tofile(fp)
453 453
454
455 if self.processFlags & PROCFLAG.DEFINE_PROCESS_CODE == PROCFLAG.DEFINE_PROCESS_CODE:
456 nCode = numpy.array([self.nCode], numpy.dtype('u4')) #Probar con un dato que almacene codigo, hasta el momento no se hizo la prueba
457 nCode.tofile(fp)
458
459 nBaud = numpy.array([self.nBaud], numpy.dtype('u4'))
460 nBaud.tofile(fp)
461
462 code = self.code.reshape(self.nCode*self.nBaud)
463 code = code.astype(numpy.dtype('<f4'))
464 code.tofile(fp)
454 # if self.processFlags & PROCFLAG.DEFINE_PROCESS_CODE == PROCFLAG.DEFINE_PROCESS_CODE:
455 # nCode = numpy.array([self.nCode], numpy.dtype('u4')) #Probar con un dato que almacene codigo, hasta el momento no se hizo la prueba
456 # nCode.tofile(fp)
457 #
458 # nBaud = numpy.array([self.nBaud], numpy.dtype('u4'))
459 # nBaud.tofile(fp)
460 #
461 # code = self.code.reshape(self.nCode*self.nBaud)
462 # code = code.astype(numpy.dtype('<f4'))
463 # code.tofile(fp)
465 464
466 465 return 1
467 466
class RCfunction:
    '''Radar-controller waveform function identifiers.'''
    NONE       = 0
    FLIP       = 1
    CODE       = 2
    SAMPLING   = 3
    LIN6DIV256 = 4
    SYNCHRO    = 5
475 474
class nCodeType:
    '''Identifiers for the transmitted pulse-code family.'''
    NONE = 0
    USERDEFINE = 1
    # Barker codes of the given length
    BARKER2 = 2
    BARKER3 = 3
    BARKER4 = 4
    BARKER5 = 5
    BARKER7 = 6
    BARKER11 = 7
    BARKER13 = 8
    AC128 = 9
    # complementary code pairs of the given length
    COMPLEMENTARYCODE2 = 10
    COMPLEMENTARYCODE4 = 11
    COMPLEMENTARYCODE8 = 12
    COMPLEMENTARYCODE16 = 13
    COMPLEMENTARYCODE32 = 14
    COMPLEMENTARYCODE64 = 15
    COMPLEMENTARYCODE128 = 16
    CODE_BINARY28 = 17
495 494
class PROCFLAG:
    '''Bit flags packed into the processing header's nProcessFlags word.'''

    # --- operations applied to the data (bits 0-5) ---
    COHERENT_INTEGRATION      = numpy.uint32(0x00000001)
    DECODE_DATA               = numpy.uint32(0x00000002)
    SPECTRA_CALC              = numpy.uint32(0x00000004)
    INCOHERENT_INTEGRATION    = numpy.uint32(0x00000008)
    POST_COHERENT_INTEGRATION = numpy.uint32(0x00000010)
    SHIFT_FFT_DATA            = numpy.uint32(0x00000020)

    # --- sample data type (bits 6-11) ---
    DATATYPE_CHAR   = numpy.uint32(0x00000040)
    DATATYPE_SHORT  = numpy.uint32(0x00000080)
    DATATYPE_LONG   = numpy.uint32(0x00000100)
    DATATYPE_INT64  = numpy.uint32(0x00000200)
    DATATYPE_FLOAT  = numpy.uint32(0x00000400)
    DATATYPE_DOUBLE = numpy.uint32(0x00000800)

    # --- data arrangement in the block (bits 12-14) ---
    DATAARRANGE_CONTIGUOUS_CH = numpy.uint32(0x00001000)
    DATAARRANGE_CONTIGUOUS_H  = numpy.uint32(0x00002000)
    DATAARRANGE_CONTIGUOUS_P  = numpy.uint32(0x00004000)

    # --- extra header content / options ---
    SAVE_CHANNELS_DC    = numpy.uint32(0x00008000)
    DEFLIP_DATA         = numpy.uint32(0x00010000)
    DEFINE_PROCESS_CODE = numpy.uint32(0x00020000)

    # --- acquisition system identifiers (bits 18-20) ---
    ACQ_SYS_NATALIA = numpy.uint32(0x00040000)
    ACQ_SYS_ECHOTEK = numpy.uint32(0x00080000)
    ACQ_SYS_ADRXD   = numpy.uint32(0x000C0000)
    ACQ_SYS_JULIA   = numpy.uint32(0x00100000)
    ACQ_SYS_XXXXXX  = numpy.uint32(0x00140000)

    EXP_NAME_ESP      = numpy.uint32(0x00200000)
    CHANNEL_NAMES_ESP = numpy.uint32(0x00400000)

    # --- masks selecting each field above ---
    OPERATION_MASK   = numpy.uint32(0x0000003F)
    DATATYPE_MASK    = numpy.uint32(0x00000FC0)
    DATAARRANGE_MASK = numpy.uint32(0x00007000)
    ACQ_SYS_MASK     = numpy.uint32(0x001C0000)
General Comments 0
You need to be logged in to leave comments. Login now