##// END OF EJS Templates
Adicion de informacion de la fecha y hora en la busqueda de archivos
Miguel Valdez -
r332:395575e10cc2
parent child
Show More
@@ -1,2656 +1,2659
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JRODataIO.py 169 2012-11-19 21:57:03Z murco $
5 5 '''
6 6
7 7 import os, sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import time, datetime
13 13
14 14 from jrodata import *
15 15 from jroheaderIO import *
16 16 from jroprocessing import *
17 17
18 18 LOCALTIME = -18000
19 19
def isNumber(str):
    """
    Check whether a string (or any value) can be converted to a float.

    Input:
        str : value to test for numeric convertibility

    Return:
        True  : the value converts cleanly to a float
        False : it does not
    """
    try:
        float( str )
        return True
    # narrowed from a bare except: float() only raises ValueError for
    # non-numeric strings and TypeError for non-string/number types
    except (ValueError, TypeError):
        return False
38 38
def isThisFileinRange(filename, startUTSeconds, endUTSeconds):
    """
    Determine whether a Jicamarca data file holds data inside the given UTC range.

    Inputs:
        filename       : full path of the Jicamarca-format data file (.r)
        startUTSeconds : range start, in seconds since 01/01/1970
        endUTSeconds   : range end, in seconds since 01/01/1970

    Return:
        1 if the file's first basic-header timestamp lies in
        [startUTSeconds, endUTSeconds), 0 otherwise (including when the
        header cannot be read).

    Raises:
        IOError if the file cannot be opened.
    """
    basicHeaderObj = BasicHeader(LOCALTIME)

    try:
        fp = open(filename,'rb')
    except:
        raise IOError, "The file %s can't be opened" %(filename)

    sts = basicHeaderObj.read(fp)
    fp.close()

    if not(sts):
        print "Skipping the file %s because it has not a valid header" %(filename)
        return 0

    # only the FIRST basic header's utc is inspected; presumably the whole
    # file belongs to one acquisition interval — NOTE(review): confirm
    if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
        return 0

    return 1
78 78
79 79 def isFileinThisTime(filename, startTime, endTime):
80 80 """
81 81 Retorna 1 si el archivo de datos se encuentra dentro del rango de horas especificado.
82 82
83 83 Inputs:
84 84 filename : nombre completo del archivo de datos en formato Jicamarca (.r)
85 85
86 86 startTime : tiempo inicial del rango seleccionado en formato datetime.time
87 87
88 88 endTime : tiempo final del rango seleccionado en formato datetime.time
89 89
90 90 Return:
91 91 Boolean : Retorna True si el archivo de datos contiene datos en el rango de
92 92 fecha especificado, de lo contrario retorna False.
93 93
94 94 Excepciones:
95 95 Si el archivo no existe o no puede ser abierto
96 96 Si la cabecera no puede ser leida.
97 97
98 98 """
99 99
100 100
101 101 try:
102 102 fp = open(filename,'rb')
103 103 except:
104 104 raise IOError, "The file %s can't be opened" %(filename)
105 105
106 106 basicHeaderObj = BasicHeader(LOCALTIME)
107 107 sts = basicHeaderObj.read(fp)
108 108 fp.close()
109 109
110 thisDatetime = basicHeaderObj.datatime
110 111 thisTime = basicHeaderObj.datatime.time()
111 112
112 113 if not(sts):
113 114 print "Skipping the file %s because it has not a valid header" %(filename)
114 return 0
115 return None
115 116
116 117 if not ((startTime <= thisTime) and (endTime > thisTime)):
117 return 0
118 return None
118 119
119 return thisTime
120 return thisDatetime
120 121
def getlastFileFromPath(path, ext):
    """
    Return the last file in *path* matching the "xYYYYDDDSSS.ext" naming
    convention, or None when no file qualifies.

    Input:
        path : folder whose entries are examined (names only, no path)
        ext  : required extension, compared case-insensitively

    Return:
        The greatest qualifying name under case-insensitive ordering.
    """
    # name layout:  x YYYY DDD SSS .ext
    #               0 1234 567 89A BCDE
    candidates = []

    for entry in os.listdir(path):
        try:
            int(entry[1:5])   # YYYY must be numeric
            int(entry[5:8])   # DDD must be numeric
        except:
            continue

        if os.path.splitext(entry)[-1].lower() != ext.lower():
            continue

        candidates.append(entry)

    if not candidates:
        return None

    return sorted(candidates, key=str.lower)[-1]
158 159
def checkForRealPath(path, year, doy, set, ext):
    """
    Resolve the actual on-disk casing of a Jicamarca data file.

    Linux is case-sensitive, so the file .../D2009307/P2009307367.ext may be
    stored under any upper/lower-case mix of its prefixes.  Every combination
    of directory prefix (none, 'd', 'D') and file prefix ('d'/'D' for .r
    voltage files, 'p'/'P' for .pdata spectra files) is probed in order.

    Return:
        (fullpath, filename) for the first existing combination;
        (None, last_tried_filename) when none exists;
        (None, None) for an unsupported extension.
    """
    if ext.lower() == ".r":        # voltage
        filePrefixes = ['d', 'D']
    elif ext.lower() == ".pdata":  # spectra
        filePrefixes = ['p', 'P']
    else:
        return None, None

    filename = None

    for dirPrefix in [None, 'd', 'D']:
        if dirPrefix is None:
            thispath = path
        else:
            # directory named xYYYYDDD (x = d or D)
            thispath = os.path.join(path, "%s%04d%03d" % (dirPrefix, year, doy))

        for filePrefix in filePrefixes:
            # file named xYYYYDDDSSS.ext
            filename = "%s%04d%03d%03d%s" % (filePrefix, year, doy, set, ext)
            fullfilename = os.path.join(thispath, filename)
            if os.path.exists(fullfilename):
                return fullfilename, filename

    return None, filename
215 216
def isDoyFolder(folder):
    """
    Return 1 when *folder* looks like a day-of-year folder (xYYYYDDD):
    characters 1-4 and 5-7 must both be numeric.  Return 0 otherwise.
    """
    try:
        int(folder[1:5])   # YYYY
        int(folder[5:8])   # DDD
    except:
        return 0

    return 1
228 229
class JRODataIO:
    """
    Base class shared by JRO data readers and writers.

    Holds the four Jicamarca headers (basic, system, radar controller,
    processing), the currently open file object and the block/file
    counters used while traversing a dataset.
    """

    c = 3E8  # speed of light [m/s]; used to derive ippSeconds from the IPP

    isConfig = False

    basicHeaderObj = BasicHeader(LOCALTIME)

    systemHeaderObj = SystemHeader()

    radarControllerHeaderObj = RadarControllerHeader()

    processingHeaderObj = ProcessingHeader()

    online = 0  # 1 when reading files while they are still being written

    dtype = None  # numpy dtype of the raw samples, derived from the headers

    pathList = []

    filenameList = []

    filename = None

    ext = None

    flagIsNewFile = 1

    flagTimeBlock = 0  # set when the inter-block time gap exceeds maxTimeStep

    flagIsNewBlock = 0

    fp = None  # currently open file object (or None)

    firstHeaderSize = 0

    basicHeaderSize = 24  # bytes

    versionFile = 1103

    fileSize = None

    ippSeconds = None

    fileSizeByHeader = None  # expected file size computed from the headers

    fileIndex = None

    profileIndex = None

    blockIndex = None

    nTotalBlocks = None

    maxTimeStep = 30  # seconds; larger gaps between blocks set flagTimeBlock

    lastUTTime = None

    datablock = None

    dataOut = None

    blocksize = None

    def __init__(self):
        # abstract: concrete readers/writers must implement
        raise ValueError, "Not implemented"

    def run(self):
        # abstract: concrete readers/writers must implement
        raise ValueError, "Not implemented"

    def getOutput(self):
        # return the output data object produced by this unit
        return self.dataOut
304 305
class JRODataReader(JRODataIO, ProcessingUnit):
    """
    Base reader for Jicamarca data files.

    Locates data folders/files either offline (date/time range search) or
    online (polling for the newest file), and reads them block by block.
    """

    nReadBlocks = 0

    delay = 10 #number of seconds waiting a new file

    nTries = 3 #quantity tries

    nFiles = 3 #number of files for searching

    flagNoMoreFiles = 0

    datetimeList = []

    __isFirstTimeOnline = 1

    # printInfo() prints the headers only once; reset to print them again
    __printInfo = True
323
    def __init__(self):

        """
        Abstract constructor: concrete reader subclasses must implement it.
        """

        raise ValueError, "This method has not been implemented"
328 331
329 332
    def createObjByDefault(self):
        """
        Abstract: create the reader's default output object (subclass hook).
        """
        raise ValueError, "This method has not been implemented"
335 338
    def getBlockDimension(self):
        # abstract: compute the datablock shape from the processing header
        raise ValueError, "No implemented"
339 342
    def __searchFilesOffLine(self,
                            path,
                            startDate,
                            endDate,
                            startTime=datetime.time(0,0,0),
                            endTime=datetime.time(23,59,59),
                            set=None,
                            expLabel='',
                            ext='.r',
                            walk=True):
        """
        Build the list of data files whose header timestamps fall inside the
        given date and time-of-day range (offline mode).

        Inputs:
            path      : root data folder
            startDate,
            endDate   : datetime.date range of doy folders to visit
            startTime,
            endTime   : datetime.time range each file must fall into
            set       : unused here (kept for interface symmetry)
            expLabel  : sub-experiment subfolder name
            ext       : file extension to match
            walk      : when True, descend into xYYYYDDD doy folders;
                        when False, use *path* directly

        Affected:
            self.filenameList, self.datetimeList

        Return:
            (pathList, filenameList), or (None, None) when nothing matches.
        """
        pathList = []

        if not walk:
            pathList.append(path)

        else:
            dirList = []
            # keep only doy-named subdirectories
            for thisPath in os.listdir(path):
                if not os.path.isdir(os.path.join(path,thisPath)):
                    continue
                if not isDoyFolder(thisPath):
                    continue

                dirList.append(thisPath)

            if not(dirList):
                return None, None

            thisDate = startDate

            # one folder candidate per day in the range, matched as ?YYYYDDD
            while(thisDate <= endDate):
                year = thisDate.timetuple().tm_year
                doy = thisDate.timetuple().tm_yday

                match = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy))
                if len(match) == 0:
                    thisDate += datetime.timedelta(1)
                    continue

                pathList.append(os.path.join(path,match[0],expLabel))

                thisDate += datetime.timedelta(1)

        if pathList == []:
            print "Any folder was found for the date range: %s-%s" %(startDate, endDate)
            return None, None

        print "%d folder(s) was(were) found for the date range: %s - %s" %(len(pathList), startDate, endDate)

        filenameList = []
        datetimeList = []

        for i in range(len(pathList)):

            thisPath = pathList[i]

            fileList = glob.glob1(thisPath, "*%s" %ext)
            fileList.sort()

            for file in fileList:

                filename = os.path.join(thisPath,file)
                # isFileinThisTime returns the file's datetime, or None
                # when the file is out of range / has a bad header
                thisDatetime = isFileinThisTime(filename, startTime, endTime)

                if not(thisDatetime):
                    continue

                filenameList.append(filename)
                datetimeList.append(thisDatetime)

        if not(filenameList):
            print "Any file was found for the time range %s - %s" %(startTime, endTime)
            return None, None

        print "%d file(s) was(were) found for the time range: %s - %s" %(len(filenameList), startTime, endTime)
        print

        for i in range(len(filenameList)):
            print "%s -> [%s]" %(filenameList[i], datetimeList[i].ctime())

        self.filenameList = filenameList
        self.datetimeList = datetimeList

        return pathList, filenameList
428 428
    def __searchFilesOnLine(self, path, expLabel = "", ext = None, walk=True):

        """
        Find the newest file of the newest doy folder (online mode).

        Input:
            path     : root folder holding the data files
            expLabel : sub-experiment name (subfolder)
            ext      : file extension
            walk     : when True, look inside xYYYYDDD doy subfolders;
                       when False, use *path* directly

        Return:
            (fullpath, filename, year, doy, set) of the last valid file,
            or (None, None, None, None, None) when nothing valid is found.
        """
        dirList = []

        if not walk:
            fullpath = path

        else:
            # keep only doy-named directories
            for thisPath in os.listdir(path):
                if not os.path.isdir(os.path.join(path,thisPath)):
                    continue
                if not isDoyFolder(thisPath):
                    continue

                dirList.append(thisPath)

            if not(dirList):
                return None, None, None, None, None

            dirList = sorted( dirList, key=str.lower )

            # last folder in case-insensitive order == most recent doy
            doypath = dirList[-1]
            fullpath = os.path.join(path, doypath, expLabel)

        print "%s folder was found: " %(fullpath )

        filename = getlastFileFromPath(fullpath, ext)

        if not(filename):
            return None, None, None, None, None

        print "%s file was found" %(filename)

        if not(self.__verifyFile(os.path.join(fullpath, filename))):
            return None, None, None, None, None

        # decode the xYYYYDDDSSS.ext naming convention
        year = int( filename[1:5] )
        doy = int( filename[5:8] )
        set = int( filename[8:11] )

        return fullpath, filename, year, doy, set
494 494
495
496
    def __setNextFileOffline(self):
        """
        Open the next valid file from self.filenameList.

        Affected:
            self.flagIsNewFile, self.fileIndex, self.filename,
            self.fileSize, self.fp, self.flagNoMoreFiles

        Return:
            1 when a file was opened, 0 when the list is exhausted.
        """
        idFile = self.fileIndex

        while (True):
            idFile += 1
            if not(idFile < len(self.filenameList)):
                self.flagNoMoreFiles = 1
                print "No more Files"
                return 0

            filename = self.filenameList[idFile]

            # skip files that are empty / truncated
            if not(self.__verifyFile(filename)):
                continue

            fileSize = os.path.getsize(filename)
            fp = open(filename,'rb')
            break

        self.flagIsNewFile = 1
        self.fileIndex = idFile
        self.filename = filename
        self.fileSize = fileSize
        self.fp = fp

        print "Setting the file: %s"%self.filename

        return 1
526 524
    def __setNextFileOnline(self):
        """
        Find the next file holding enough data to be read, inside the current
        folder; when no valid file is found, wait and retry over the next
        candidate files (and eventually roll over to the next doy folder).

        Affected:
            self.flagIsNewFile
            self.filename
            self.fileSize
            self.fp
            self.set
            self.flagNoMoreFiles

        Return:
            0 : no valid next file could be found
            1 : the file was opened successfully and is ready to be read

        Raises:
            (propagates) if a file cannot be opened
        """
        nFiles = 0
        fileOk_flag = False
        firstTime_flag = True

        self.set += 1

        # look for the first available file
        fullfilename, filename = checkForRealPath( self.path, self.year, self.doy, self.set, self.ext )
        if fullfilename:
            if self.__verifyFile(fullfilename, False):
                fileOk_flag = True

        # if no file was found, wait and search again
        if not(fileOk_flag):
            for nFiles in range(self.nFiles+1): # probe the next self.nFiles+1 candidate files

                if firstTime_flag: # on the first pass retry self.nTries times
                    tries = self.nTries
                else:
                    tries = 1 # afterwards only one attempt per candidate

                for nTries in range( tries ):
                    if firstTime_flag:
                        print "\tWaiting %0.2f sec for the file \"%s\" , try %03d ..." % ( self.delay, filename, nTries+1 )
                        time.sleep( self.delay )
                    else:
                        print "\tSearching next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext)

                    fullfilename, filename = checkForRealPath( self.path, self.year, self.doy, self.set, self.ext )
                    if fullfilename:
                        if self.__verifyFile(fullfilename):
                            fileOk_flag = True
                            break

                if fileOk_flag:
                    break

                firstTime_flag = False

                print "\tSkipping the file \"%s\" due to this file doesn't exist" % filename
                self.set += 1

                if nFiles == (self.nFiles-1): # wanted file never appeared: move on to the next doy folder
                    self.set = 0
                    self.doy += 1

        if fileOk_flag:
            self.fileSize = os.path.getsize( fullfilename )
            self.filename = fullfilename
            self.flagIsNewFile = 1
            if self.fp != None: self.fp.close()
            self.fp = open(fullfilename, 'rb')
            self.flagNoMoreFiles = 0
            print 'Setting the file: %s' % fullfilename
        else:
            self.fileSize = 0
            self.filename = None
            self.flagIsNewFile = 0
            self.fp = None
            self.flagNoMoreFiles = 1
            print 'No more Files'

        return fileOk_flag
611 609
612 610
    def setNextFile(self):
        """
        Close the current file, open the next one (online or offline) and
        read its first header.  Return 1 on success, 0 when no file is left.
        """
        if self.fp != None:
            self.fp.close()

        if self.online:
            newFile = self.__setNextFileOnline()
        else:
            newFile = self.__setNextFileOffline()

        if not(newFile):
            return 0

        self.__readFirstHeader()
        self.nReadBlocks = 0
        return 1
628 626
    def __waitNewBlock(self):
        """
        Return 1 when a new data block appeared in the current file, 0 otherwise.

        Always returns 0 in offline mode, or once the per-file block quota
        (dataBlocksPerFile) has been read.
        """
        if not self.online:
            return 0

        if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
            return 0

        currentPointer = self.fp.tell()

        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        for nTries in range( self.nTries ):

            # reopen to refresh the file size seen by the OS, then restore
            # the read position
            self.fp.close()
            self.fp = open( self.filename, 'rb' )
            self.fp.seek( currentPointer )

            self.fileSize = os.path.getsize( self.filename )
            currentSize = self.fileSize - currentPointer

            if ( currentSize >= neededSize ):
                self.__rdBasicHeader()
                return 1

            print "\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
            time.sleep( self.delay )


        return 0
663 661
    def __jumpToLastBlock(self):
        """
        On the very first online read, seek past all complete blocks so
        reading starts at the newest one.  No-op on subsequent calls.
        """
        if not(self.__isFirstTimeOnline):
            return

        csize = self.fileSize - self.fp.tell()

        # skip the first data block
        if csize > self.processingHeaderObj.blockSize:
            self.fp.seek(self.fp.tell() + self.processingHeaderObj.blockSize)
        else:
            return

        # skip as many whole (basic header + block) units as remain
        csize = self.fileSize - self.fp.tell()
        neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
        factor = int(csize/neededsize)
        if factor > 0:
            self.fp.seek(self.fp.tell() + factor*neededsize)

        self.flagIsNewFile = 0
        self.__isFirstTimeOnline = 0
685 683
686 684
    def __setNewBlock(self):
        """
        Position the reader at the next data block, moving to the next file
        when the current one is exhausted.

        Affected:
            self.lastUTTime, self.flagTimeBlock

        Return:
            1 when a block is ready to be read, 0 otherwise.
        """
        if self.fp == None:
            return 0

        if self.online:
            self.__jumpToLastBlock()

        if self.flagIsNewFile:
            return 1

        self.lastUTTime = self.basicHeaderObj.utc
        currentSize = self.fileSize - self.fp.tell()
        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        # enough bytes left in this file for a whole block?
        if (currentSize >= neededSize):
            self.__rdBasicHeader()
            return 1

        # online: maybe the block just hasn't been flushed yet
        if self.__waitNewBlock():
            return 1

        if not(self.setNextFile()):
            return 0

        deltaTime = self.basicHeaderObj.utc - self.lastUTTime #

        self.flagTimeBlock = 0

        # flag a data gap when the inter-file jump exceeds maxTimeStep
        if deltaTime > self.maxTimeStep:
            self.flagTimeBlock = 1

        return 1
720 718
721 719
    def readNextBlock(self):
        """
        Advance to the next block and read it.  Return 1 on success, 0 when
        no block could be positioned or read.
        """
        if not(self.__setNewBlock()):
            return 0

        if not(self.readBlock()):
            return 0

        return 1
730 728
731 729 def __rdProcessingHeader(self, fp=None):
732 730 if fp == None:
733 731 fp = self.fp
734 732
735 733 self.processingHeaderObj.read(fp)
736 734
737 735 def __rdRadarControllerHeader(self, fp=None):
738 736 if fp == None:
739 737 fp = self.fp
740 738
741 739 self.radarControllerHeaderObj.read(fp)
742 740
743 741 def __rdSystemHeader(self, fp=None):
744 742 if fp == None:
745 743 fp = self.fp
746 744
747 745 self.systemHeaderObj.read(fp)
748 746
749 747 def __rdBasicHeader(self, fp=None):
750 748 if fp == None:
751 749 fp = self.fp
752 750
753 751 self.basicHeaderObj.read(fp)
754 752
755 753
    def __readFirstHeader(self):
        """
        Read all four headers at the start of a file and derive the sample
        dtype, the IPP in seconds and the expected file size.

        Affected:
            self.firstHeaderSize, self.dtype, self.ippSeconds,
            self.fileSizeByHeader (plus whatever getBlockDimension sets)

        Raises:
            ValueError when the header's data-type code is unknown.
        """
        self.__rdBasicHeader()
        self.__rdSystemHeader()
        self.__rdRadarControllerHeader()
        self.__rdProcessingHeader()

        self.firstHeaderSize = self.basicHeaderObj.size

        # data-type code 0..5 maps to complex int8/16/32/64, float32/64
        datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
        if datatype == 0:
            datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
        elif datatype == 1:
            datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')])
        elif datatype == 2:
            datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')])
        elif datatype == 3:
            datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')])
        elif datatype == 4:
            datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')])
        elif datatype == 5:
            datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')])
        else:
            raise ValueError, 'Data type was not defined'

        self.dtype = datatype_str
        # ipp is presumably in km: 2*ipp/c gives the round-trip time,
        # the *1000 converts km->m scale — NOTE(review): confirm units
        self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
        self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1)
#        self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
#        self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
        self.getBlockDimension()
786 784
787 785
    def __verifyFile(self, filename, msgFlag=True):
        """
        Check that *filename* can be opened and holds at least one complete
        data block (headers + blockSize bytes).

        Input:
            filename : full path of the file to verify
            msgFlag  : when True, print diagnostics for rejected files

        Return:
            True when the file has enough data, False otherwise.
        """
        msg = None
        try:
            fp = open(filename, 'rb')
            currentPosition = fp.tell()
        except:
            if msgFlag:
                print "The file %s can't be opened" % (filename)
            return False

        neededSize = self.processingHeaderObj.blockSize + self.firstHeaderSize

        # no header read yet for this dataset: read this file's own headers
        # to learn the block size
        if neededSize == 0:
            basicHeaderObj = BasicHeader(LOCALTIME)
            systemHeaderObj = SystemHeader()
            radarControllerHeaderObj = RadarControllerHeader()
            processingHeaderObj = ProcessingHeader()

            try:
                if not( basicHeaderObj.read(fp) ): raise IOError
                if not( systemHeaderObj.read(fp) ): raise IOError
                if not( radarControllerHeaderObj.read(fp) ): raise IOError
                if not( processingHeaderObj.read(fp) ): raise IOError
                data_type = int(numpy.log2((processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))

                neededSize = processingHeaderObj.blockSize + basicHeaderObj.size

            except:
                if msgFlag:
                    print "\tThe file %s is empty or it hasn't enough data" % filename

                fp.close()
                return False
        else:
            msg = "\tSkipping the file %s due to it hasn't enough data" %filename

        fp.close()
        fileSize = os.path.getsize(filename)
        currentSize = fileSize - currentPosition
        if currentSize < neededSize:
            if msgFlag and (msg != None):
                print msg #print"\tSkipping the file %s due to it hasn't enough data" %filename
            return False

        return True
833 831
834 832 def setup(self,
835 833 path=None,
836 834 startDate=None,
837 835 endDate=None,
838 836 startTime=datetime.time(0,0,0),
839 837 endTime=datetime.time(23,59,59),
840 838 set=0,
841 839 expLabel = "",
842 840 ext = None,
843 841 online = False,
844 842 delay = 60,
845 843 walk = True):
846 844
847 845 if path == None:
848 846 raise ValueError, "The path is not valid"
849 847
850 848 if ext == None:
851 849 ext = self.ext
852 850
853 851 if online:
854 852 print "Searching files in online mode..."
855 853
856 854 for nTries in range( self.nTries ):
857 855 fullpath, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext, walk=walk)
858 856
859 857 if fullpath:
860 858 break
861 859
862 860 print '\tWaiting %0.2f sec for an valid file in %s: try %02d ...' % (self.delay, path, nTries+1)
863 861 time.sleep( self.delay )
864 862
865 863 if not(fullpath):
866 864 print "There 'isn't valied files in %s" % path
867 865 return None
868 866
869 867 self.year = year
870 868 self.doy = doy
871 869 self.set = set - 1
872 870 self.path = path
873 871
874 872 else:
875 873 print "Searching files in offline mode ..."
876 874 pathList, filenameList = self.__searchFilesOffLine(path, startDate=startDate, endDate=endDate,
877 875 startTime=startTime, endTime=endTime,
878 876 set=set, expLabel=expLabel, ext=ext,
879 877 walk=walk)
880 878
881 879 if not(pathList):
882 880 print "No *%s files into the folder %s \nfor the range: %s - %s"%(ext, path,
883 881 datetime.datetime.combine(startDate,startTime).ctime(),
884 882 datetime.datetime.combine(endDate,endTime).ctime())
885 883
886 884 sys.exit(-1)
887 885
888 886
889 887 self.fileIndex = -1
890 888 self.pathList = pathList
891 889 self.filenameList = filenameList
892 890
893 891 self.online = online
894 892 self.delay = delay
895 893 ext = ext.lower()
896 894 self.ext = ext
897 895
898 896 if not(self.setNextFile()):
899 897 if (startDate!=None) and (endDate!=None):
900 898 print "No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
901 899 elif startDate != None:
902 900 print "No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
903 901 else:
904 902 print "No files"
905 903
906 904 sys.exit(-1)
907 905
908 906 # self.updateDataHeader()
909 907
910 908 return self.dataOut
911 909
912 910 def getData():
913 911
914 912 raise ValueError, "This method has not been implemented"
915 913
916 914 def hasNotDataInBuffer():
917 915
918 916 raise ValueError, "This method has not been implemented"
919 917
920 918 def readBlock():
921 919
922 920 raise ValueError, "This method has not been implemented"
923 921
    def isEndProcess(self):
        # truthy (1) once there are no more files left to read
        return self.flagNoMoreFiles
927 925
    def printReadBlocks(self):
        # report how many blocks were read from the current file
        print "Number of read blocks per file %04d" %self.nReadBlocks
931 929
    def printTotalBlocks(self):
        # report the total blocks read across all files
        print "Number of read blocks %04d" %self.nTotalBlocks
935 933
    def printNumberOfBlock(self):
        # report the block just read, only when a new block was actually read
        if self.flagIsNewBlock:
            print "Block No. %04d, Total blocks %04d -> %s" %(self.basicHeaderObj.dataBlock, self.nTotalBlocks, self.dataOut.datatime.ctime())
940 938
    def printInfo(self):
        """Print the four headers once; subsequent calls are no-ops."""
        if self.__printInfo == False:
            return

        self.basicHeaderObj.printInfo()
        self.systemHeaderObj.printInfo()
        self.radarControllerHeaderObj.printInfo()
        self.processingHeaderObj.printInfo()

        # latch so repeated calls stay silent
        self.__printInfo = False
947 950
948 951
    def run(self, **kwargs):
        """Configure the reader on first call, then read the next data unit."""
        if not(self.isConfig):

            #            self.dataOut = dataOut
            self.setup(**kwargs)
            self.isConfig = True

        self.getData()
958 961
class JRODataWriter(JRODataIO, Operation):

    """
    Writer for processed data files (.r or .pdata).  Data is always
    written block by block.
    """

    blockIndex = 0

    path = None

    setFile = None  # current set number (SSS field of the output filename)

    profilesPerBlock = None

    blocksPerFile = None

    nWriteBlocks = 0
977 980
    def __init__(self, dataOut=None):
        # abstract constructor: concrete writer subclasses must implement
        raise ValueError, "Not implemented"
980 983
981 984
    def hasAllDataInBuffer(self):
        # abstract: True once a full block of data is buffered
        raise ValueError, "Not implemented"
984 987
985 988
    def setBlockDimension(self):
        # abstract: set the output datablock shape from the headers
        raise ValueError, "Not implemented"
988 991
989 992
    def writeBlock(self):
        # abstract: write one data block to the current file
        raise ValueError, "No implemented"
992 995
993 996
    def putData(self):
        # abstract: accept incoming data and flush blocks when full
        raise ValueError, "No implemented"
996 999
    def getDataHeader(self):
        """
        Get a copy of the First Header (abstract; subclasses implement).

        Affected:
            self.basicHeaderObj
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.processingHeaderObj

        Return:
            None
        """

        raise ValueError, "No implemented"
1013 1016
    def getBasicHeader(self):
        """
        Fill the basic header from the current output data object:
        size, version, block counter and the split utc/millisecond timestamp.
        """
        self.basicHeaderObj.size = self.basicHeaderSize #bytes
        self.basicHeaderObj.version = self.versionFile
        self.basicHeaderObj.dataBlock = self.nTotalBlocks

        # split the float utctime into whole seconds + milliseconds
        utc = numpy.floor(self.dataOut.utctime)
        milisecond = (self.dataOut.utctime - utc)* 1000.0

        self.basicHeaderObj.utc = utc
        self.basicHeaderObj.miliSecond = milisecond
        self.basicHeaderObj.timeZone = 0
        self.basicHeaderObj.dstFlag = 0
        self.basicHeaderObj.errorCount = 0
1028 1031
    def __writeFirstHeader(self):
        """
        Write the file's first header: the Basic header followed by the Long
        header (SystemHeader, RadarControllerHeader, ProcessingHeader).

        Affected:
            __dataType

        Return:
            None
        """

        # compute total header size
        sizeLongHeader = self.systemHeaderObj.size + self.radarControllerHeaderObj.size + self.processingHeaderObj.size
        self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader

        self.basicHeaderObj.write(self.fp)
        self.systemHeaderObj.write(self.fp)
        self.radarControllerHeaderObj.write(self.fp)
        self.processingHeaderObj.write(self.fp)

        self.dtype = self.dataOut.dtype
1051 1054
    def __setNewBlock(self):
        """
        For a new file write the First Header; otherwise write only the
        Basic Header (rolling to a new file when the block quota is reached).

        Return:
            0 : nothing could be written
            1 : the Basic (or First) Header was written
        """
        if self.fp == None:
            self.setNextFile()

        if self.flagIsNewFile:
            return 1

        if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
            self.basicHeaderObj.write(self.fp)
            return 1

        if not( self.setNextFile() ):
            return 0

        return 1
1074 1077
1075 1078
    def writeNextBlock(self):
        """
        Prepare the headers for the next data block and write the block to a file.

        Return:
            0 : if the data block could not be written
            1 : if the data block was written
        """
        if not( self.__setNewBlock() ):
            return 0
        
        self.writeBlock()
        
        return 1
1090 1093
    def setNextFile(self):
        """
        Determine and open the next file to be written.

        The file is placed under a per-day subfolder "dYYYYDDD" of self.path and
        named "<optchar>YYYYDDDSSS<ext>", where SSS is a per-day set counter
        continued from the newest file already present in the subfolder.

        Affected:
            self.filename
            self.subfolder
            self.fp
            self.setFile
            self.flagIsNewFile

        Return:
            0 : if the file could not be written
            1 : if the file is ready to be written
        """
        ext = self.ext
        path = self.path
        
        if self.fp != None:
            self.fp.close()
        
        timeTuple = time.localtime( self.dataOut.utctime)
        subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
        
        fullpath = os.path.join( path, subfolder )
        if not( os.path.exists(fullpath) ):
            os.mkdir(fullpath)
            self.setFile = -1 #initialize the set counter
        else:
            filesList = os.listdir( fullpath )
            if len( filesList ) > 0:
                filesList = sorted( filesList, key=str.lower )
                filen = filesList[-1]
                # the filename should have the following format
                # 0 1234 567 89A BCDE (hex)
                # x YYYY DDD SSS .ext
                if isNumber( filen[8:11] ):
                    self.setFile = int( filen[8:11] ) #continue the set counter from the last file
                else:
                    self.setFile = -1
            else:
                self.setFile = -1 #initialize the set counter
        
        setFile = self.setFile
        setFile += 1
        
        file = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
                                        timeTuple.tm_year,
                                        timeTuple.tm_yday,
                                        setFile,
                                        ext )
        
        filename = os.path.join( path, subfolder, file )
        
        fp = open( filename,'wb' )
        
        self.blockIndex = 0
        
        #saving attributes
        self.filename = filename
        self.subfolder = subfolder
        self.fp = fp
        self.setFile = setFile
        self.flagIsNewFile = 1
        
        self.getDataHeader()
        
        print 'Writing the file: %s'%self.filename
        
        self.__writeFirstHeader()
        
        return 1
1163 1166
    def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=None, set=0, ext=None):
        """
        Configure the output format in which the data will be saved and open the
        first file (writing its First Header).

        Inputs:
            path : destination path where the output files will be created
            blocksPerFile : number of data blocks per output file
            profilesPerBlock : number of profiles per block
            set : set number of the first file
            ext : file extension; defaults to self.ext

        Return:
            0 : if the setup failed
            1 : if the setup succeeded
        """
        
        if ext == None:
            ext = self.ext
        
        ext = ext.lower()
        
        self.ext = ext
        
        self.path = path
        
        # setNextFile() increments the counter before use, so start one below 'set'
        self.setFile = set - 1
        
        self.blocksPerFile = blocksPerFile
        
        self.profilesPerBlock = profilesPerBlock
        
        self.dataOut = dataOut
        
        if not(self.setNextFile()):
            print "There isn't a next file"
            return 0
        
        self.setBlockDimension()
        
        return 1
1202 1205
1203 1206 def run(self, dataOut, **kwargs):
1204 1207
1205 1208 if not(self.isConfig):
1206 1209
1207 1210 self.setup(dataOut, **kwargs)
1208 1211 self.isConfig = True
1209 1212
1210 1213 self.putData()
1211 1214
class VoltageReader(JRODataReader):
    """
    Reads voltage data from files in rawdata format (.r).

    Reading is always performed one block at a time; each block read from disk
    (a 3-D array: profiles * heights * channels) is stored in the "datablock"
    buffer and served one profile at a time through getData().

    This class holds instances of the BasicHeader, SystemHeader,
    RadarControllerHeader and Voltage classes.  The first three store the
    data-header information (metadata); the fourth (Voltage, kept in
    self.dataOut) receives one profile of data on every getData() call.

    Example:

        dpath = "/home/myuser/data"

        startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)

        endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)

        readerObj = VoltageReader()

        readerObj.setup(dpath, startTime, endTime)

        while(True):

            #to get one profile
            profile = readerObj.getData()

            #print the profile
            print profile

            #If you want to see all datablock
            print readerObj.datablock

            if readerObj.flagNoMoreFiles:
                break

    """
    
    # rawdata files use extension ".r" and a "D" directory/file prefix character
    ext = ".r"
    
    optchar = "D"
    dataOut = None
    
    
    def __init__(self):
        """
        VoltageReader constructor.

        Initializes every reader attribute to its default value and creates the
        default output object (self.dataOut, a Voltage instance) that will hold
        one profile of data on every getData() request.

        Affected:
            self.dataOut

        Return:
            None
        """
        
        self.isConfig = False
        
        self.datablock = None
        
        self.utc = 0
        
        self.ext = ".r"
        
        self.optchar = "D"
        
        # header containers (metadata of the file being read)
        self.basicHeaderObj = BasicHeader(LOCALTIME)
        
        self.systemHeaderObj = SystemHeader()
        
        self.radarControllerHeaderObj = RadarControllerHeader()
        
        self.processingHeaderObj = ProcessingHeader()
        
        # file-handling state
        self.online = 0
        
        self.fp = None
        
        self.idFile = None
        
        self.dtype = None
        
        self.fileSizeByHeader = None
        
        self.filenameList = []
        
        self.filename = None
        
        self.fileSize = None
        
        self.firstHeaderSize = 0
        
        self.basicHeaderSize = 24
        
        self.pathList = []
        
        self.filenameList = []
        
        self.lastUTTime = 0
        
        # maximum time gap (seconds) between blocks before flagging a time break
        self.maxTimeStep = 30
        
        self.flagNoMoreFiles = 0
        
        self.set = 0
        
        self.path = None
        
        # start past any valid index so the first getData() forces a block read
        self.profileIndex = 2**32-1
        
        # online-mode retry parameters
        self.delay = 3 #seconds
        
        self.nTries = 3 #quantity tries
        
        self.nFiles = 3 #number of files for searching
        
        self.nReadBlocks = 0
        
        self.flagIsNewFile = 1
        
        self.__isFirstTimeOnline = 1
        
        self.ippSeconds = 0
        
        self.flagTimeBlock = 0
        
        self.flagIsNewBlock = 0
        
        self.nTotalBlocks = 0
        
        self.blocksize = 0
        
        self.dataOut = self.createObjByDefault()
    
    def createObjByDefault(self):
        """Create and return the default output object (a Voltage instance)."""
        
        dataObj = Voltage()
        
        return dataObj
    
    def __hasNotDataInBuffer(self):
        # true (1) once every profile of the current block has been consumed
        if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
            return 1
        return 0
    
    
    def getBlockDimension(self):
        """
        Compute the number of points to read per data block.

        Affected:
            self.blocksize

        Return:
            None
        """
        pts2read = self.processingHeaderObj.profilesPerBlock * self.processingHeaderObj.nHeights * self.systemHeaderObj.nChannels
        self.blocksize = pts2read
    
    
    def readBlock(self):
        """
        Read one data block from the current file position (self.fp) and update
        every parameter related to it (metadata + data).  The data read is
        stored in the buffer and the buffer counter is reset to 0.

        Inputs:
            None

        Return:
            1 on success, 0 if the block did not contain enough data.

        Affected:
            self.profileIndex
            self.datablock
            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
        """
        
        junk = numpy.fromfile( self.fp, self.dtype, self.blocksize )
        
        try:
            junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
        except:
            print "The read block (%3d) has not enough data" %self.nReadBlocks
            return 0
        
        # reorder to (channels, profiles, heights) and build complex samples
        junk = numpy.transpose(junk, (2,0,1))
        self.datablock = junk['real'] + junk['imag']*1j
        
        self.profileIndex = 0
        
        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1
        
        self.nTotalBlocks += 1
        self.nReadBlocks += 1
        
        return 1
    
    
    def getData(self):
        """
        Copy one data unit (a profile) from the read buffer into the "Voltage"
        output object together with all its associated metadata.  When the
        buffer is empty, a new block is read with readNextBlock().

        Also increments the buffer counter (self.profileIndex) by 1.

        Return:
            data : one profile of voltages (heights * channels) copied from the
                   buffer.  Returns 0 when there are no more files to read.

        Affected:
            self.dataOut
            self.profileIndex
            self.flagTimeBlock
            self.flagIsNewBlock
        """
        
        if self.flagNoMoreFiles:
            self.dataOut.flagNoData = True
            print 'Process finished'
            return 0
        
        self.flagTimeBlock = 0
        self.flagIsNewBlock = 0
        
        if self.__hasNotDataInBuffer():
            
            if not( self.readNextBlock() ):
                return 0
        
        self.dataOut.dtype = self.dtype
        
        self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
        
        # heightList spans the sampled window: first + k*delta, k < nHeights
        xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
        
        self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
        
        self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
        
        self.dataOut.flagTimeBlock = self.flagTimeBlock
        
        self.dataOut.ippSeconds = self.ippSeconds
        
        self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt
        
        self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
        
        self.dataOut.flagShiftFFT = False
        
        # NOTE(review): if "code" is a numpy array, "!= None" compares
        # elementwise; "is not None" would be the safe test — confirm.
        if self.radarControllerHeaderObj.code != None:
            
            self.dataOut.nCode = self.radarControllerHeaderObj.nCode
            
            self.dataOut.nBaud = self.radarControllerHeaderObj.nBaud
            
            self.dataOut.code = self.radarControllerHeaderObj.code
        
        self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
        
        self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
        
        self.dataOut.flagDecodeData = False #assume the data has not been decoded
        
        self.dataOut.flagDeflipData = False #assume the data has not been deflipped
        
        self.dataOut.flagShiftFFT = False
        
        
        #data is a 3-D numpy array (profiles, heights and channels)
        
        # NOTE(review): "== None" on a numpy array is an elementwise comparison;
        # "is None" would be the safe emptiness test here — confirm.
        if self.datablock == None:
            self.dataOut.flagNoData = True
            return 0
        
        self.dataOut.data = self.datablock[:,self.profileIndex,:]
        
        # profile timestamp = block start time + profile offset within the block
        self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000. + self.profileIndex * self.ippSeconds
        
        self.profileIndex += 1
        
        self.dataOut.flagNoData = False
        
        return self.dataOut.data
1525 1528
1526 1529
class VoltageWriter(JRODataWriter):
    """
    Writes voltage data to processed files (.r).  Data is always written one
    block at a time.
    """
    
    ext = ".r"
    
    optchar = "D"
    
    shapeBuffer = None
    
    
    def __init__(self):
        """
        VoltageWriter constructor.

        Initializes every writer attribute to its default value.

        Affected:
            self.dataOut

        Return: None
        """
        
        self.nTotalBlocks = 0
        
        self.profileIndex = 0
        
        self.isConfig = False
        
        self.fp = None
        
        self.flagIsNewFile = 1
        
        self.nTotalBlocks = 0
        
        self.flagIsNewBlock = 0
        
        self.setFile = None
        
        self.dtype = None
        
        self.path = None
        
        self.filename = None
        
        # header containers written at the start of every output file
        self.basicHeaderObj = BasicHeader(LOCALTIME)
        
        self.systemHeaderObj = SystemHeader()
        
        self.radarControllerHeaderObj = RadarControllerHeader()
        
        self.processingHeaderObj = ProcessingHeader()
    
    def hasAllDataInBuffer(self):
        # true (1) once the block buffer holds a full set of profiles
        if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
            return 1
        return 0
    
    
    def setBlockDimension(self):
        """
        Compute the dimensional shapes of the sub-blocks that make up a data
        block and allocate the (zeroed) block buffer.

        Affected:
            self.shapeBuffer
            self.datablock

        Return: None
        """
        # on-disk layout: profiles x heights x channels
        self.shapeBuffer = (self.processingHeaderObj.profilesPerBlock,
                            self.processingHeaderObj.nHeights,
                            self.systemHeaderObj.nChannels)
        
        # in-memory layout: channels x profiles x heights (complex samples)
        self.datablock = numpy.zeros((self.systemHeaderObj.nChannels,
                                      self.processingHeaderObj.profilesPerBlock,
                                      self.processingHeaderObj.nHeights),
                                     dtype=numpy.dtype('complex64'))
    
    
    def writeBlock(self):
        """
        Write the buffer into the target file.

        Affected:
            self.profileIndex
            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
            self.blockIndex

        Return: None
        """
        data = numpy.zeros( self.shapeBuffer, self.dtype )
        
        # back to on-disk order (profiles, heights, channels)
        junk = numpy.transpose(self.datablock, (1,2,0))
        
        data['real'] = junk.real
        data['imag'] = junk.imag
        
        data = data.reshape( (-1) )
        
        data.tofile( self.fp )
        
        self.datablock.fill(0)
        
        self.profileIndex = 0
        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1
        
        self.blockIndex += 1
        self.nTotalBlocks += 1
    
    def putData(self):
        """
        Store one profile in the block buffer and, once the buffer is full,
        write the whole block to the current file.

        Affected:
            self.flagIsNewBlock
            self.profileIndex

        Return:
            0 : if there is no data or no more files can be written
            1 : if a block of data was written to a file
        """
        if self.dataOut.flagNoData:
            return 0
        
        self.flagIsNewBlock = 0
        
        # a time discontinuity discards the partial buffer and starts a new file
        if self.dataOut.flagTimeBlock:
            
            self.datablock.fill(0)
            self.profileIndex = 0
            self.setNextFile()
        
        if self.profileIndex == 0:
            self.getBasicHeader()
        
        self.datablock[:,self.profileIndex,:] = self.dataOut.data
        
        self.profileIndex += 1
        
        if self.hasAllDataInBuffer():
            self.writeNextBlock()
        
        return 1
    
    def __getProcessFlags(self):
        # Build the processFlags bitmask for the processing header: one bit for
        # the sample data type plus one bit per processing step already applied.
        processFlags = 0
        
        dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
        dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
        dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
        dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
        dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
        dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
        
        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
        
        datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
                             PROCFLAG.DATATYPE_SHORT,
                             PROCFLAG.DATATYPE_LONG,
                             PROCFLAG.DATATYPE_INT64,
                             PROCFLAG.DATATYPE_FLOAT,
                             PROCFLAG.DATATYPE_DOUBLE]
        
        # NOTE(review): if self.dataOut.dtype matches none of the entries,
        # dtypeValue is never bound and the next line raises NameError —
        # confirm all dtypes are validated upstream.
        for index in range(len(dtypeList)):
            if self.dataOut.dtype == dtypeList[index]:
                dtypeValue = datatypeValueList[index]
                break
        
        processFlags += dtypeValue
        
        if self.dataOut.flagDecodeData:
            processFlags += PROCFLAG.DECODE_DATA
        
        if self.dataOut.flagDeflipData:
            processFlags += PROCFLAG.DEFLIP_DATA
        
        # NOTE(review): if "code" is a numpy array, "!= None" compares
        # elementwise; "is not None" would be the safe test — confirm.
        if self.dataOut.code != None:
            processFlags += PROCFLAG.DEFINE_PROCESS_CODE
        
        if self.dataOut.nCohInt > 1:
            processFlags += PROCFLAG.COHERENT_INTEGRATION
        
        return processFlags
    
    
    def __getBlockSize(self):
        '''
        Determine the number of bytes of a data block of type Voltage.
        '''
        
        dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
        dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
        dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
        dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
        dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
        dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
        
        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
        # bytes per scalar component for each dtype above
        datatypeValueList = [1,2,4,8,4,8]
        for index in range(len(dtypeList)):
            if self.dataOut.dtype == dtypeList[index]:
                datatypeValue = datatypeValueList[index]
                break
        
        # "* 2" accounts for the real and imaginary components
        blocksize = int(self.dataOut.nHeights * self.dataOut.nChannels * self.dataOut.nProfiles * datatypeValue * 2)
        
        return blocksize
    
    def getDataHeader(self):
        
        """
        Build a copy of the First Header from the current output data object.

        Affected:
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.dtype

        Return:
            None
        """
        
        self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
        self.systemHeaderObj.nChannels = self.dataOut.nChannels
        self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()
        
        self.getBasicHeader()
        
        processingHeaderSize = 40 # bytes
        self.processingHeaderObj.dtype = 0 # Voltage
        self.processingHeaderObj.blockSize = self.__getBlockSize()
        self.processingHeaderObj.profilesPerBlock = self.profilesPerBlock
        self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
        self.processingHeaderObj.nWindows = 1 #could be 1 or self.dataOut.processingHeaderObj.nWindows
        self.processingHeaderObj.processFlags = self.__getProcessFlags()
        self.processingHeaderObj.nCohInt = self.dataOut.nCohInt
        self.processingHeaderObj.nIncohInt = 1 # when the source data is of type Voltage
        self.processingHeaderObj.totalSpectra = 0 # when the source data is of type Voltage
        
        if self.dataOut.code != None:
            self.processingHeaderObj.code = self.dataOut.code
            self.processingHeaderObj.nCode = self.dataOut.nCode
            self.processingHeaderObj.nBaud = self.dataOut.nBaud
            # 8 bytes for (nCode, nBaud) plus 4 bytes per code sample
            codesize = int(8 + 4 * self.dataOut.nCode * self.dataOut.nBaud)
            processingHeaderSize += codesize
        
        if self.processingHeaderObj.nWindows != 0:
            self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
            self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
            self.processingHeaderObj.nHeights = self.dataOut.nHeights
            self.processingHeaderObj.samplesWin = self.dataOut.nHeights
            processingHeaderSize += 12
        
        self.processingHeaderObj.size = processingHeaderSize
1791 1794
1792 1795 class SpectraReader(JRODataReader):
1793 1796 """
1794 1797 Esta clase permite leer datos de espectros desde archivos procesados (.pdata). La lectura
1795 1798 de los datos siempre se realiza por bloques. Los datos leidos (array de 3 dimensiones)
1796 1799 son almacenados en tres buffer's para el Self Spectra, el Cross Spectra y el DC Channel.
1797 1800
1798 1801 paresCanalesIguales * alturas * perfiles (Self Spectra)
1799 1802 paresCanalesDiferentes * alturas * perfiles (Cross Spectra)
1800 1803 canales * alturas (DC Channels)
1801 1804
1802 1805 Esta clase contiene instancias (objetos) de las clases BasicHeader, SystemHeader,
1803 1806 RadarControllerHeader y Spectra. Los tres primeros se usan para almacenar informacion de la
1804 1807 cabecera de datos (metadata), y el cuarto (Spectra) para obtener y almacenar un bloque de
1805 1808 datos desde el "buffer" cada vez que se ejecute el metodo "getData".
1806 1809
1807 1810 Example:
1808 1811 dpath = "/home/myuser/data"
1809 1812
1810 1813 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
1811 1814
1812 1815 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
1813 1816
1814 1817 readerObj = SpectraReader()
1815 1818
1816 1819 readerObj.setup(dpath, startTime, endTime)
1817 1820
1818 1821 while(True):
1819 1822
1820 1823 readerObj.getData()
1821 1824
1822 1825 print readerObj.data_spc
1823 1826
1824 1827 print readerObj.data_cspc
1825 1828
1826 1829 print readerObj.data_dc
1827 1830
1828 1831 if readerObj.flagNoMoreFiles:
1829 1832 break
1830 1833
1831 1834 """
1832 1835
1833 1836 pts2read_SelfSpectra = 0
1834 1837
1835 1838 pts2read_CrossSpectra = 0
1836 1839
1837 1840 pts2read_DCchannels = 0
1838 1841
1839 1842 ext = ".pdata"
1840 1843
1841 1844 optchar = "P"
1842 1845
1843 1846 dataOut = None
1844 1847
1845 1848 nRdChannels = None
1846 1849
1847 1850 nRdPairs = None
1848 1851
1849 1852 rdPairList = []
1850 1853
1851 1854
    def __init__(self):
        """
        SpectraReader constructor.

        Initializes every reader attribute to its default value and creates the
        default output object (self.dataOut, a Spectra instance) that receives
        one block of data on every getData() request.

        Affected:
            self.dataOut

        Return : None
        """
        
        self.isConfig = False
        
        # points-per-block counters, filled in by getBlockDimension()
        self.pts2read_SelfSpectra = 0
        
        self.pts2read_CrossSpectra = 0
        
        self.pts2read_DCchannels = 0
        
        self.datablock = None
        
        self.utc = None
        
        self.ext = ".pdata"
        
        self.optchar = "P"
        
        # header containers (metadata of the file being read)
        self.basicHeaderObj = BasicHeader(LOCALTIME)
        
        self.systemHeaderObj = SystemHeader()
        
        self.radarControllerHeaderObj = RadarControllerHeader()
        
        self.processingHeaderObj = ProcessingHeader()
        
        # file-handling state
        self.online = 0
        
        self.fp = None
        
        self.idFile = None
        
        self.dtype = None
        
        self.fileSizeByHeader = None
        
        self.filenameList = []
        
        self.filename = None
        
        self.fileSize = None
        
        self.firstHeaderSize = 0
        
        self.basicHeaderSize = 24
        
        self.pathList = []
        
        self.lastUTTime = 0
        
        # maximum time gap (seconds) between blocks before flagging a time break
        self.maxTimeStep = 30
        
        self.flagNoMoreFiles = 0
        
        self.set = 0
        
        self.path = None
        
        # online-mode retry parameters
        self.delay = 60 #seconds
        
        self.nTries = 3 #quantity tries
        
        self.nFiles = 3 #number of files for searching
        
        self.nReadBlocks = 0
        
        self.flagIsNewFile = 1
        
        self.__isFirstTimeOnline = 1
        
        self.ippSeconds = 0
        
        self.flagTimeBlock = 0
        
        self.flagIsNewBlock = 0
        
        self.nTotalBlocks = 0
        
        self.blocksize = 0
        
        self.dataOut = self.createObjByDefault()
1949 1952
1950 1953
1951 1954 def createObjByDefault(self):
1952 1955
1953 1956 dataObj = Spectra()
1954 1957
1955 1958 return dataObj
1956 1959
1957 1960 def __hasNotDataInBuffer(self):
1958 1961 return 1
1959 1962
1960 1963
    def getBlockDimension(self):
        """
        Compute the number of points to read per data block.

        spectraComb lists channel pairs as consecutive index pairs: a pair of
        equal channels is a self-spectrum, a pair of different channels is a
        cross-spectrum.

        Affected:
            self.nRdChannels
            self.nRdPairs
            self.pts2read_SelfSpectra
            self.pts2read_CrossSpectra
            self.pts2read_DCchannels
            self.blocksize
            self.dataOut.nChannels
            self.dataOut.nPairs

        Return:
            None
        """
        self.nRdChannels = 0
        self.nRdPairs = 0
        self.rdPairList = []
        
        for i in range(0, self.processingHeaderObj.totalSpectra*2, 2):
            if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i+1]:
                self.nRdChannels = self.nRdChannels + 1 #pair of equal channels (self-spectrum)
            else:
                self.nRdPairs = self.nRdPairs + 1 #pair of different channels (cross-spectrum)
                self.rdPairList.append((self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1]))
        
        pts2read = self.processingHeaderObj.nHeights * self.processingHeaderObj.profilesPerBlock
        
        self.pts2read_SelfSpectra = int(self.nRdChannels * pts2read)
        self.blocksize = self.pts2read_SelfSpectra
        
        # cross-spectra and DC channels are optional sections of the block
        if self.processingHeaderObj.flag_cspc:
            self.pts2read_CrossSpectra = int(self.nRdPairs * pts2read)
            self.blocksize += self.pts2read_CrossSpectra
        
        if self.processingHeaderObj.flag_dc:
            self.pts2read_DCchannels = int(self.systemHeaderObj.nChannels * self.processingHeaderObj.nHeights)
            self.blocksize += self.pts2read_DCchannels
2003 2006
2004 2007
    def readBlock(self):
        """
        Read one data block from the current file position (self.fp) and update
        every parameter related to it (metadata + data).  The data read is
        stored in the buffers and the buffer counter is reset to 0.

        Return: 1

        Affected:
            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
            self.data_spc
            self.data_cspc
            self.data_dc
        """
        # NOTE(review): blockOk_flag and fpointer are set but never used below.
        blockOk_flag = False
        fpointer = self.fp.tell()
        
        # NOTE(review): self-spectra are read with self.dtype[0] (real scalar)
        # while cross-spectra/DC use the full compound self.dtype — confirm
        # this asymmetry matches the on-disk format.
        spc = numpy.fromfile( self.fp, self.dtype[0], self.pts2read_SelfSpectra )
        spc = spc.reshape( (self.nRdChannels, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #reshape to a 3-D array
        
        if self.processingHeaderObj.flag_cspc:
            cspc = numpy.fromfile( self.fp, self.dtype, self.pts2read_CrossSpectra )
            cspc = cspc.reshape( (self.nRdPairs, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #reshape to a 3-D array
        
        if self.processingHeaderObj.flag_dc:
            dc = numpy.fromfile( self.fp, self.dtype, self.pts2read_DCchannels ) #int(self.processingHeaderObj.nHeights*self.systemHeaderObj.nChannels) )
            dc = dc.reshape( (self.systemHeaderObj.nChannels, self.processingHeaderObj.nHeights) ) #reshape to a 2-D array
        
        
        if not(self.processingHeaderObj.shif_fft):
            # shift the profile axis (axis 2) so zero frequency sits at the center
            shift = int(self.processingHeaderObj.profilesPerBlock/2)
            spc = numpy.roll( spc, shift , axis=2 )
            
            if self.processingHeaderObj.flag_cspc:
                # apply the same shift to the cross-spectra
                cspc = numpy.roll( cspc, shift, axis=2 )
        
        # reorder to (channels/pairs, profiles, heights)
        spc = numpy.transpose( spc, (0,2,1) )
        self.data_spc = spc
        
        if self.processingHeaderObj.flag_cspc:
            cspc = numpy.transpose( cspc, (0,2,1) )
            self.data_cspc = cspc['real'] + cspc['imag']*1j
        else:
            self.data_cspc = None
        
        if self.processingHeaderObj.flag_dc:
            self.data_dc = dc['real'] + dc['imag']*1j
        else:
            self.data_dc = None
        
        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1
        
        self.nTotalBlocks += 1
        self.nReadBlocks += 1
        
        return 1
2073 2076
2074 2077
2075 2078 def getData(self):
2076 2079 """
2077 2080 Copia el buffer de lectura a la clase "Spectra",
2078 2081 con todos los parametros asociados a este (metadata). cuando no hay datos en el buffer de
2079 2082 lectura es necesario hacer una nueva lectura de los bloques de datos usando "readNextBlock"
2080 2083
2081 2084 Return:
2082 2085 0 : Si no hay mas archivos disponibles
2083 2086 1 : Si hizo una buena copia del buffer
2084 2087
2085 2088 Affected:
2086 2089 self.dataOut
2087 2090
2088 2091 self.flagTimeBlock
2089 2092 self.flagIsNewBlock
2090 2093 """
2091 2094
2092 2095 if self.flagNoMoreFiles:
2093 2096 self.dataOut.flagNoData = True
2094 2097 print 'Process finished'
2095 2098 return 0
2096 2099
2097 2100 self.flagTimeBlock = 0
2098 2101 self.flagIsNewBlock = 0
2099 2102
2100 2103 if self.__hasNotDataInBuffer():
2101 2104
2102 2105 if not( self.readNextBlock() ):
2103 2106 self.dataOut.flagNoData = True
2104 2107 return 0
2105 2108
2106 2109 # self.updateDataHeader()
2107 2110
2108 2111 #data es un numpy array de 3 dmensiones (perfiles, alturas y canales)
2109 2112
2110 2113 if self.data_dc == None:
2111 2114 self.dataOut.flagNoData = True
2112 2115 return 0
2113 2116
2114 2117 self.dataOut.data_spc = self.data_spc
2115 2118
2116 2119 self.dataOut.data_cspc = self.data_cspc
2117 2120
2118 2121 self.dataOut.data_dc = self.data_dc
2119 2122
2120 2123 self.dataOut.flagTimeBlock = self.flagTimeBlock
2121 2124
2122 2125 self.dataOut.flagNoData = False
2123 2126
2124 2127 self.dataOut.dtype = self.dtype
2125 2128
2126 2129 # self.dataOut.nChannels = self.nRdChannels
2127 2130
2128 2131 self.dataOut.nPairs = self.nRdPairs
2129 2132
2130 2133 self.dataOut.pairsList = self.rdPairList
2131 2134
2132 2135 # self.dataOut.nHeights = self.processingHeaderObj.nHeights
2133 2136
2134 2137 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
2135 2138
2136 2139 self.dataOut.nFFTPoints = self.processingHeaderObj.profilesPerBlock
2137 2140
2138 2141 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
2139 2142
2140 2143 self.dataOut.nIncohInt = self.processingHeaderObj.nIncohInt
2141 2144
2142 2145 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
2143 2146
2144 2147 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
2145 2148
2146 2149 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
2147 2150
2148 2151 # self.dataOut.channelIndexList = range(self.systemHeaderObj.nChannels)
2149 2152
2150 2153 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000.#+ self.profileIndex * self.ippSeconds
2151 2154
2152 2155 self.dataOut.ippSeconds = self.ippSeconds
2153 2156
2154 2157 self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt * self.processingHeaderObj.nIncohInt * self.dataOut.nFFTPoints
2155 2158
2156 2159 # self.profileIndex += 1
2157 2160
2158 2161 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
2159 2162
2160 2163 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
2161 2164
2162 2165 self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft
2163 2166
2164 2167 self.dataOut.flagDecodeData = False #asumo q la data no esta decodificada
2165 2168
2166 2169 self.dataOut.flagDeflipData = True #asumo q la data no esta sin flip
2167 2170
2168 2171 if self.processingHeaderObj.code != None:
2169 2172
2170 2173 self.dataOut.nCode = self.processingHeaderObj.nCode
2171 2174
2172 2175 self.dataOut.nBaud = self.processingHeaderObj.nBaud
2173 2176
2174 2177 self.dataOut.code = self.processingHeaderObj.code
2175 2178
2176 2179 self.dataOut.flagDecodeData = True
2177 2180
2178 2181 return self.dataOut.data_spc
2179 2182
2180 2183
class SpectraWriter(JRODataWriter):

    """
    Writes Spectra data to processed data files (.pdata). Data is always
    written one block at a time.
    """

    ext = ".pdata"

    optchar = "P"

    shape_spc_Buffer = None   # (nChannels, nHeights, profilesPerBlock)

    shape_cspc_Buffer = None  # (nPairs, nHeights, profilesPerBlock)

    shape_dc_Buffer = None    # (nChannels, nHeights)

    data_spc = None

    data_cspc = None

    data_dc = None

    def __init__(self):
        """
        SpectraWriter initializer.

        Affected:
            self.dataOut
            self.basicHeaderObj
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.processingHeaderObj

        Return: None
        """

        self.isConfig = False

        self.nTotalBlocks = 0

        self.data_spc = None

        self.data_cspc = None

        self.data_dc = None

        self.fp = None

        self.flagIsNewFile = 1

        self.flagIsNewBlock = 0

        self.setFile = None

        self.dtype = None

        self.path = None

        self.noMoreFiles = 0

        self.filename = None

        self.basicHeaderObj = BasicHeader(LOCALTIME)

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()


    def hasAllDataInBuffer(self):
        # A single dataOut object always carries one complete block.
        return 1


    def setBlockDimension(self):
        """
        Computes the dimensional shapes of the data sub-blocks that compose a block.

        Affected:
            self.shape_spc_Buffer
            self.shape_cspc_Buffer
            self.shape_dc_Buffer

        Return: None
        """
        self.shape_spc_Buffer = (self.dataOut.nChannels,
                                 self.processingHeaderObj.nHeights,
                                 self.processingHeaderObj.profilesPerBlock)

        self.shape_cspc_Buffer = (self.dataOut.nPairs,
                                  self.processingHeaderObj.nHeights,
                                  self.processingHeaderObj.profilesPerBlock)

        self.shape_dc_Buffer = (self.dataOut.nChannels,
                                self.processingHeaderObj.nHeights)


    def writeBlock(self):
        """
        Writes the buffered block to the currently open file.

        Affected:
            self.data_spc
            self.data_cspc
            self.data_dc
            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
            self.nWriteBlocks

        Return: None
        """

        spc = numpy.transpose( self.data_spc, (0,2,1) )
        if not( self.processingHeaderObj.shif_fft ):
            # shift the fft axis right by half a block so DC ends up centered
            spc = numpy.roll( spc, self.processingHeaderObj.profilesPerBlock/2, axis=2 )
        data = spc.reshape((-1))
        data = data.astype(self.dtype[0])
        data.tofile(self.fp)

        # FIX: "is not None" -- "!= None" is elementwise/ambiguous on numpy arrays
        if self.data_cspc is not None:
            data = numpy.zeros( self.shape_cspc_Buffer, self.dtype )
            cspc = numpy.transpose( self.data_cspc, (0,2,1) )
            if not( self.processingHeaderObj.shif_fft ):
                # same fft-axis shift as for the self-spectra
                cspc = numpy.roll( cspc, self.processingHeaderObj.profilesPerBlock/2, axis=2 )
            data['real'] = cspc.real
            data['imag'] = cspc.imag
            data = data.reshape((-1))
            data.tofile(self.fp)

        if self.data_dc is not None:
            data = numpy.zeros( self.shape_dc_Buffer, self.dtype )
            dc = self.data_dc
            data['real'] = dc.real
            data['imag'] = dc.imag
            data = data.reshape((-1))
            data.tofile(self.fp)

        # clear the buffers for the next block (guarding optional ones)
        self.data_spc.fill(0)
        if self.data_dc is not None:
            self.data_dc.fill(0)
        if self.data_cspc is not None:
            self.data_cspc.fill(0)

        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1
        self.nTotalBlocks += 1
        self.nWriteBlocks += 1
        self.blockIndex += 1


    def putData(self):
        """
        Buffers one block of data and writes it to a file.

        Affected:
            self.data_spc
            self.data_cspc
            self.data_dc

        Return:
            0 : if there is no data or no more files can be written
            1 : if one block of data was written to a file
        """

        if self.dataOut.flagNoData:
            return 0

        self.flagIsNewBlock = 0

        if self.dataOut.flagTimeBlock:
            # time discontinuity: reset buffers and start a new file
            if self.data_spc is not None:
                self.data_spc.fill(0)
            if self.data_cspc is not None:
                self.data_cspc.fill(0)
            if self.data_dc is not None:
                self.data_dc.fill(0)
            self.setNextFile()

        if self.flagIsNewFile == 0:
            self.getBasicHeader()

        self.data_spc = self.dataOut.data_spc.copy()
        # cross-spectra and DC may be absent (None) depending on the reader
        self.data_cspc = None if self.dataOut.data_cspc is None else self.dataOut.data_cspc.copy()
        self.data_dc = None if self.dataOut.data_dc is None else self.dataOut.data_dc.copy()

        if self.hasAllDataInBuffer():
            self.writeNextBlock()

        return 1


    def __getProcessFlags(self):
        """
        Builds the processing-flags bitmask (PROCFLAG) that describes the data
        type and the processing applied, as stored in the processing header.
        """

        processFlags = 0

        dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
        dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
        dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
        dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
        dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
        dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]

        datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
                             PROCFLAG.DATATYPE_SHORT,
                             PROCFLAG.DATATYPE_LONG,
                             PROCFLAG.DATATYPE_INT64,
                             PROCFLAG.DATATYPE_FLOAT,
                             PROCFLAG.DATATYPE_DOUBLE]

        for index in range(len(dtypeList)):
            if self.dataOut.dtype == dtypeList[index]:
                dtypeValue = datatypeValueList[index]
                break

        processFlags += dtypeValue

        if self.dataOut.flagDecodeData:
            processFlags += PROCFLAG.DECODE_DATA

        if self.dataOut.flagDeflipData:
            processFlags += PROCFLAG.DEFLIP_DATA

        # FIX: numpy-safe None checks below
        if self.dataOut.code is not None:
            processFlags += PROCFLAG.DEFINE_PROCESS_CODE

        if self.dataOut.nIncohInt > 1:
            processFlags += PROCFLAG.INCOHERENT_INTEGRATION

        if self.dataOut.data_dc is not None:
            processFlags += PROCFLAG.SAVE_CHANNELS_DC

        return processFlags


    def __getBlockSize(self):
        '''
        Computes the number of bytes occupied by one block of Spectra data.
        '''

        dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
        dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
        dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
        dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
        dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
        dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
        datatypeValueList = [1,2,4,8,4,8]  # bytes per real/imag component
        for index in range(len(dtypeList)):
            if self.dataOut.dtype == dtypeList[index]:
                datatypeValue = datatypeValueList[index]
                break

        pts2write = self.dataOut.nHeights * self.dataOut.nFFTPoints

        # self-spectra are stored as plain (real) values
        pts2write_SelfSpectra = int(self.dataOut.nChannels * pts2write)
        blocksize = (pts2write_SelfSpectra*datatypeValue)

        # cross-spectra and DC channels store real+imag pairs (factor 2)
        if self.dataOut.data_cspc is not None:
            pts2write_CrossSpectra = int(self.dataOut.nPairs * pts2write)
            blocksize += (pts2write_CrossSpectra*datatypeValue*2)

        if self.dataOut.data_dc is not None:
            pts2write_DCchannels = int(self.dataOut.nChannels * self.dataOut.nHeights)
            blocksize += (pts2write_DCchannels*datatypeValue*2)

        return blocksize

    def getDataHeader(self):

        """
        Builds a copy of the First Header (system, radar-controller and
        processing headers) from the current dataOut metadata.

        Affected:
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.dtype

        Return:
            None
        """

        self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
        self.systemHeaderObj.nChannels = self.dataOut.nChannels
        self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()

        self.getBasicHeader()

        processingHeaderSize = 40 # bytes (fixed part of the processing header)
        self.processingHeaderObj.dtype = 0 # Voltage
        self.processingHeaderObj.blockSize = self.__getBlockSize()
        self.processingHeaderObj.profilesPerBlock = self.dataOut.nFFTPoints
        self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
        self.processingHeaderObj.nWindows = 1 # could be 1 or self.dataOut.processingHeaderObj.nWindows
        self.processingHeaderObj.processFlags = self.__getProcessFlags()
        self.processingHeaderObj.nCohInt = self.dataOut.nCohInt # needed to derive timeInterval
        self.processingHeaderObj.nIncohInt = self.dataOut.nIncohInt
        self.processingHeaderObj.totalSpectra = self.dataOut.nPairs + self.dataOut.nChannels

        if self.processingHeaderObj.totalSpectra > 0:
            # spectraComb lists channel indices pairwise: (c,c) for self-spectra,
            # then (a,b) for each cross-spectra pair
            channelList = []
            for channel in range(self.dataOut.nChannels):
                channelList.append(channel)
                channelList.append(channel)

            pairsList = []
            for pair in self.dataOut.pairsList:
                pairsList.append(pair[0])
                pairsList.append(pair[1])
            spectraComb = channelList + pairsList
            spectraComb = numpy.array(spectraComb,dtype="u1")
            self.processingHeaderObj.spectraComb = spectraComb
            sizeOfSpcComb = len(spectraComb)
            processingHeaderSize += sizeOfSpcComb

        # FIX: numpy-safe None check
        if self.dataOut.code is not None:
            self.processingHeaderObj.code = self.dataOut.code
            self.processingHeaderObj.nCode = self.dataOut.nCode
            self.processingHeaderObj.nBaud = self.dataOut.nBaud
            nCodeSize = 4 # bytes
            nBaudSize = 4 # bytes
            codeSize = 4 # bytes
            sizeOfCode = int(nCodeSize + nBaudSize + codeSize * self.dataOut.nCode * self.dataOut.nBaud)
            processingHeaderSize += sizeOfCode

        if self.processingHeaderObj.nWindows != 0:
            self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
            self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
            self.processingHeaderObj.nHeights = self.dataOut.nHeights
            self.processingHeaderObj.samplesWin = self.dataOut.nHeights
            sizeOfFirstHeight = 4
            sizeOfdeltaHeight = 4
            sizeOfnHeights = 4
            sizeOfWindows = (sizeOfFirstHeight + sizeOfdeltaHeight + sizeOfnHeights)*self.processingHeaderObj.nWindows
            processingHeaderSize += sizeOfWindows

        self.processingHeaderObj.size = processingHeaderSize
2531 2534
class SpectraHeisWriter():
    """
    Writes heis spectra data to daily FITS files (one subfolder per day,
    one file per call to putData) using the FITS helper class.
    """

    i = 0

    def __init__(self, dataOut):
        # dataOut: Spectra-like object holding the data and metadata to write
        self.wrObj = FITS()
        self.dataOut = dataOut

    @staticmethod
    def isNumber(strValue):
        """
        Checks whether a string can be converted to a number.

        Input:
            strValue : string to analyze

        Return:
            True  : the string is numeric
            False : the string is not numeric
        """
        # FIX: original "def isNumber(str)" lacked self/@staticmethod, so
        # self.isNumber(x) raised TypeError; it also shadowed builtin str.
        try:
            float(strValue)
            return True
        except:
            return False

    def setup(self, wrpath):
        """Creates the output root directory if needed and resets the file counter."""
        if not os.path.exists(wrpath):
            os.mkdir(wrpath)

        self.wrpath = wrpath
        self.setFile = 0

    def putData(self):
        """
        Writes the current dataOut spectra to a new FITS file named
        D<year><doy><set>.fits inside a D<year><doy> subfolder.

        Return:
            1 : the file was written
        """
        # build the daily subfolder name from the block timestamp
        name = time.localtime(self.dataOut.utctime)
        ext = ".fits"
        subfolder = 'D%4.4d%3.3d' % (name.tm_year, name.tm_yday)

        fullpath = os.path.join(self.wrpath, subfolder)
        if not os.path.exists(fullpath):
            os.mkdir(fullpath)
        self.setFile += 1
        # FIX: local renamed from "file" to avoid shadowing the builtin
        fileStr = 'D%4.4d%3.3d%3.3d%s' % (name.tm_year, name.tm_yday, self.setFile, ext)

        filename = os.path.join(self.wrpath, subfolder, fileStr)

        # frequency axis centered at zero
        freq = numpy.arange(-1*self.dataOut.nHeights/2., self.dataOut.nHeights/2.)/(2*self.dataOut.ippSeconds)

        col1 = self.wrObj.setColF(name="freq", format=str(self.dataOut.nFFTPoints)+'E', array=freq)
        # one power column (in dB) per channel 1..8
        powerCols = []
        for ich in range(8):
            powerCols.append(self.wrObj.writeData(name="P_Ch%d" % (ich + 1),
                                                  format=str(self.dataOut.nFFTPoints)+'E',
                                                  data=10*numpy.log10(self.dataOut.data_spc[ich,:])))

        n = self.dataOut.data_spc[6,:]
        a = self.wrObj.cFImage(n)
        b = self.wrObj.Ctable(col1, powerCols[0], powerCols[1], powerCols[2], powerCols[3],
                              powerCols[4], powerCols[5], powerCols[6], powerCols[7])
        self.wrObj.CFile(a, b)
        self.wrObj.wFile(filename)
        return 1
2603 2606
class FITS:
    """Thin builder around pyfits for assembling and writing a FITS file."""

    name = None
    format = None
    array = None
    data = None
    thdulist = None

    def __init__(self):
        # Nothing to initialise up front; attributes are set by the builders.
        pass

    def setColF(self, name, format, array):
        """Builds and returns a float32 pyfits Column from *array*."""
        self.name = name
        self.format = format
        self.array = array
        values = numpy.array([self.array], dtype=numpy.float32)
        self.col1 = pyfits.Column(name=self.name, format=self.format, array=values)
        return self.col1

    def writeHeader(self,):
        pass

    def writeData(self, name, format, data):
        """Builds and returns a float32 pyfits Column from *data*."""
        self.name = name
        self.format = format
        self.data = data
        values = numpy.array([self.data], dtype=numpy.float32)
        self.col2 = pyfits.Column(name=self.name, format=self.format, array=values)
        return self.col2

    def cFImage(self, n):
        """Wraps array *n* in a primary HDU and returns it."""
        self.hdu = pyfits.PrimaryHDU(n)
        return self.hdu

    def Ctable(self, col1, col2, col3, col4, col5, col6, col7, col8, col9):
        """Builds a binary-table HDU from the nine given columns."""
        columns = [col1, col2, col3, col4, col5, col6, col7, col8, col9]
        self.cols = pyfits.ColDefs(columns)
        self.tbhdu = pyfits.new_table(self.cols)
        return self.tbhdu

    def CFile(self, hdu, tbhdu):
        """Combines the primary HDU and the table HDU into an HDU list."""
        self.thdulist = pyfits.HDUList([hdu, tbhdu])

    def wFile(self, filename):
        """Writes the assembled HDU list to *filename*."""
        self.thdulist.writeto(filename)
@@ -1,528 +1,531
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JROHeaderIO.py 151 2012-10-31 19:00:51Z murco $
5 5 '''
6 6 import sys
7 7 import numpy
8 8 import copy
9 9 import datetime
10 10
11 11 class Header:
12 12
13 13 def __init__(self):
14 14 raise
15 15
16 16 def copy(self):
17 17 return copy.deepcopy(self)
18 18
19 19 def read():
20 20 pass
21 21
22 22 def write():
23 23 pass
24 24
25 25 def printInfo(self):
26 26
27 print "#"*100
28 print self.__class__.__name__.upper()
29 print "#"*100
27 30 for key in self.__dict__.keys():
28 31 print "%s = %s" %(key, self.__dict__[key])
29 32
class BasicHeader(Header):
    # Basic (per-block) header of a Jicamarca data file: timestamp,
    # block id and error count, stored in a fixed little-endian struct.

    size = None         # header size in bytes (nSize)
    version = None      # header version (nVersion)
    dataBlock = None    # data block id within the file (nDataBlockId)
    utc = None          # block timestamp, seconds since epoch (nUtime)
    miliSecond = None   # millisecond part of the timestamp (nMilsec)
    timeZone = None     # time zone offset (nTimezone)
    dstFlag = None      # daylight-saving flag (nDstflag)
    errorCount = None   # accumulated error count (nErrorCount)
    struct = None       # numpy dtype describing the on-disk layout
    datatime = None     # datetime computed from utc after read()

    __LOCALTIME = None  # seconds added to utc on read / subtracted on write

    def __init__(self, localtime=0):
        # localtime: offset (in seconds) applied to the stored UTC timestamp,
        # e.g. the module-level LOCALTIME constant.

        self.size = 0
        self.version = 0
        self.dataBlock = 0
        self.utc = 0
        self.miliSecond = 0
        self.timeZone = 0
        self.dstFlag = 0
        self.errorCount = 0
        # little-endian on-disk layout of the basic header
        self.struct = numpy.dtype([
                             ('nSize','<u4'),
                             ('nVersion','<u2'),
                             ('nDataBlockId','<u4'),
                             ('nUtime','<u4'),
                             ('nMilsec','<u2'),
                             ('nTimezone','<i2'),
                             ('nDstflag','<i2'),
                             ('nErrorCount','<u4')
                             ])

        self.__LOCALTIME = localtime

    def read(self, fp):
        # Reads one basic header record from file object fp.
        # Returns 1 on success, 0 on any failure (error is printed).
        try:
            header = numpy.fromfile(fp, self.struct,1)
            self.size = int(header['nSize'][0])
            self.version = int(header['nVersion'][0])
            self.dataBlock = int(header['nDataBlockId'][0])
            self.utc = int(header['nUtime'][0])
            self.miliSecond = int(header['nMilsec'][0])
            self.timeZone = int(header['nTimezone'][0])
            self.dstFlag = int(header['nDstflag'][0])
            self.errorCount = int(header['nErrorCount'][0])

            # apply the configured local-time offset and derive a datetime
            self.utc += self.__LOCALTIME

            self.datatime = datetime.datetime.utcfromtimestamp(self.utc)

        except Exception, e:
            print "BasicHeader: "
            print e
            return 0

        return 1

    def write(self, fp):
        # Writes this header to file object fp.
        # NOTE(review): this mutates self.utc in place (subtracts the offset),
        # so calling write() twice would shift utc twice -- confirm callers
        # only write each header once.
        self.utc -= self.__LOCALTIME
        headerTuple = (self.size,self.version,self.dataBlock,self.utc,self.miliSecond,self.timeZone,self.dstFlag,self.errorCount)
        header = numpy.array(headerTuple,self.struct)
        header.tofile(fp)

        return 1
98 101
99 102 class SystemHeader(Header):
100 103
101 104 size = None
102 105 nSamples = None
103 106 nProfiles = None
104 107 nChannels = None
105 108 adcResolution = None
106 109 pciDioBusWidth = None
107 110 struct = None
108 111
109 112 def __init__(self):
110 113 self.size = 0
111 114 self.nSamples = 0
112 115 self.nProfiles = 0
113 116 self.nChannels = 0
114 117 self.adcResolution = 0
115 118 self.pciDioBusWidth = 0
116 119 self.struct = numpy.dtype([
117 120 ('nSize','<u4'),
118 121 ('nNumSamples','<u4'),
119 122 ('nNumProfiles','<u4'),
120 123 ('nNumChannels','<u4'),
121 124 ('nADCResolution','<u4'),
122 125 ('nPCDIOBusWidth','<u4'),
123 126 ])
124 127
125 128
126 129 def read(self, fp):
127 130 try:
128 131 header = numpy.fromfile(fp,self.struct,1)
129 132 self.size = header['nSize'][0]
130 133 self.nSamples = header['nNumSamples'][0]
131 134 self.nProfiles = header['nNumProfiles'][0]
132 135 self.nChannels = header['nNumChannels'][0]
133 136 self.adcResolution = header['nADCResolution'][0]
134 137 self.pciDioBusWidth = header['nPCDIOBusWidth'][0]
135 138
136 139 except Exception, e:
137 140 print "SystemHeader: " + e
138 141 return 0
139 142
140 143 return 1
141 144
142 145 def write(self, fp):
143 146 headerTuple = (self.size,self.nSamples,self.nProfiles,self.nChannels,self.adcResolution,self.pciDioBusWidth)
144 147 header = numpy.array(headerTuple,self.struct)
145 148 header.tofile(fp)
146 149
147 150 return 1
148 151
149 152 class RadarControllerHeader(Header):
150 153
151 154 size = None
152 155 expType = None
153 156 nTx = None
154 157 ipp = None
155 158 txA = None
156 159 txB = None
157 160 nWindows = None
158 161 numTaus = None
159 162 codeType = None
160 163 line6Function = None
161 164 line5Function = None
162 165 fClock = None
163 166 prePulseBefore = None
164 167 prePulserAfter = None
165 168 rangeIpp = None
166 169 rangeTxA = None
167 170 rangeTxB = None
168 171 struct = None
169 172
170 173 def __init__(self):
171 174 self.size = 0
172 175 self.expType = 0
173 176 self.nTx = 0
174 177 self.ipp = 0
175 178 self.txA = 0
176 179 self.txB = 0
177 180 self.nWindows = 0
178 181 self.numTaus = 0
179 182 self.codeType = 0
180 183 self.line6Function = 0
181 184 self.line5Function = 0
182 185 self.fClock = 0
183 186 self.prePulseBefore = 0
184 187 self.prePulserAfter = 0
185 188 self.rangeIpp = 0
186 189 self.rangeTxA = 0
187 190 self.rangeTxB = 0
188 191 self.struct = numpy.dtype([
189 192 ('nSize','<u4'),
190 193 ('nExpType','<u4'),
191 194 ('nNTx','<u4'),
192 195 ('fIpp','<f4'),
193 196 ('fTxA','<f4'),
194 197 ('fTxB','<f4'),
195 198 ('nNumWindows','<u4'),
196 199 ('nNumTaus','<u4'),
197 200 ('nCodeType','<u4'),
198 201 ('nLine6Function','<u4'),
199 202 ('nLine5Function','<u4'),
200 203 ('fClock','<f4'),
201 204 ('nPrePulseBefore','<u4'),
202 205 ('nPrePulseAfter','<u4'),
203 206 ('sRangeIPP','<a20'),
204 207 ('sRangeTxA','<a20'),
205 208 ('sRangeTxB','<a20'),
206 209 ])
207 210
208 211 self.samplingWindowStruct = numpy.dtype([('h0','<f4'),('dh','<f4'),('nsa','<u4')])
209 212
210 213 self.samplingWindow = None
211 214 self.nHeights = None
212 215 self.firstHeight = None
213 216 self.deltaHeight = None
214 217 self.samplesWin = None
215 218
216 219 self.nCode = None
217 220 self.nBaud = None
218 221 self.code = None
219 222 self.flip1 = None
220 223 self.flip2 = None
221 224
222 225 self.dynamic = numpy.array([],numpy.dtype('byte'))
223 226
224 227
225 228 def read(self, fp):
226 229 try:
227 230 startFp = fp.tell()
228 231 header = numpy.fromfile(fp,self.struct,1)
229 232 self.size = int(header['nSize'][0])
230 233 self.expType = int(header['nExpType'][0])
231 234 self.nTx = int(header['nNTx'][0])
232 235 self.ipp = float(header['fIpp'][0])
233 236 self.txA = float(header['fTxA'][0])
234 237 self.txB = float(header['fTxB'][0])
235 238 self.nWindows = int(header['nNumWindows'][0])
236 239 self.numTaus = int(header['nNumTaus'][0])
237 240 self.codeType = int(header['nCodeType'][0])
238 241 self.line6Function = int(header['nLine6Function'][0])
239 242 self.line5Function = int(header['nLine5Function'][0])
240 243 self.fClock = float(header['fClock'][0])
241 244 self.prePulseBefore = int(header['nPrePulseBefore'][0])
242 245 self.prePulserAfter = int(header['nPrePulseAfter'][0])
243 246 self.rangeIpp = header['sRangeIPP'][0]
244 247 self.rangeTxA = header['sRangeTxA'][0]
245 248 self.rangeTxB = header['sRangeTxB'][0]
246 249 # jump Dynamic Radar Controller Header
247 250 jumpFp = self.size - 116
248 251 self.dynamic = numpy.fromfile(fp,numpy.dtype('byte'),jumpFp)
249 252 #pointer backward to dynamic header and read
250 253 backFp = fp.tell() - jumpFp
251 254 fp.seek(backFp)
252 255
253 256 self.samplingWindow = numpy.fromfile(fp,self.samplingWindowStruct,self.nWindows)
254 257 self.nHeights = int(numpy.sum(self.samplingWindow['nsa']))
255 258 self.firstHeight = self.samplingWindow['h0']
256 259 self.deltaHeight = self.samplingWindow['dh']
257 260 self.samplesWin = self.samplingWindow['nsa']
258 261
259 262 self.Taus = numpy.fromfile(fp,'<f4',self.numTaus)
260 263
261 264 if self.codeType != 0:
262 265 self.nCode = int(numpy.fromfile(fp,'<u4',1))
263 266 self.nBaud = int(numpy.fromfile(fp,'<u4',1))
264 267 self.code = numpy.empty([self.nCode,self.nBaud],dtype='u1')
265 268 tempList = []
266 269 for ic in range(self.nCode):
267 270 temp = numpy.fromfile(fp,'u1',4*int(numpy.ceil(self.nBaud/32.)))
268 271 tempList.append(temp)
269 272 self.code[ic] = numpy.unpackbits(temp[::-1])[-1*self.nBaud:]
270 273 self.code = 2.0*self.code - 1.0
271 274
272 275 if self.line5Function == RCfunction.FLIP:
273 276 self.flip1 = numpy.fromfile(fp,'<u4',1)
274 277
275 278 if self.line6Function == RCfunction.FLIP:
276 279 self.flip2 = numpy.fromfile(fp,'<u4',1)
277 280
278 281 endFp = self.size + startFp
279 282 jumpFp = endFp - fp.tell()
280 283 if jumpFp > 0:
281 284 fp.seek(jumpFp)
282 285
283 286 except Exception, e:
284 287 print "RadarControllerHeader: " + e
285 288 return 0
286 289
287 290 return 1
288 291
289 292 def write(self, fp):
290 293 headerTuple = (self.size,
291 294 self.expType,
292 295 self.nTx,
293 296 self.ipp,
294 297 self.txA,
295 298 self.txB,
296 299 self.nWindows,
297 300 self.numTaus,
298 301 self.codeType,
299 302 self.line6Function,
300 303 self.line5Function,
301 304 self.fClock,
302 305 self.prePulseBefore,
303 306 self.prePulserAfter,
304 307 self.rangeIpp,
305 308 self.rangeTxA,
306 309 self.rangeTxB)
307 310
308 311 header = numpy.array(headerTuple,self.struct)
309 312 header.tofile(fp)
310 313
311 314 dynamic = self.dynamic
312 315 dynamic.tofile(fp)
313 316
314 317 return 1
315 318
316 319
317 320
318 321 class ProcessingHeader(Header):
319 322
320 323 size = None
321 324 dtype = None
322 325 blockSize = None
323 326 profilesPerBlock = None
324 327 dataBlocksPerFile = None
325 328 nWindows = None
326 329 processFlags = None
327 330 nCohInt = None
328 331 nIncohInt = None
329 332 totalSpectra = None
330 333 struct = None
331 334 flag_dc = None
332 335 flag_cspc = None
333 336
    def __init__(self):
        # Scalar fields of the fixed-size part (filled in by read() or a writer)
        self.size = 0
        self.dtype = 0
        self.blockSize = 0
        self.profilesPerBlock = 0
        self.dataBlocksPerFile = 0
        self.nWindows = 0
        self.processFlags = 0
        self.nCohInt = 0
        self.nIncohInt = 0
        self.totalSpectra = 0
        # little-endian on-disk layout of the fixed-size part
        self.struct = numpy.dtype([
                             ('nSize','<u4'),
                             ('nDataType','<u4'),
                             ('nSizeOfDataBlock','<u4'),
                             ('nProfilesperBlock','<u4'),
                             ('nDataBlocksperFile','<u4'),
                             ('nNumWindows','<u4'),
                             ('nProcessFlags','<u4'),
                             ('nCoherentIntegrations','<u4'),
                             ('nIncoherentIntegrations','<u4'),
                             ('nTotalSpectra','<u4')
                             ])
        # dynamic part: sampling windows, spectra combinations and (optional) code
        self.samplingWindow = 0
        self.structSamplingWindow = numpy.dtype([('h0','<f4'),('dh','<f4'),('nsa','<u4')])
        self.nHeights = 0
        self.firstHeight = 0
        self.deltaHeight = 0
        self.samplesWin = 0
        self.spectraComb = 0
        self.nCode = None
        self.code = None
        self.nBaud = None
        # flags derived from processFlags / spectraComb during read()
        self.shif_fft = False
        self.flag_dc = False
        self.flag_cspc = False
370 373
371 374 def read(self, fp):
372 375 try:
373 376 header = numpy.fromfile(fp,self.struct,1)
374 377 self.size = int(header['nSize'][0])
375 378 self.dtype = int(header['nDataType'][0])
376 379 self.blockSize = int(header['nSizeOfDataBlock'][0])
377 380 self.profilesPerBlock = int(header['nProfilesperBlock'][0])
378 381 self.dataBlocksPerFile = int(header['nDataBlocksperFile'][0])
379 382 self.nWindows = int(header['nNumWindows'][0])
380 383 self.processFlags = header['nProcessFlags']
381 384 self.nCohInt = int(header['nCoherentIntegrations'][0])
382 385 self.nIncohInt = int(header['nIncoherentIntegrations'][0])
383 386 self.totalSpectra = int(header['nTotalSpectra'][0])
384 387 self.samplingWindow = numpy.fromfile(fp,self.structSamplingWindow,self.nWindows)
385 388 self.nHeights = int(numpy.sum(self.samplingWindow['nsa']))
386 389 self.firstHeight = float(self.samplingWindow['h0'][0])
387 390 self.deltaHeight = float(self.samplingWindow['dh'][0])
388 391 self.samplesWin = self.samplingWindow['nsa']
389 392 self.spectraComb = numpy.fromfile(fp,'u1',2*self.totalSpectra)
390 393
391 394 if ((self.processFlags & PROCFLAG.DEFINE_PROCESS_CODE) == PROCFLAG.DEFINE_PROCESS_CODE):
392 395 self.nCode = int(numpy.fromfile(fp,'<u4',1))
393 396 self.nBaud = int(numpy.fromfile(fp,'<u4',1))
394 397 self.code = numpy.fromfile(fp,'<f4',self.nCode*self.nBaud).reshape(self.nCode,self.nBaud)
395 398
396 399 if ((self.processFlags & PROCFLAG.SHIFT_FFT_DATA) == PROCFLAG.SHIFT_FFT_DATA):
397 400 self.shif_fft = True
398 401 else:
399 402 self.shif_fft = False
400 403
401 404 if ((self.processFlags & PROCFLAG.SAVE_CHANNELS_DC) == PROCFLAG.SAVE_CHANNELS_DC):
402 405 self.flag_dc = True
403 406
404 407 nChannels = 0
405 408 nPairs = 0
406 409 pairList = []
407 410
408 411 for i in range( 0, self.totalSpectra*2, 2 ):
409 412 if self.spectraComb[i] == self.spectraComb[i+1]:
410 413 nChannels = nChannels + 1 #par de canales iguales
411 414 else:
412 415 nPairs = nPairs + 1 #par de canales diferentes
413 416 pairList.append( (self.spectraComb[i], self.spectraComb[i+1]) )
414 417
415 418 self.flag_cspc = False
416 419 if nPairs > 0:
417 420 self.flag_cspc = True
418 421
419 422 except Exception, e:
420 423 print "ProcessingHeader: " + e
421 424 return 0
422 425
423 426 return 1
424 427
425 428 def write(self, fp):
426 429 headerTuple = (self.size,
427 430 self.dtype,
428 431 self.blockSize,
429 432 self.profilesPerBlock,
430 433 self.dataBlocksPerFile,
431 434 self.nWindows,
432 435 self.processFlags,
433 436 self.nCohInt,
434 437 self.nIncohInt,
435 438 self.totalSpectra)
436 439
437 440 header = numpy.array(headerTuple,self.struct)
438 441 header.tofile(fp)
439 442
440 443 if self.nWindows != 0:
441 444 sampleWindowTuple = (self.firstHeight,self.deltaHeight,self.samplesWin)
442 445 samplingWindow = numpy.array(sampleWindowTuple,self.structSamplingWindow)
443 446 samplingWindow.tofile(fp)
444 447
445 448
446 449 if self.totalSpectra != 0:
447 450 spectraComb = numpy.array([],numpy.dtype('u1'))
448 451 spectraComb = self.spectraComb
449 452 spectraComb.tofile(fp)
450 453
451 454
452 455 if self.processFlags & PROCFLAG.DEFINE_PROCESS_CODE == PROCFLAG.DEFINE_PROCESS_CODE:
453 456 nCode = numpy.array([self.nCode], numpy.dtype('u4')) #Probar con un dato que almacene codigo, hasta el momento no se hizo la prueba
454 457 nCode.tofile(fp)
455 458
456 459 nBaud = numpy.array([self.nBaud], numpy.dtype('u4'))
457 460 nBaud.tofile(fp)
458 461
459 462 code = self.code.reshape(self.nCode*self.nBaud)
460 463 code = code.astype(numpy.dtype('<f4'))
461 464 code.tofile(fp)
462 465
463 466 return 1
464 467
class RCfunction:
    # Radar-controller function codes as stored in the header.
    # Semantics beyond the names are not visible here -- confirm against
    # the Jicamarca raw-data format specification.
    NONE=0
    FLIP=1
    CODE=2
    SAMPLING=3
    LIN6DIV256=4
    SYNCHRO=5
472 475
class nCodeType:
    # Identifiers for the transmit-code family recorded in the header.
    # Values are the on-disk codes; names suggest Barker/complementary
    # code lengths -- confirm against the format specification.
    NONE=0
    USERDEFINE=1
    BARKER2=2
    BARKER3=3
    BARKER4=4
    BARKER5=5
    BARKER7=6
    BARKER11=7
    BARKER13=8
    AC128=9
    COMPLEMENTARYCODE2=10
    COMPLEMENTARYCODE4=11
    COMPLEMENTARYCODE8=12
    COMPLEMENTARYCODE16=13
    COMPLEMENTARYCODE32=14
    COMPLEMENTARYCODE64=15
    COMPLEMENTARYCODE128=16
    CODE_BINARY28=17
492 495
class PROCFLAG:
    # Bit flags packed into the processing header's nProcessFlags word.
    # ProcessingHeader.read()/write() test these with bitwise AND.

    # Operations applied to the data (covered by OPERATION_MASK).
    COHERENT_INTEGRATION = numpy.uint32(0x00000001)
    DECODE_DATA = numpy.uint32(0x00000002)
    SPECTRA_CALC = numpy.uint32(0x00000004)
    INCOHERENT_INTEGRATION = numpy.uint32(0x00000008)
    POST_COHERENT_INTEGRATION = numpy.uint32(0x00000010)
    SHIFT_FFT_DATA = numpy.uint32(0x00000020)

    # Sample data type (covered by DATATYPE_MASK); exactly one expected.
    DATATYPE_CHAR = numpy.uint32(0x00000040)
    DATATYPE_SHORT = numpy.uint32(0x00000080)
    DATATYPE_LONG = numpy.uint32(0x00000100)
    DATATYPE_INT64 = numpy.uint32(0x00000200)
    DATATYPE_FLOAT = numpy.uint32(0x00000400)
    DATATYPE_DOUBLE = numpy.uint32(0x00000800)

    # Memory layout of the block (covered by DATAARRANGE_MASK).
    DATAARRANGE_CONTIGUOUS_CH = numpy.uint32(0x00001000)
    DATAARRANGE_CONTIGUOUS_H = numpy.uint32(0x00002000)
    DATAARRANGE_CONTIGUOUS_P = numpy.uint32(0x00004000)

    # Optional content flags tested in ProcessingHeader.read().
    SAVE_CHANNELS_DC = numpy.uint32(0x00008000)
    DEFLIP_DATA = numpy.uint32(0x00010000)
    DEFINE_PROCESS_CODE = numpy.uint32(0x00020000)

    # Acquisition system identifier (covered by ACQ_SYS_MASK); these are
    # multi-bit field values, not independent flags.
    ACQ_SYS_NATALIA = numpy.uint32(0x00040000)
    ACQ_SYS_ECHOTEK = numpy.uint32(0x00080000)
    ACQ_SYS_ADRXD = numpy.uint32(0x000C0000)
    ACQ_SYS_JULIA = numpy.uint32(0x00100000)
    ACQ_SYS_XXXXXX = numpy.uint32(0x00140000)

    EXP_NAME_ESP = numpy.uint32(0x00200000)
    CHANNEL_NAMES_ESP = numpy.uint32(0x00400000)

    # Masks to extract each multi-bit field from the flags word.
    OPERATION_MASK = numpy.uint32(0x0000003F)
    DATATYPE_MASK = numpy.uint32(0x00000FC0)
    DATAARRANGE_MASK = numpy.uint32(0x00007000)
    ACQ_SYS_MASK = numpy.uint32(0x001C0000)
General Comments 0
You need to be logged in to leave comments. Login now