##// END OF EJS Templates
Bug fixed: Se modificó el tipo de dato a integer al leer el código en el RadarControllerHeader
Miguel Valdez -
r218:a84a1beaf0cb
parent child
Show More
@@ -1,2513 +1,2513
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JRODataIO.py 169 2012-11-19 21:57:03Z murco $
5 5 '''
6 6
7 7 import os, sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import time, datetime
13 13
14 14 from jrodata import *
15 15 from jroheaderIO import *
16 16 from jroprocessing import *
17 17
def isNumber(str):
    """
    Check whether a string can be converted to a number.

    Input:
        str : the string (or object) to test for numeric convertibility

    Return:
        True  : the string represents a number
        False : it does not
    """
    try:
        float( str )
        return True
    except (TypeError, ValueError):
        # float() raises ValueError for non-numeric strings and TypeError
        # for objects it cannot convert; anything else (KeyboardInterrupt,
        # SystemExit...) must propagate instead of being swallowed by a
        # bare except as the original did.
        return False
36 36
def isThisFileinRange(filename, startUTSeconds, endUTSeconds):
    """
    Determine whether a Jicamarca data file holds data inside the given
    date range.

    Inputs:
        filename       : full path of the data file in Jicamarca format (.r)

        startUTSeconds : start of the selected range, in seconds counted
                         from 01/01/1970 (Unix epoch).
        endUTSeconds   : end of the selected range, in seconds counted
                         from 01/01/1970 (Unix epoch).

    Return:
        1 if the file contains data in the specified range, 0 otherwise.

    Exceptions:
        IOError when the file does not exist or cannot be opened.
    """
    basicHeaderObj = BasicHeader()

    try:
        fp = open(filename,'rb')
    except:
        raise IOError, "The file %s can't be opened" %(filename)

    sts = basicHeaderObj.read(fp)
    fp.close()

    if not(sts):
        print "Skipping the file %s because it has not a valid header" %(filename)
        return 0

    # only the first basic header's timestamp is checked: the file is
    # accepted when its first block falls in [startUTSeconds, endUTSeconds)
    if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
        return 0

    return 1
76 76
def getlastFileFromPath(path, ext):
    """
    Filter the contents of *path*, keeping only filenames that match the
    "PYYYYDDDSSS.ext" convention, and return the last one.

    Input:
        path : folder whose files are inspected
        ext  : extension of the files to keep (case-insensitive)

    Return:
        The last matching filename (no path), or None when nothing matches.
    """
    # filename layout (positions): 0=prefix, 1-4=YYYY, 5-7=DDD, 8-10=SSS
    candidates = []

    for name in os.listdir(path):
        try:
            int(name[1:5])   # year field must be numeric
            int(name[5:8])   # day-of-year field must be numeric

            if os.path.splitext(name)[-1].upper() != ext.upper():
                continue
        except:
            continue

        candidates.append(name)

    if not candidates:
        return None

    candidates.sort(key=str.lower)
    return candidates[-1]
111 111
def checkForRealPath(path, year, doy, set, ext):
    """
    Because Linux filesystems are case sensitive, probe the upper/lower-case
    combinations of the day-folder prefix and the file prefix to locate the
    real on-disk path of a data file.

    Example:
        the real file is .../D2009307/P2009307367.ext; the function tries
        d/p, d/P, D/p and D/P until one combination exists.

    Return:
        (full path, filename) when a combination exists on disk; otherwise
        (None, last filename tried). Unknown extensions yield (None, None).
    """
    if ext.lower() == ".r":        # voltage
        dirChars, fileChars = "dD", "dD"
    elif ext.lower() == ".pdata":  # spectra
        dirChars, fileChars = "dD", "pP"
    else:
        return None, None

    filename = None

    for dirChar in dirChars:
        for fileChar in fileChars:
            # day folder xYYYYDDD and file xYYYYDDDSSS.ext
            subdir = "%s%04d%03d" % (dirChar, year, doy)
            filename = "%s%04d%03d%03d%s" % (fileChar, year, doy, set, ext)
            fullpath = os.path.join(path, subdir, filename)

            if os.path.exists(fullpath):
                return fullpath, filename

    return None, filename
161 161
class JRODataIO:
    """
    Base class holding the state shared by JRO raw-data readers and writers:
    the four header objects, the open file handle, and the bookkeeping
    counters and flags used while walking files block by block.
    """

    # speed of light [m/s]; used to convert the IPP into seconds
    c = 3E8

    isConfig = False

    basicHeaderObj = BasicHeader()

    systemHeaderObj = SystemHeader()

    radarControllerHeaderObj = RadarControllerHeader()

    processingHeaderObj = ProcessingHeader()

    # nonzero when reading files that are still being written
    online = 0

    # numpy dtype of one complex sample, derived from the processing header
    dtype = None

    pathList = []

    filenameList = []

    filename = None

    ext = None

    flagIsNewFile = 1

    # set to 1 when the time gap between blocks exceeds maxTimeStep
    flagTimeBlock = 0

    flagIsNewBlock = 0

    # currently open file object
    fp = None

    firstHeaderSize = 0

    # size [bytes] of the fixed basic header
    basicHeaderSize = 24

    versionFile = 1103

    fileSize = None

    ippSeconds = None

    # expected file size computed from the headers
    fileSizeByHeader = None

    fileIndex = None

    profileIndex = None

    blockIndex = None

    nTotalBlocks = None

    # maximum allowed gap [s] between consecutive blocks
    maxTimeStep = 30

    lastUTTime = None

    datablock = None

    dataOut = None

    blocksize = None

    def __init__(self):
        # abstract: concrete readers/writers provide their own constructor
        raise ValueError, "Not implemented"

    def run(self):
        # abstract: implemented by ProcessingUnit/Operation subclasses
        raise ValueError, "Not implemented"

    def getOutput(self):
        # accessor for the output data object
        return self.dataOut
237 237
class JRODataReader(JRODataIO, ProcessingUnit):
    """
    Abstract reader of JRO raw-data files; concrete subclasses (e.g. voltage
    or spectra readers) implement the block/profile extraction.
    """

    # number of data blocks read from the current file
    nReadBlocks = 0

    delay = 10 #number of seconds waiting a new file

    nTries = 3 #quantity tries

    nFiles = 3 #number of files for searching

    flagNoMoreFiles = 0
249 249
    def __init__(self):

        """
        Abstract constructor: concrete readers must implement their own
        initialization.
        """

        raise ValueError, "This method has not been implemented"
257 257
258 258
    def createObjByDefault(self):
        """
        Abstract: create the default output data object.
        """
        raise ValueError, "This method has not been implemented"
264 264
    def getBlockDimension(self):
        # abstract: subclasses compute the shape of one data block
        raise ValueError, "No implemented"
268 268
    def __searchFilesOffLine(self,
                             path,
                             startDate,
                             endDate,
                             startTime=datetime.time(0,0,0),
                             endTime=datetime.time(23,59,59),
                             set=None,
                             expLabel="",
                             ext=".r"):
        """
        Offline search: walk the day folders (named ?YYYYDDD) between
        startDate and endDate and collect the data files whose first block
        falls inside the [startTime, endTime] window of each day.

        Sets self.filenameList and returns (pathList, filenameList), or
        (None, None) when nothing matches.
        """
        dirList = []
        for thisPath in os.listdir(path):
            if os.path.isdir(os.path.join(path,thisPath)):
                dirList.append(thisPath)

        if not(dirList):
            return None, None

        pathList = []
        dateList = []

        thisDate = startDate

        while(thisDate <= endDate):
            year = thisDate.timetuple().tm_year
            doy = thisDate.timetuple().tm_yday

            # day folders have a one-char prefix followed by YYYYDDD
            match = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy))
            if len(match) == 0:
                thisDate += datetime.timedelta(1)
                continue

            pathList.append(os.path.join(path,match[0],expLabel))
            dateList.append(thisDate)
            thisDate += datetime.timedelta(1)

        filenameList = []
        for index in range(len(pathList)):

            thisPath = pathList[index]
            fileList = glob.glob1(thisPath, "*%s" %ext)
            fileList.sort()

            # search for data within the indicated time range
            thisDate = dateList[index]
            startDT = datetime.datetime.combine(thisDate, startTime)
            endDT = datetime.datetime.combine(thisDate, endTime)

            startUtSeconds = time.mktime(startDT.timetuple())
            endUtSeconds = time.mktime(endDT.timetuple())

            for file in fileList:

                filename = os.path.join(thisPath,file)

                if isThisFileinRange(filename, startUtSeconds, endUtSeconds):
                    filenameList.append(filename)

        if not(filenameList):
            return None, None

        self.filenameList = filenameList

        return pathList, filenameList
332 332
    def __searchFilesOnLine(self, path, startDate=None, endDate=None, startTime=None, endTime=None, expLabel = "", ext = None):

        """
        Look up the last file of the last day folder (optionally bounded by
        startDate/endDate) and return it together with its date fields.

        Input:
            path      : folder that contains the day folders with data files

            startDate : initial date; directories whose file end time is
                        earlier are rejected (datetime.date object)

            endDate   : final date; directories whose file start time is
                        later are rejected (datetime.date object)

            startTime : initial time; files ending earlier are rejected
                        (datetime.time object)

            endTime   : final time; files starting later are rejected
                        (datetime.time object)

            expLabel  : name of the sub-experiment (subfolder)

            ext       : extension of the data files

        Return:
            directory : directory where the file was found
            filename  : the last file of that directory
            year      : the year of the file
            doy       : the day-of-year of the file
            set       : the set number of the file
        """
        dirList = []
        pathList = []
        directory = None

        # keep only the directories
        for thisPath in os.listdir(path):
            if os.path.isdir(os.path.join(path, thisPath)):
                dirList.append(thisPath)

        if not(dirList):
            return None, None, None, None, None

        dirList = sorted( dirList, key=str.lower )

        if startDate:
            startDateTime = datetime.datetime.combine(startDate, startTime)
            thisDateTime = startDateTime
            if endDate == None: endDateTime = startDateTime
            else: endDateTime = datetime.datetime.combine(endDate, endTime)

            while(thisDateTime <= endDateTime):
                year = thisDateTime.timetuple().tm_year
                doy = thisDateTime.timetuple().tm_yday

                match = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy))
                if len(match) == 0:
                    thisDateTime += datetime.timedelta(1)
                    continue

                pathList.append(os.path.join(path,match[0], expLabel))
                thisDateTime += datetime.timedelta(1)

            if not(pathList):
                print "\tNo files in range: %s - %s" %(startDateTime.ctime(), endDateTime.ctime())
                return None, None, None, None, None

            directory = pathList[0]

        else:
            # no start date given: take the most recent day folder
            directory = dirList[-1]
            directory = os.path.join(path,directory)

        filename = getlastFileFromPath(directory, ext)

        if not(filename):
            return None, None, None, None, None

        if not(self.__verifyFile(os.path.join(directory, filename))):
            return None, None, None, None, None

        # filename layout: xYYYYDDDSSS.ext
        year = int( filename[1:5] )
        doy = int( filename[5:8] )
        set = int( filename[8:11] )

        return directory, filename, year, doy, set
422 422
423 423
424 424
    def __setNextFileOffline(self):
        """
        Open the next valid file from self.filenameList.

        Updates fileIndex, filename, fileSize, fp and flagIsNewFile.
        Return 1 on success, 0 when the list is exhausted (also sets
        flagNoMoreFiles).
        """
        idFile = self.fileIndex

        while (True):
            idFile += 1
            if not(idFile < len(self.filenameList)):
                self.flagNoMoreFiles = 1
                print "No more Files"
                return 0

            filename = self.filenameList[idFile]

            # skip files without enough data
            if not(self.__verifyFile(filename)):
                continue

            fileSize = os.path.getsize(filename)
            fp = open(filename,'rb')
            break

        self.flagIsNewFile = 1
        self.fileIndex = idFile
        self.filename = filename
        self.fileSize = fileSize
        self.fp = fp

        print "Setting the file: %s"%self.filename

        return 1
454 454
    def __setNextFileOnline(self):
        """
        Search for the next file with enough data to be read inside a
        specific folder; when no valid file is found, wait a fixed delay and
        retry over the next possible n files.

        Affected:
            self.flagIsNewFile
            self.filename
            self.fileSize
            self.fp
            self.set
            self.flagNoMoreFiles

        Return:
            0 : no valid file could be found after the search
            1 : the file was opened successfully and is ready to be read

        Exceptions:
            If a given file cannot be opened
        """
        nFiles = 0
        fileOk_flag = False
        firstTime_flag = True

        self.set += 1

        # look for the first available file
        file, filename = checkForRealPath( self.path, self.year, self.doy, self.set, self.ext )
        if file:
            if self.__verifyFile(file, False):
                fileOk_flag = True

        # when no file is found, wait and search again
        if not(fileOk_flag):
            for nFiles in range(self.nFiles+1): # search over the next self.nFiles+1 candidate files

                if firstTime_flag: # on the first pass retry self.nTries times
                    tries = self.nTries
                else:
                    tries = 1 # afterwards only once per candidate

                for nTries in range( tries ):
                    if firstTime_flag:
                        print "\tWaiting %0.2f sec for the file \"%s\" , try %03d ..." % ( self.delay, filename, nTries+1 )
                        time.sleep( self.delay )
                    else:
                        # NOTE(review): self.optchar is not defined in this
                        # class; presumably provided by a subclass -- confirm
                        print "\tSearching next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext)

                    file, filename = checkForRealPath( self.path, self.year, self.doy, self.set, self.ext )
                    if file:
                        if self.__verifyFile(file):
                            fileOk_flag = True
                            break

                if fileOk_flag:
                    break

                firstTime_flag = False

                print "\tSkipping the file \"%s\" due to this file doesn't exist" % filename
                self.set += 1

                if nFiles == (self.nFiles-1): # file not found: move to the next day folder
                    self.set = 0
                    self.doy += 1

        if fileOk_flag:
            self.fileSize = os.path.getsize( file )
            self.filename = file
            self.flagIsNewFile = 1
            if self.fp != None: self.fp.close()
            self.fp = open(file, 'rb')
            self.flagNoMoreFiles = 0
            print 'Setting the file: %s' % file
        else:
            self.fileSize = 0
            self.filename = None
            self.flagIsNewFile = 0
            self.fp = None
            self.flagNoMoreFiles = 1
            print 'No more Files'

        return fileOk_flag
539 539
540 540
    def setNextFile(self):
        """
        Close the current file, open the next one (online or offline mode)
        and read its first header. Return 1 on success, 0 otherwise.
        """
        if self.fp != None:
            self.fp.close()

        if self.online:
            newFile = self.__setNextFileOnline()
        else:
            newFile = self.__setNextFileOffline()

        if not(newFile):
            return 0

        self.__readFirstHeader()
        self.nReadBlocks = 0
        return 1
556 556
    def __waitNewBlock(self):
        """
        Online mode only: while the current file has not yet delivered all of
        its blocks, re-open it periodically until a full block (plus its
        basic header) becomes available, then read that basic header.

        Return 1 when a new block is ready, 0 otherwise.
        """
        # only wait in online mode and while a complete block is pending
        if not self.online:
            return 0

        if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
            return 0

        currentPointer = self.fp.tell()

        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        for nTries in range( self.nTries ):

            # re-open to pick up data appended by the acquisition process
            self.fp.close()
            self.fp = open( self.filename, 'rb' )
            self.fp.seek( currentPointer )

            self.fileSize = os.path.getsize( self.filename )
            currentSize = self.fileSize - currentPointer

            if ( currentSize >= neededSize ):
                self.__rdBasicHeader()
                return 1

            print "\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
            time.sleep( self.delay )

        return 0
587 587
    def __setNewBlock(self):
        """
        Position the file pointer at the next data block, reading its basic
        header; falls back to waiting (online mode) or moving to the next
        file. Sets flagTimeBlock when the time gap between consecutive
        blocks exceeds maxTimeStep.

        Return 1 on success, 0 when there is no more data.
        """
        if self.fp == None:
            return 0

        if self.flagIsNewFile:
            return 1

        self.lastUTTime = self.basicHeaderObj.utc
        currentSize = self.fileSize - self.fp.tell()
        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        if (currentSize >= neededSize):
            self.__rdBasicHeader()
            return 1

        if self.__waitNewBlock():
            return 1

        if not(self.setNextFile()):
            return 0

        # gap between the last block of the previous file and the first
        # block of the new one (setNextFile refreshed basicHeaderObj.utc)
        deltaTime = self.basicHeaderObj.utc - self.lastUTTime

        self.flagTimeBlock = 0

        if deltaTime > self.maxTimeStep:
            self.flagTimeBlock = 1

        return 1
617 617
618 618
619 619 def readNextBlock(self):
620 620 if not(self.__setNewBlock()):
621 621 return 0
622 622
623 623 if not(self.readBlock()):
624 624 return 0
625 625
626 626 return 1
627 627
628 628 def __rdProcessingHeader(self, fp=None):
629 629 if fp == None:
630 630 fp = self.fp
631 631
632 632 self.processingHeaderObj.read(fp)
633 633
634 634 def __rdRadarControllerHeader(self, fp=None):
635 635 if fp == None:
636 636 fp = self.fp
637 637
638 638 self.radarControllerHeaderObj.read(fp)
639 639
640 640 def __rdSystemHeader(self, fp=None):
641 641 if fp == None:
642 642 fp = self.fp
643 643
644 644 self.systemHeaderObj.read(fp)
645 645
646 646 def __rdBasicHeader(self, fp=None):
647 647 if fp == None:
648 648 fp = self.fp
649 649
650 650 self.basicHeaderObj.read(fp)
651 651
652 652
    def __readFirstHeader(self):
        """
        Read the four headers at the start of a file (basic, system, radar
        controller, processing) and derive dtype, ippSeconds and the
        expected file size from them.
        """
        self.__rdBasicHeader()
        self.__rdSystemHeader()
        self.__rdRadarControllerHeader()
        self.__rdProcessingHeader()

        self.firstHeaderSize = self.basicHeaderObj.size

        # decode the sample data type index from the processing flags:
        # 0..3 -> int8/16/32/64 pairs, 4..5 -> float32/64 pairs
        datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
        if datatype == 0:
            datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
        elif datatype == 1:
            datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')])
        elif datatype == 2:
            datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')])
        elif datatype == 3:
            datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')])
        elif datatype == 4:
            datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')])
        elif datatype == 5:
            datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')])
        else:
            raise ValueError, 'Data type was not defined'

        self.dtype = datatype_str
        # assumes ipp is expressed in km -- TODO confirm; 2*1000*ipp/c
        # would then be the round-trip time in seconds
        self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
        self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1)
#        self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
#        self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
        self.getBlockDimension()
683 683
684 684
685 685 def __verifyFile(self, filename, msgFlag=True):
686 686 msg = None
687 687 try:
688 688 fp = open(filename, 'rb')
689 689 currentPosition = fp.tell()
690 690 except:
691 691 if msgFlag:
692 692 print "The file %s can't be opened" % (filename)
693 693 return False
694 694
695 695 neededSize = self.processingHeaderObj.blockSize + self.firstHeaderSize
696 696
697 697 if neededSize == 0:
698 698 basicHeaderObj = BasicHeader()
699 699 systemHeaderObj = SystemHeader()
700 700 radarControllerHeaderObj = RadarControllerHeader()
701 701 processingHeaderObj = ProcessingHeader()
702 702
703 703 try:
704 if not( basicHeaderObj.read(fp) ): raise ValueError
705 if not( systemHeaderObj.read(fp) ): raise ValueError
706 if not( radarControllerHeaderObj.read(fp) ): raise ValueError
707 if not( processingHeaderObj.read(fp) ): raise ValueError
704 if not( basicHeaderObj.read(fp) ): raise IOError
705 if not( systemHeaderObj.read(fp) ): raise IOError
706 if not( radarControllerHeaderObj.read(fp) ): raise IOError
707 if not( processingHeaderObj.read(fp) ): raise IOError
708 708 data_type = int(numpy.log2((processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
709 709
710 710 neededSize = processingHeaderObj.blockSize + basicHeaderObj.size
711 711
712 712 except:
713 713 if msgFlag:
714 714 print "\tThe file %s is empty or it hasn't enough data" % filename
715 715
716 716 fp.close()
717 717 return False
718 718 else:
719 719 msg = "\tSkipping the file %s due to it hasn't enough data" %filename
720 720
721 721 fp.close()
722 722 fileSize = os.path.getsize(filename)
723 723 currentSize = fileSize - currentPosition
724 724 if currentSize < neededSize:
725 725 if msgFlag and (msg != None):
726 726 print msg #print"\tSkipping the file %s due to it hasn't enough data" %filename
727 727 return False
728 728
729 729 return True
730 730
    def setup(self,
              path=None,
              startDate=None,
              endDate=None,
              startTime=datetime.time(0,0,0),
              endTime=datetime.time(23,59,59),
              set=0,
              expLabel = "",
              ext = None,
              online = False,
              delay = 60):
        """
        Configure the reader and open the first file.

        Inputs:
            path               : root folder with the day sub-folders
            startDate, endDate : date range (offline mode)
            startTime, endTime : time window within each day (offline mode)
            set                : initial set number (online mode)
            expLabel           : sub-experiment folder name
            ext                : file extension (defaults to class attribute)
            online             : True to poll for files being written
            delay              : seconds between online retries

        Return:
            self.dataOut on success; None (online) or sys.exit(-1) (offline)
            when no valid file is found.
        """

        if path == None:
            raise ValueError, "The path is not valid"

        if ext == None:
            ext = self.ext

        if online:
            print "Searching files in online mode..."
            doypath, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext)

            if not(doypath):
                # retry a few times: the acquisition may not have produced
                # a file yet
                for nTries in range( self.nTries ):
                    print '\tWaiting %0.2f sec for an valid file in %s: try %02d ...' % (self.delay, path, nTries+1)
                    time.sleep( self.delay )
                    doypath, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext)
                    if doypath:
                        break

            if not(doypath):
                # NOTE(review): message text is garbled ("valied")
                print "There 'isn't valied files in %s" % path
                return None

            self.year = year
            self.doy = doy
            self.set = set - 1
            self.path = path

        else:
            print "Searching files in offline mode ..."
            pathList, filenameList = self.__searchFilesOffLine(path, startDate, endDate, startTime, endTime, set, expLabel, ext)

            if not(pathList):
                print "No *%s files into the folder %s \nfor the range: %s - %s"%(ext, path,
                                                            datetime.datetime.combine(startDate,startTime).ctime(),
                                                            datetime.datetime.combine(endDate,endTime).ctime())

                sys.exit(-1)


            self.fileIndex = -1
            self.pathList = pathList
            self.filenameList = filenameList

        self.online = online
        self.delay = delay
        ext = ext.lower()
        self.ext = ext

        if not(self.setNextFile()):
            if (startDate!=None) and (endDate!=None):
                print "No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
            elif startDate != None:
                print "No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
            else:
                print "No files"

            sys.exit(-1)

#        self.updateDataHeader()

        return self.dataOut
804 804
805 805 def getData():
806 806
807 807 raise ValueError, "This method has not been implemented"
808 808
809 809 def hasNotDataInBuffer():
810 810
811 811 raise ValueError, "This method has not been implemented"
812 812
813 813 def readBlock():
814 814
815 815 raise ValueError, "This method has not been implemented"
816 816
    def isEndProcess(self):
        """Return nonzero when there are no more files to read."""

        return self.flagNoMoreFiles
820 820
821 821 def printReadBlocks(self):
822 822
823 823 print "Number of read blocks per file %04d" %self.nReadBlocks
824 824
825 825 def printTotalBlocks(self):
826 826
827 827 print "Number of read blocks %04d" %self.nTotalBlocks
828 828
    def run(self, **kwargs):
        """
        ProcessingUnit entry point: configure on the first call, then
        deliver one unit of data per call via getData().
        """
        if not(self.isConfig):

#            self.dataOut = dataOut
            self.setup(**kwargs)
            self.isConfig = True

        self.getData()
838 838
class JRODataWriter(JRODataIO, Operation):

    """
    Writes processed data to files (.r or .pdata). Data is always written
    block by block.
    """

    # index of the block being written inside the current file
    blockIndex = 0

    path = None

    setFile = None

    profilesPerBlock = None

    blocksPerFile = None

    nWriteBlocks = 0
857 857
858 858 def __init__(self, dataOut=None):
859 859 raise ValueError, "Not implemented"
860 860
861 861
862 862 def hasAllDataInBuffer(self):
863 863 raise ValueError, "Not implemented"
864 864
865 865
866 866 def setBlockDimension(self):
867 867 raise ValueError, "Not implemented"
868 868
869 869
870 870 def writeBlock(self):
871 871 raise ValueError, "No implemented"
872 872
873 873
874 874 def putData(self):
875 875 raise ValueError, "No implemented"
876 876
    def getDataHeader(self):
        """
        Abstract: obtain a copy of the First Header.

        Affected:
            self.basicHeaderObj
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.processingHeaderObj

        Return:
            None
        """

        raise ValueError, "No implemented"
893 893
    def getBasicHeader(self):
        """Fill self.basicHeaderObj from self.dataOut for the next block."""

        self.basicHeaderObj.size = self.basicHeaderSize #bytes
        self.basicHeaderObj.version = self.versionFile
        self.basicHeaderObj.dataBlock = self.nTotalBlocks

        # split utctime into integer seconds + milliseconds
        utc = numpy.floor(self.dataOut.utctime)
        milisecond = (self.dataOut.utctime - utc)* 1000.0

        self.basicHeaderObj.utc = utc
        self.basicHeaderObj.miliSecond = milisecond
        self.basicHeaderObj.timeZone = 0
        self.basicHeaderObj.dstFlag = 0
        self.basicHeaderObj.errorCount = 0
908 908
    def __writeFirstHeader(self):
        """
        Write the first header of the file, i.e. the Basic header plus the
        Long header (SystemHeader, RadarControllerHeader, ProcessingHeader).

        Affected:
            __dataType

        Return:
            None
        """

        # the basic header's size field covers the whole first header
        sizeLongHeader = self.systemHeaderObj.size + self.radarControllerHeaderObj.size + self.processingHeaderObj.size
        self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader

        self.basicHeaderObj.write(self.fp)
        self.systemHeaderObj.write(self.fp)
        self.radarControllerHeaderObj.write(self.fp)
        self.processingHeaderObj.write(self.fp)

        self.dtype = self.dataOut.dtype
931 931
    def __setNewBlock(self):
        """
        If this is a new file write the First Header; otherwise write only
        the Basic Header.

        Return:
            0 : nothing could be written
            1 : the Basic or the First Header was written
        """
        if self.fp == None:
            self.setNextFile()

        if self.flagIsNewFile:
            return 1

        if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
            self.basicHeaderObj.write(self.fp)
            return 1

        # file is full: roll over to the next one
        if not( self.setNextFile() ):
            return 0

        return 1
954 954
955 955
    def writeNextBlock(self):
        """
        Select the next block of data and write it to a file.

        Return:
            0 : the data block could not be written
            1 : the data block was written successfully
        """
        if not( self.__setNewBlock() ):
            return 0

        self.writeBlock()

        return 1
970 970
    def setNextFile(self):
        """
        Determine the next file to be written.

        Affected:
            self.filename
            self.subfolder
            self.fp
            self.setFile
            self.flagIsNewFile

        Return:
            0 : the file cannot be written
            1 : the file is ready to be written
        """
        ext = self.ext
        path = self.path

        if self.fp != None:
            self.fp.close()

        # NOTE(review): uses self.dataOut.dataUtcTime here while
        # getBasicHeader reads self.dataOut.utctime -- confirm which
        # attribute the output object actually provides
        timeTuple = time.localtime( self.dataOut.dataUtcTime)
        subfolder = 'D%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)

        doypath = os.path.join( path, subfolder )
        if not( os.path.exists(doypath) ):
            os.mkdir(doypath)
            self.setFile = -1 #initialize the set counter
        else:
            filesList = os.listdir( doypath )
            if len( filesList ) > 0:
                filesList = sorted( filesList, key=str.lower )
                filen = filesList[-1]
                # the filename must follow this layout
                #    0 1234 567 89A BCDE (hex)
                #    x YYYY DDD SSS .ext
                if isNumber( filen[8:11] ):
                    self.setFile = int( filen[8:11] ) #continue from the set of the last file
                else:
                    self.setFile = -1
            else:
                self.setFile = -1 #initialize the set counter

        setFile = self.setFile
        setFile += 1

        # NOTE(review): self.optchar is not defined in this class;
        # presumably provided by a subclass -- confirm
        file = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
                                        timeTuple.tm_year,
                                        timeTuple.tm_yday,
                                        setFile,
                                        ext )

        filename = os.path.join( path, subfolder, file )

        fp = open( filename,'wb' )

        self.blockIndex = 0

        # save attributes
        self.filename = filename
        self.subfolder = subfolder
        self.fp = fp
        self.setFile = setFile
        self.flagIsNewFile = 1

        self.getDataHeader()

        print 'Writing the file: %s'%self.filename

        self.__writeFirstHeader()

        return 1
1043 1043
    def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=None, set=0, ext=None):
        """
        Set the format in which the data will be saved and write the First
        Header.

        Inputs:
            dataOut          : data object to be written out
            path             : destination path where files will be created
            blocksPerFile    : number of data blocks per output file
            profilesPerBlock : number of profiles in each block
            set              : initial file set number
            ext              : output extension (defaults to class attribute)

        Return:
            0 : setup failed
            1 : setup succeeded
        """

        if ext == None:
            ext = self.ext

        ext = ext.lower()

        self.ext = ext

        self.path = path

        self.setFile = set - 1

        self.blocksPerFile = blocksPerFile

        self.profilesPerBlock = profilesPerBlock

        self.dataOut = dataOut

        if not(self.setNextFile()):
            print "There isn't a next file"
            return 0

        self.setBlockDimension()

        return 1
1082 1082
1083 1083 def run(self, dataOut, **kwargs):
1084 1084
1085 1085 if not(self.isConfig):
1086 1086
1087 1087 self.setup(dataOut, **kwargs)
1088 1088 self.isConfig = True
1089 1089
1090 1090 self.putData()
1091 1091
1092 1092 class VoltageReader(JRODataReader):
1093 1093 """
1094 1094 Esta clase permite leer datos de voltage desde archivos en formato rawdata (.r). La lectura
1095 1095 de los datos siempre se realiza por bloques. Los datos leidos (array de 3 dimensiones:
1096 1096 perfiles*alturas*canales) son almacenados en la variable "buffer".
1097 1097
1098 1098 perfiles * alturas * canales
1099 1099
1100 1100 Esta clase contiene instancias (objetos) de las clases BasicHeader, SystemHeader,
1101 1101 RadarControllerHeader y Voltage. Los tres primeros se usan para almacenar informacion de la
1102 1102 cabecera de datos (metadata), y el cuarto (Voltage) para obtener y almacenar un perfil de
1103 1103 datos desde el "buffer" cada vez que se ejecute el metodo "getData".
1104 1104
1105 1105 Example:
1106 1106
1107 1107 dpath = "/home/myuser/data"
1108 1108
1109 1109 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
1110 1110
1111 1111 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
1112 1112
1113 1113 readerObj = VoltageReader()
1114 1114
1115 1115 readerObj.setup(dpath, startTime, endTime)
1116 1116
1117 1117 while(True):
1118 1118
1119 1119 #to get one profile
1120 1120 profile = readerObj.getData()
1121 1121
1122 1122 #print the profile
1123 1123 print profile
1124 1124
1125 1125 #If you want to see all datablock
1126 1126 print readerObj.datablock
1127 1127
1128 1128 if readerObj.flagNoMoreFiles:
1129 1129 break
1130 1130
1131 1131 """
1132 1132
1133 1133 ext = ".r"
1134 1134
1135 1135 optchar = "D"
1136 1136 dataOut = None
1137 1137
1138 1138
1139 1139 def __init__(self):
1140 1140 """
1141 1141 Inicializador de la clase VoltageReader para la lectura de datos de voltage.
1142 1142
1143 1143 Input:
1144 1144 dataOut : Objeto de la clase Voltage. Este objeto sera utilizado para
1145 1145 almacenar un perfil de datos cada vez que se haga un requerimiento
1146 1146 (getData). El perfil sera obtenido a partir del buffer de datos,
1147 1147 si el buffer esta vacio se hara un nuevo proceso de lectura de un
1148 1148 bloque de datos.
1149 1149 Si este parametro no es pasado se creara uno internamente.
1150 1150
1151 1151 Variables afectadas:
1152 1152 self.dataOut
1153 1153
1154 1154 Return:
1155 1155 None
1156 1156 """
1157 1157
1158 1158 self.isConfig = False
1159 1159
1160 1160 self.datablock = None
1161 1161
1162 1162 self.utc = 0
1163 1163
1164 1164 self.ext = ".r"
1165 1165
1166 1166 self.optchar = "D"
1167 1167
1168 1168 self.basicHeaderObj = BasicHeader()
1169 1169
1170 1170 self.systemHeaderObj = SystemHeader()
1171 1171
1172 1172 self.radarControllerHeaderObj = RadarControllerHeader()
1173 1173
1174 1174 self.processingHeaderObj = ProcessingHeader()
1175 1175
1176 1176 self.online = 0
1177 1177
1178 1178 self.fp = None
1179 1179
1180 1180 self.idFile = None
1181 1181
1182 1182 self.dtype = None
1183 1183
1184 1184 self.fileSizeByHeader = None
1185 1185
1186 1186 self.filenameList = []
1187 1187
1188 1188 self.filename = None
1189 1189
1190 1190 self.fileSize = None
1191 1191
1192 1192 self.firstHeaderSize = 0
1193 1193
1194 1194 self.basicHeaderSize = 24
1195 1195
1196 1196 self.pathList = []
1197 1197
1198 1198 self.filenameList = []
1199 1199
1200 1200 self.lastUTTime = 0
1201 1201
1202 1202 self.maxTimeStep = 30
1203 1203
1204 1204 self.flagNoMoreFiles = 0
1205 1205
1206 1206 self.set = 0
1207 1207
1208 1208 self.path = None
1209 1209
1210 1210 self.profileIndex = 9999
1211 1211
1212 1212 self.delay = 3 #seconds
1213 1213
1214 1214 self.nTries = 3 #quantity tries
1215 1215
1216 1216 self.nFiles = 3 #number of files for searching
1217 1217
1218 1218 self.nReadBlocks = 0
1219 1219
1220 1220 self.flagIsNewFile = 1
1221 1221
1222 1222 self.ippSeconds = 0
1223 1223
1224 1224 self.flagTimeBlock = 0
1225 1225
1226 1226 self.flagIsNewBlock = 0
1227 1227
1228 1228 self.nTotalBlocks = 0
1229 1229
1230 1230 self.blocksize = 0
1231 1231
1232 1232 self.dataOut = self.createObjByDefault()
1233 1233
1234 1234 def createObjByDefault(self):
1235 1235
1236 1236 dataObj = Voltage()
1237 1237
1238 1238 return dataObj
1239 1239
1240 1240 def __hasNotDataInBuffer(self):
1241 1241 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
1242 1242 return 1
1243 1243 return 0
1244 1244
1245 1245
1246 1246 def getBlockDimension(self):
1247 1247 """
1248 1248 Obtiene la cantidad de puntos a leer por cada bloque de datos
1249 1249
1250 1250 Affected:
1251 1251 self.blocksize
1252 1252
1253 1253 Return:
1254 1254 None
1255 1255 """
1256 1256 pts2read = self.processingHeaderObj.profilesPerBlock * self.processingHeaderObj.nHeights * self.systemHeaderObj.nChannels
1257 1257 self.blocksize = pts2read
1258 1258
1259 1259
1260 1260 def readBlock(self):
1261 1261 """
1262 1262 readBlock lee el bloque de datos desde la posicion actual del puntero del archivo
1263 1263 (self.fp) y actualiza todos los parametros relacionados al bloque de datos
1264 1264 (metadata + data). La data leida es almacenada en el buffer y el contador del buffer
1265 1265 es seteado a 0
1266 1266
1267 1267 Inputs:
1268 1268 None
1269 1269
1270 1270 Return:
1271 1271 None
1272 1272
1273 1273 Affected:
1274 1274 self.profileIndex
1275 1275 self.datablock
1276 1276 self.flagIsNewFile
1277 1277 self.flagIsNewBlock
1278 1278 self.nTotalBlocks
1279 1279
1280 1280 Exceptions:
1281 1281 Si un bloque leido no es un bloque valido
1282 1282 """
1283 1283
1284 1284 junk = numpy.fromfile( self.fp, self.dtype, self.blocksize )
1285 1285
1286 1286 try:
1287 1287 junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
1288 1288 except:
1289 1289 print "The read block (%3d) has not enough data" %self.nReadBlocks
1290 1290 return 0
1291 1291
1292 1292 junk = numpy.transpose(junk, (2,0,1))
1293 1293 self.datablock = junk['real'] + junk['imag']*1j
1294 1294
1295 1295 self.profileIndex = 0
1296 1296
1297 1297 self.flagIsNewFile = 0
1298 1298 self.flagIsNewBlock = 1
1299 1299
1300 1300 self.nTotalBlocks += 1
1301 1301 self.nReadBlocks += 1
1302 1302
1303 1303 return 1
1304 1304
1305 1305
1306 1306 def getData(self):
1307 1307 """
1308 1308 getData obtiene una unidad de datos del buffer de lectura y la copia a la clase "Voltage"
1309 1309 con todos los parametros asociados a este (metadata). cuando no hay datos en el buffer de
1310 1310 lectura es necesario hacer una nueva lectura de los bloques de datos usando "readNextBlock"
1311 1311
1312 1312 Ademas incrementa el contador del buffer en 1.
1313 1313
1314 1314 Return:
1315 1315 data : retorna un perfil de voltages (alturas * canales) copiados desde el
1316 1316 buffer. Si no hay mas archivos a leer retorna None.
1317 1317
1318 1318 Variables afectadas:
1319 1319 self.dataOut
1320 1320 self.profileIndex
1321 1321
1322 1322 Affected:
1323 1323 self.dataOut
1324 1324 self.profileIndex
1325 1325 self.flagTimeBlock
1326 1326 self.flagIsNewBlock
1327 1327 """
1328 1328
1329 1329 if self.flagNoMoreFiles:
1330 1330 self.dataOut.flagNoData = True
1331 1331 print 'Process finished'
1332 1332 return 0
1333 1333
1334 1334 self.flagTimeBlock = 0
1335 1335 self.flagIsNewBlock = 0
1336 1336
1337 1337 if self.__hasNotDataInBuffer():
1338 1338
1339 1339 if not( self.readNextBlock() ):
1340 1340 return 0
1341 1341
1342 1342 # self.updateDataHeader()
1343 1343
1344 1344 #data es un numpy array de 3 dmensiones (perfiles, alturas y canales)
1345 1345
1346 1346 if self.datablock == None:
1347 1347 self.dataOut.flagNoData = True
1348 1348 return 0
1349 1349
1350 1350 self.dataOut.data = self.datablock[:,self.profileIndex,:]
1351 1351
1352 1352 self.dataOut.dtype = self.dtype
1353 1353
1354 1354 # self.dataOut.nChannels = self.systemHeaderObj.nChannels
1355 1355
1356 1356 # self.dataOut.nHeights = self.processingHeaderObj.nHeights
1357 1357
1358 1358 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
1359 1359
1360 1360 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
1361 1361
1362 1362 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
1363 1363
1364 1364 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
1365 1365
1366 1366 # self.dataOut.channelIndexList = range(self.systemHeaderObj.nChannels)
1367 1367
1368 1368 self.dataOut.flagTimeBlock = self.flagTimeBlock
1369 1369
1370 1370 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000. + self.profileIndex * self.ippSeconds
1371 1371
1372 1372 self.dataOut.ippSeconds = self.ippSeconds
1373 1373
1374 1374 self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt
1375 1375
1376 1376 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
1377 1377
1378 1378 self.dataOut.flagShiftFFT = False
1379 1379
1380 1380 if self.processingHeaderObj.code != None:
1381 1381 self.dataOut.nCode = self.processingHeaderObj.nCode
1382 1382
1383 1383 self.dataOut.nBaud = self.processingHeaderObj.nBaud
1384 1384
1385 1385 self.dataOut.code = self.processingHeaderObj.code
1386 1386
1387 1387 self.profileIndex += 1
1388 1388
1389 1389 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
1390 1390
1391 1391 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
1392 1392
1393 1393 self.dataOut.flagNoData = False
1394 1394
1395 1395 # print self.profileIndex, self.dataOut.utctime
1396 1396 # if self.profileIndex == 800:
1397 1397 # a=1
1398 1398
1399 1399 return self.dataOut.data
1400 1400
1401 1401
class VoltageWriter(JRODataWriter):
    """
    Writes voltage data to processed files (.r). Writing is always performed
    block by block.
    """

    ext = ".r"

    optchar = "D"

    shapeBuffer = None


    def __init__(self):
        """
        VoltageWriter constructor: initializes every writing attribute, including
        the four header objects, to its default value.

        Affected:
            self.dataOut

        Return: None
        """

        self.nTotalBlocks = 0

        self.profileIndex = 0

        self.isConfig = False

        self.fp = None

        self.flagIsNewFile = 1

        self.flagIsNewBlock = 0

        self.setFile = None

        self.dtype = None

        self.path = None

        self.filename = None

        self.basicHeaderObj = BasicHeader()

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()

    def hasAllDataInBuffer(self):
        # The block buffer is complete once every profile slot has been filled.
        if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
            return 1
        return 0

    def setBlockDimension(self):
        """
        Computes the dimensional shapes of the sub-blocks that compose a data
        block and allocates the complex write buffer.

        Affected:
            self.shapeBuffer
            self.datablock

        Return: None
        """
        self.shapeBuffer = (self.processingHeaderObj.profilesPerBlock,
                            self.processingHeaderObj.nHeights,
                            self.systemHeaderObj.nChannels)

        self.datablock = numpy.zeros((self.systemHeaderObj.nChannels,
                                      self.processingHeaderObj.profilesPerBlock,
                                      self.processingHeaderObj.nHeights),
                                     dtype=numpy.dtype('complex'))

    def writeBlock(self):
        """
        Writes the buffer to the currently open file.

        Affected:
            self.profileIndex
            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
            self.blockIndex

        Return: None
        """
        data = numpy.zeros( self.shapeBuffer, self.dtype )

        # (channels, profiles, heights) -> (profiles, heights, channels) on disk
        junk = numpy.transpose(self.datablock, (1,2,0))

        data['real'] = junk.real
        data['imag'] = junk.imag

        data = data.reshape( (-1) )

        data.tofile( self.fp )

        self.datablock.fill(0)

        self.profileIndex = 0
        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1

        self.blockIndex += 1
        self.nTotalBlocks += 1

    def putData(self):
        """
        Stores one profile in the block buffer and writes the block to file when
        the buffer is complete.

        Affected:
            self.flagIsNewBlock
            self.profileIndex

        Return:
            0 : no data available or no more files can be written
            1 : the profile was buffered (and possibly a block written)
        """
        if self.dataOut.flagNoData:
            return 0

        self.flagIsNewBlock = 0

        if self.dataOut.flagTimeBlock:
            # A time discontinuity restarts the buffer in a new file.
            self.datablock.fill(0)
            self.profileIndex = 0
            self.setNextFile()

        if self.profileIndex == 0:
            self.getBasicHeader()

        self.datablock[:,self.profileIndex,:] = self.dataOut.data

        self.profileIndex += 1

        if self.hasAllDataInBuffer():
            self.writeNextBlock()

        return 1

    def __getProcessFlags(self):
        # Maps the output dtype and the processing options to the PROCFLAG
        # bitmask stored in the processing header.
        processFlags = 0

        dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
        dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
        dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
        dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
        dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
        dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]

        datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
                             PROCFLAG.DATATYPE_SHORT,
                             PROCFLAG.DATATYPE_LONG,
                             PROCFLAG.DATATYPE_INT64,
                             PROCFLAG.DATATYPE_FLOAT,
                             PROCFLAG.DATATYPE_DOUBLE]

        for index in range(len(dtypeList)):
            if self.dataOut.dtype == dtypeList[index]:
                dtypeValue = datatypeValueList[index]
                break

        processFlags += dtypeValue

        if self.dataOut.flagDecodeData:
            processFlags += PROCFLAG.DECODE_DATA

        if self.dataOut.flagDeflipData:
            processFlags += PROCFLAG.DEFLIP_DATA

        # "code" may be a numpy array: "!= None" would compare elementwise.
        if self.dataOut.code is not None:
            processFlags += PROCFLAG.DEFINE_PROCESS_CODE

        if self.dataOut.nCohInt > 1:
            processFlags += PROCFLAG.COHERENT_INTEGRATION

        return processFlags

    def __getBlockSize(self):
        '''
        Returns the size in bytes of one Voltage data block.
        '''

        dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
        dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
        dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
        dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
        dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
        dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
        datatypeValueList = [1,2,4,8,4,8]
        for index in range(len(dtypeList)):
            if self.dataOut.dtype == dtypeList[index]:
                datatypeValue = datatypeValueList[index]
                break

        # factor 2 accounts for the real and imaginary components
        blocksize = int(self.dataOut.nHeights * self.dataOut.nChannels * self.dataOut.nProfiles * datatypeValue * 2)

        return blocksize

    def getDataHeader(self):

        """
        Builds a copy of the First Header from the current dataOut object.

        Affected:
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.processingHeaderObj
            self.dtype

        Return:
            None
        """

        self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
        self.systemHeaderObj.nChannels = self.dataOut.nChannels
        self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()

        self.getBasicHeader()

        processingHeaderSize = 40 # bytes
        self.processingHeaderObj.dtype = 0 # Voltage
        self.processingHeaderObj.blockSize = self.__getBlockSize()
        self.processingHeaderObj.profilesPerBlock = self.profilesPerBlock
        self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
        self.processingHeaderObj.nWindows = 1 # could be 1 or self.dataOut.processingHeaderObj.nWindows
        self.processingHeaderObj.processFlags = self.__getProcessFlags()
        self.processingHeaderObj.nCohInt = self.dataOut.nCohInt
        self.processingHeaderObj.nIncohInt = 1 # source data is of Voltage type
        self.processingHeaderObj.totalSpectra = 0 # source data is of Voltage type

        # "code" may be a numpy array: "!= None" would compare elementwise.
        if self.dataOut.code is not None:
            self.processingHeaderObj.code = self.dataOut.code
            self.processingHeaderObj.nCode = self.dataOut.nCode
            self.processingHeaderObj.nBaud = self.dataOut.nBaud
            codesize = int(8 + 4 * self.dataOut.nCode * self.dataOut.nBaud)
            processingHeaderSize += codesize

        if self.processingHeaderObj.nWindows != 0:
            self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
            self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
            self.processingHeaderObj.nHeights = self.dataOut.nHeights
            self.processingHeaderObj.samplesWin = self.dataOut.nHeights
            processingHeaderSize += 12

        self.processingHeaderObj.size = processingHeaderSize
1666 1666
1667 1667 class SpectraReader(JRODataReader):
1668 1668 """
1669 1669 Esta clase permite leer datos de espectros desde archivos procesados (.pdata). La lectura
1670 1670 de los datos siempre se realiza por bloques. Los datos leidos (array de 3 dimensiones)
1671 1671 son almacenados en tres buffer's para el Self Spectra, el Cross Spectra y el DC Channel.
1672 1672
1673 1673 paresCanalesIguales * alturas * perfiles (Self Spectra)
1674 1674 paresCanalesDiferentes * alturas * perfiles (Cross Spectra)
1675 1675 canales * alturas (DC Channels)
1676 1676
1677 1677 Esta clase contiene instancias (objetos) de las clases BasicHeader, SystemHeader,
1678 1678 RadarControllerHeader y Spectra. Los tres primeros se usan para almacenar informacion de la
1679 1679 cabecera de datos (metadata), y el cuarto (Spectra) para obtener y almacenar un bloque de
1680 1680 datos desde el "buffer" cada vez que se ejecute el metodo "getData".
1681 1681
1682 1682 Example:
1683 1683 dpath = "/home/myuser/data"
1684 1684
1685 1685 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
1686 1686
1687 1687 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
1688 1688
1689 1689 readerObj = SpectraReader()
1690 1690
1691 1691 readerObj.setup(dpath, startTime, endTime)
1692 1692
1693 1693 while(True):
1694 1694
1695 1695 readerObj.getData()
1696 1696
1697 1697 print readerObj.data_spc
1698 1698
1699 1699 print readerObj.data_cspc
1700 1700
1701 1701 print readerObj.data_dc
1702 1702
1703 1703 if readerObj.flagNoMoreFiles:
1704 1704 break
1705 1705
1706 1706 """
1707 1707
1708 1708 pts2read_SelfSpectra = 0
1709 1709
1710 1710 pts2read_CrossSpectra = 0
1711 1711
1712 1712 pts2read_DCchannels = 0
1713 1713
1714 1714 ext = ".pdata"
1715 1715
1716 1716 optchar = "P"
1717 1717
1718 1718 dataOut = None
1719 1719
1720 1720 nRdChannels = None
1721 1721
1722 1722 nRdPairs = None
1723 1723
1724 1724 rdPairList = []
1725 1725
1726 1726
1727 1727 def __init__(self):
1728 1728 """
1729 1729 Inicializador de la clase SpectraReader para la lectura de datos de espectros.
1730 1730
1731 1731 Inputs:
1732 1732 dataOut : Objeto de la clase Spectra. Este objeto sera utilizado para
1733 1733 almacenar un perfil de datos cada vez que se haga un requerimiento
1734 1734 (getData). El perfil sera obtenido a partir del buffer de datos,
1735 1735 si el buffer esta vacio se hara un nuevo proceso de lectura de un
1736 1736 bloque de datos.
1737 1737 Si este parametro no es pasado se creara uno internamente.
1738 1738
1739 1739 Affected:
1740 1740 self.dataOut
1741 1741
1742 1742 Return : None
1743 1743 """
1744 1744
1745 1745 self.isConfig = False
1746 1746
1747 1747 self.pts2read_SelfSpectra = 0
1748 1748
1749 1749 self.pts2read_CrossSpectra = 0
1750 1750
1751 1751 self.pts2read_DCchannels = 0
1752 1752
1753 1753 self.datablock = None
1754 1754
1755 1755 self.utc = None
1756 1756
1757 1757 self.ext = ".pdata"
1758 1758
1759 1759 self.optchar = "P"
1760 1760
1761 1761 self.basicHeaderObj = BasicHeader()
1762 1762
1763 1763 self.systemHeaderObj = SystemHeader()
1764 1764
1765 1765 self.radarControllerHeaderObj = RadarControllerHeader()
1766 1766
1767 1767 self.processingHeaderObj = ProcessingHeader()
1768 1768
1769 1769 self.online = 0
1770 1770
1771 1771 self.fp = None
1772 1772
1773 1773 self.idFile = None
1774 1774
1775 1775 self.dtype = None
1776 1776
1777 1777 self.fileSizeByHeader = None
1778 1778
1779 1779 self.filenameList = []
1780 1780
1781 1781 self.filename = None
1782 1782
1783 1783 self.fileSize = None
1784 1784
1785 1785 self.firstHeaderSize = 0
1786 1786
1787 1787 self.basicHeaderSize = 24
1788 1788
1789 1789 self.pathList = []
1790 1790
1791 1791 self.lastUTTime = 0
1792 1792
1793 1793 self.maxTimeStep = 30
1794 1794
1795 1795 self.flagNoMoreFiles = 0
1796 1796
1797 1797 self.set = 0
1798 1798
1799 1799 self.path = None
1800 1800
1801 1801 self.delay = 3 #seconds
1802 1802
1803 1803 self.nTries = 3 #quantity tries
1804 1804
1805 1805 self.nFiles = 3 #number of files for searching
1806 1806
1807 1807 self.nReadBlocks = 0
1808 1808
1809 1809 self.flagIsNewFile = 1
1810 1810
1811 1811 self.ippSeconds = 0
1812 1812
1813 1813 self.flagTimeBlock = 0
1814 1814
1815 1815 self.flagIsNewBlock = 0
1816 1816
1817 1817 self.nTotalBlocks = 0
1818 1818
1819 1819 self.blocksize = 0
1820 1820
1821 1821 self.dataOut = self.createObjByDefault()
1822 1822
1823 1823
1824 1824 def createObjByDefault(self):
1825 1825
1826 1826 dataObj = Spectra()
1827 1827
1828 1828 return dataObj
1829 1829
1830 1830 def __hasNotDataInBuffer(self):
1831 1831 return 1
1832 1832
1833 1833
1834 1834 def getBlockDimension(self):
1835 1835 """
1836 1836 Obtiene la cantidad de puntos a leer por cada bloque de datos
1837 1837
1838 1838 Affected:
1839 1839 self.nRdChannels
1840 1840 self.nRdPairs
1841 1841 self.pts2read_SelfSpectra
1842 1842 self.pts2read_CrossSpectra
1843 1843 self.pts2read_DCchannels
1844 1844 self.blocksize
1845 1845 self.dataOut.nChannels
1846 1846 self.dataOut.nPairs
1847 1847
1848 1848 Return:
1849 1849 None
1850 1850 """
1851 1851 self.nRdChannels = 0
1852 1852 self.nRdPairs = 0
1853 1853 self.rdPairList = []
1854 1854
1855 1855 for i in range(0, self.processingHeaderObj.totalSpectra*2, 2):
1856 1856 if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i+1]:
1857 1857 self.nRdChannels = self.nRdChannels + 1 #par de canales iguales
1858 1858 else:
1859 1859 self.nRdPairs = self.nRdPairs + 1 #par de canales diferentes
1860 1860 self.rdPairList.append((self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1]))
1861 1861
1862 1862 pts2read = self.processingHeaderObj.nHeights * self.processingHeaderObj.profilesPerBlock
1863 1863
1864 1864 self.pts2read_SelfSpectra = int(self.nRdChannels * pts2read)
1865 1865 self.blocksize = self.pts2read_SelfSpectra
1866 1866
1867 1867 if self.processingHeaderObj.flag_cspc:
1868 1868 self.pts2read_CrossSpectra = int(self.nRdPairs * pts2read)
1869 1869 self.blocksize += self.pts2read_CrossSpectra
1870 1870
1871 1871 if self.processingHeaderObj.flag_dc:
1872 1872 self.pts2read_DCchannels = int(self.systemHeaderObj.nChannels * self.processingHeaderObj.nHeights)
1873 1873 self.blocksize += self.pts2read_DCchannels
1874 1874
1875 1875 # self.blocksize = self.pts2read_SelfSpectra + self.pts2read_CrossSpectra + self.pts2read_DCchannels
1876 1876
1877 1877
1878 1878 def readBlock(self):
1879 1879 """
1880 1880 Lee el bloque de datos desde la posicion actual del puntero del archivo
1881 1881 (self.fp) y actualiza todos los parametros relacionados al bloque de datos
1882 1882 (metadata + data). La data leida es almacenada en el buffer y el contador del buffer
1883 1883 es seteado a 0
1884 1884
1885 1885 Return: None
1886 1886
1887 1887 Variables afectadas:
1888 1888
1889 1889 self.flagIsNewFile
1890 1890 self.flagIsNewBlock
1891 1891 self.nTotalBlocks
1892 1892 self.data_spc
1893 1893 self.data_cspc
1894 1894 self.data_dc
1895 1895
1896 1896 Exceptions:
1897 1897 Si un bloque leido no es un bloque valido
1898 1898 """
1899 1899 blockOk_flag = False
1900 1900 fpointer = self.fp.tell()
1901 1901
1902 1902 spc = numpy.fromfile( self.fp, self.dtype[0], self.pts2read_SelfSpectra )
1903 1903 spc = spc.reshape( (self.nRdChannels, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #transforma a un arreglo 3D
1904 1904
1905 1905 if self.processingHeaderObj.flag_cspc:
1906 1906 cspc = numpy.fromfile( self.fp, self.dtype, self.pts2read_CrossSpectra )
1907 1907 cspc = cspc.reshape( (self.nRdPairs, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #transforma a un arreglo 3D
1908 1908
1909 1909 if self.processingHeaderObj.flag_dc:
1910 1910 dc = numpy.fromfile( self.fp, self.dtype, self.pts2read_DCchannels ) #int(self.processingHeaderObj.nHeights*self.systemHeaderObj.nChannels) )
1911 1911 dc = dc.reshape( (self.systemHeaderObj.nChannels, self.processingHeaderObj.nHeights) ) #transforma a un arreglo 2D
1912 1912
1913 1913
1914 1914 if not(self.processingHeaderObj.shif_fft):
1915 1915 #desplaza a la derecha en el eje 2 determinadas posiciones
1916 1916 shift = int(self.processingHeaderObj.profilesPerBlock/2)
1917 1917 spc = numpy.roll( spc, shift , axis=2 )
1918 1918
1919 1919 if self.processingHeaderObj.flag_cspc:
1920 1920 #desplaza a la derecha en el eje 2 determinadas posiciones
1921 1921 cspc = numpy.roll( cspc, shift, axis=2 )
1922 1922
1923 1923
1924 1924 spc = numpy.transpose( spc, (0,2,1) )
1925 1925 self.data_spc = spc
1926 1926
1927 1927 if self.processingHeaderObj.flag_cspc:
1928 1928 cspc = numpy.transpose( cspc, (0,2,1) )
1929 1929 self.data_cspc = cspc['real'] + cspc['imag']*1j
1930 1930 else:
1931 1931 self.data_cspc = None
1932 1932
1933 1933 if self.processingHeaderObj.flag_dc:
1934 1934 self.data_dc = dc['real'] + dc['imag']*1j
1935 1935 else:
1936 1936 self.data_dc = None
1937 1937
1938 1938 self.flagIsNewFile = 0
1939 1939 self.flagIsNewBlock = 1
1940 1940
1941 1941 self.nTotalBlocks += 1
1942 1942 self.nReadBlocks += 1
1943 1943
1944 1944 return 1
1945 1945
1946 1946
1947 1947 def getData(self):
1948 1948 """
1949 1949 Copia el buffer de lectura a la clase "Spectra",
1950 1950 con todos los parametros asociados a este (metadata). cuando no hay datos en el buffer de
1951 1951 lectura es necesario hacer una nueva lectura de los bloques de datos usando "readNextBlock"
1952 1952
1953 1953 Return:
1954 1954 0 : Si no hay mas archivos disponibles
1955 1955 1 : Si hizo una buena copia del buffer
1956 1956
1957 1957 Affected:
1958 1958 self.dataOut
1959 1959
1960 1960 self.flagTimeBlock
1961 1961 self.flagIsNewBlock
1962 1962 """
1963 1963
1964 1964 if self.flagNoMoreFiles:
1965 1965 self.dataOut.flagNoData = True
1966 1966 print 'Process finished'
1967 1967 return 0
1968 1968
1969 1969 self.flagTimeBlock = 0
1970 1970 self.flagIsNewBlock = 0
1971 1971
1972 1972 if self.__hasNotDataInBuffer():
1973 1973
1974 1974 if not( self.readNextBlock() ):
1975 1975 self.dataOut.flagNoData = True
1976 1976 return 0
1977 1977
1978 1978 # self.updateDataHeader()
1979 1979
1980 1980 #data es un numpy array de 3 dmensiones (perfiles, alturas y canales)
1981 1981
1982 1982 if self.data_dc == None:
1983 1983 self.dataOut.flagNoData = True
1984 1984 return 0
1985 1985
1986 1986 self.dataOut.data_spc = self.data_spc
1987 1987
1988 1988 self.dataOut.data_cspc = self.data_cspc
1989 1989
1990 1990 self.dataOut.data_dc = self.data_dc
1991 1991
1992 1992 self.dataOut.flagTimeBlock = self.flagTimeBlock
1993 1993
1994 1994 self.dataOut.flagNoData = False
1995 1995
1996 1996 self.dataOut.dtype = self.dtype
1997 1997
1998 1998 # self.dataOut.nChannels = self.nRdChannels
1999 1999
2000 2000 self.dataOut.nPairs = self.nRdPairs
2001 2001
2002 2002 self.dataOut.pairsList = self.rdPairList
2003 2003
2004 2004 # self.dataOut.nHeights = self.processingHeaderObj.nHeights
2005 2005
2006 2006 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
2007 2007
2008 2008 self.dataOut.nFFTPoints = self.processingHeaderObj.profilesPerBlock
2009 2009
2010 2010 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
2011 2011
2012 2012 self.dataOut.nIncohInt = self.processingHeaderObj.nIncohInt
2013 2013
2014 2014 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
2015 2015
2016 2016 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
2017 2017
2018 2018 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
2019 2019
2020 2020 # self.dataOut.channelIndexList = range(self.systemHeaderObj.nChannels)
2021 2021
2022 2022 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000.#+ self.profileIndex * self.ippSeconds
2023 2023
2024 2024 self.dataOut.ippSeconds = self.ippSeconds
2025 2025
2026 2026 self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt * self.processingHeaderObj.nIncohInt * self.dataOut.nFFTPoints
2027 2027
2028 2028 self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft
2029 2029
2030 2030 # self.profileIndex += 1
2031 2031
2032 2032 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
2033 2033
2034 2034 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
2035 2035
2036 2036 return self.dataOut.data_spc
2037 2037
2038 2038
2039 2039 class SpectraWriter(JRODataWriter):
2040 2040
2041 2041 """
2042 2042 Esta clase permite escribir datos de espectros a archivos procesados (.pdata). La escritura
2043 2043 de los datos siempre se realiza por bloques.
2044 2044 """
2045 2045
2046 2046 ext = ".pdata"
2047 2047
2048 2048 optchar = "P"
2049 2049
2050 2050 shape_spc_Buffer = None
2051 2051
2052 2052 shape_cspc_Buffer = None
2053 2053
2054 2054 shape_dc_Buffer = None
2055 2055
2056 2056 data_spc = None
2057 2057
2058 2058 data_cspc = None
2059 2059
2060 2060 data_dc = None
2061 2061
2062 2062 # dataOut = None
2063 2063
2064 2064 def __init__(self):
2065 2065 """
2066 2066 Inicializador de la clase SpectraWriter para la escritura de datos de espectros.
2067 2067
2068 2068 Affected:
2069 2069 self.dataOut
2070 2070 self.basicHeaderObj
2071 2071 self.systemHeaderObj
2072 2072 self.radarControllerHeaderObj
2073 2073 self.processingHeaderObj
2074 2074
2075 2075 Return: None
2076 2076 """
2077 2077
2078 2078 self.isConfig = False
2079 2079
2080 2080 self.nTotalBlocks = 0
2081 2081
2082 2082 self.data_spc = None
2083 2083
2084 2084 self.data_cspc = None
2085 2085
2086 2086 self.data_dc = None
2087 2087
2088 2088 self.fp = None
2089 2089
2090 2090 self.flagIsNewFile = 1
2091 2091
2092 2092 self.nTotalBlocks = 0
2093 2093
2094 2094 self.flagIsNewBlock = 0
2095 2095
2096 2096 self.setFile = None
2097 2097
2098 2098 self.dtype = None
2099 2099
2100 2100 self.path = None
2101 2101
2102 2102 self.noMoreFiles = 0
2103 2103
2104 2104 self.filename = None
2105 2105
2106 2106 self.basicHeaderObj = BasicHeader()
2107 2107
2108 2108 self.systemHeaderObj = SystemHeader()
2109 2109
2110 2110 self.radarControllerHeaderObj = RadarControllerHeader()
2111 2111
2112 2112 self.processingHeaderObj = ProcessingHeader()
2113 2113
2114 2114
2115 2115 def hasAllDataInBuffer(self):
2116 2116 return 1
2117 2117
2118 2118
2119 2119 def setBlockDimension(self):
2120 2120 """
2121 2121 Obtiene las formas dimensionales del los subbloques de datos que componen un bloque
2122 2122
2123 2123 Affected:
2124 2124 self.shape_spc_Buffer
2125 2125 self.shape_cspc_Buffer
2126 2126 self.shape_dc_Buffer
2127 2127
2128 2128 Return: None
2129 2129 """
2130 2130 self.shape_spc_Buffer = (self.dataOut.nChannels,
2131 2131 self.processingHeaderObj.nHeights,
2132 2132 self.processingHeaderObj.profilesPerBlock)
2133 2133
2134 2134 self.shape_cspc_Buffer = (self.dataOut.nPairs,
2135 2135 self.processingHeaderObj.nHeights,
2136 2136 self.processingHeaderObj.profilesPerBlock)
2137 2137
2138 2138 self.shape_dc_Buffer = (self.dataOut.nChannels,
2139 2139 self.processingHeaderObj.nHeights)
2140 2140
2141 2141
2142 2142 def writeBlock(self):
2143 2143 """
2144 2144 Escribe el buffer en el file designado
2145 2145
2146 2146 Affected:
2147 2147 self.data_spc
2148 2148 self.data_cspc
2149 2149 self.data_dc
2150 2150 self.flagIsNewFile
2151 2151 self.flagIsNewBlock
2152 2152 self.nTotalBlocks
2153 2153 self.nWriteBlocks
2154 2154
2155 2155 Return: None
2156 2156 """
2157 2157
2158 2158 spc = numpy.transpose( self.data_spc, (0,2,1) )
2159 2159 if not( self.processingHeaderObj.shif_fft ):
2160 2160 spc = numpy.roll( spc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #desplaza a la derecha en el eje 2 determinadas posiciones
2161 2161 data = spc.reshape((-1))
2162 2162 data.tofile(self.fp)
2163 2163
2164 2164 if self.data_cspc != None:
2165 2165 data = numpy.zeros( self.shape_cspc_Buffer, self.dtype )
2166 2166 cspc = numpy.transpose( self.data_cspc, (0,2,1) )
2167 2167 if not( self.processingHeaderObj.shif_fft ):
2168 2168 cspc = numpy.roll( cspc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #desplaza a la derecha en el eje 2 determinadas posiciones
2169 2169 data['real'] = cspc.real
2170 2170 data['imag'] = cspc.imag
2171 2171 data = data.reshape((-1))
2172 2172 data.tofile(self.fp)
2173 2173
2174 2174 if self.data_dc != None:
2175 2175 data = numpy.zeros( self.shape_dc_Buffer, self.dtype )
2176 2176 dc = self.data_dc
2177 2177 data['real'] = dc.real
2178 2178 data['imag'] = dc.imag
2179 2179 data = data.reshape((-1))
2180 2180 data.tofile(self.fp)
2181 2181
2182 2182 self.data_spc.fill(0)
2183 2183 self.data_dc.fill(0)
2184 2184 if self.data_cspc != None:
2185 2185 self.data_cspc.fill(0)
2186 2186
2187 2187 self.flagIsNewFile = 0
2188 2188 self.flagIsNewBlock = 1
2189 2189 self.nTotalBlocks += 1
2190 2190 self.nWriteBlocks += 1
2191 2191 self.blockIndex += 1
2192 2192
2193 2193
2194 2194 def putData(self):
2195 2195 """
2196 2196 Setea un bloque de datos y luego los escribe en un file
2197 2197
2198 2198 Affected:
2199 2199 self.data_spc
2200 2200 self.data_cspc
2201 2201 self.data_dc
2202 2202
2203 2203 Return:
2204 2204 0 : Si no hay data o no hay mas files que puedan escribirse
2205 2205 1 : Si se escribio la data de un bloque en un file
2206 2206 """
2207 2207
2208 2208 if self.dataOut.flagNoData:
2209 2209 return 0
2210 2210
2211 2211 self.flagIsNewBlock = 0
2212 2212
2213 2213 if self.dataOut.flagTimeBlock:
2214 2214 self.data_spc.fill(0)
2215 2215 self.data_cspc.fill(0)
2216 2216 self.data_dc.fill(0)
2217 2217 self.setNextFile()
2218 2218
2219 2219 if self.flagIsNewFile == 0:
2220 2220 self.getBasicHeader()
2221 2221
2222 2222 self.data_spc = self.dataOut.data_spc
2223 2223 self.data_cspc = self.dataOut.data_cspc
2224 2224 self.data_dc = self.dataOut.data_dc
2225 2225
2226 2226 # #self.processingHeaderObj.dataBlocksPerFile)
2227 2227 if self.hasAllDataInBuffer():
2228 2228 # self.getDataHeader()
2229 2229 self.writeNextBlock()
2230 2230
2231 2231 return 1
2232 2232
2233 2233
2234 2234 def __getProcessFlags(self):
2235 2235
2236 2236 processFlags = 0
2237 2237
2238 2238 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
2239 2239 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
2240 2240 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
2241 2241 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
2242 2242 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
2243 2243 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
2244 2244
2245 2245 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
2246 2246
2247 2247
2248 2248
2249 2249 datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
2250 2250 PROCFLAG.DATATYPE_SHORT,
2251 2251 PROCFLAG.DATATYPE_LONG,
2252 2252 PROCFLAG.DATATYPE_INT64,
2253 2253 PROCFLAG.DATATYPE_FLOAT,
2254 2254 PROCFLAG.DATATYPE_DOUBLE]
2255 2255
2256 2256
2257 2257 for index in range(len(dtypeList)):
2258 2258 if self.dataOut.dtype == dtypeList[index]:
2259 2259 dtypeValue = datatypeValueList[index]
2260 2260 break
2261 2261
2262 2262 processFlags += dtypeValue
2263 2263
2264 2264 if self.dataOut.flagDecodeData:
2265 2265 processFlags += PROCFLAG.DECODE_DATA
2266 2266
2267 2267 if self.dataOut.flagDeflipData:
2268 2268 processFlags += PROCFLAG.DEFLIP_DATA
2269 2269
2270 2270 if self.dataOut.code != None:
2271 2271 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
2272 2272
2273 2273 if self.dataOut.nIncohInt > 1:
2274 2274 processFlags += PROCFLAG.INCOHERENT_INTEGRATION
2275 2275
2276 2276 if self.dataOut.data_dc != None:
2277 2277 processFlags += PROCFLAG.SAVE_CHANNELS_DC
2278 2278
2279 2279 return processFlags
2280 2280
2281 2281
2282 2282 def __getBlockSize(self):
2283 2283 '''
2284 2284 Este metodo determina la cantidad de bytes para un bloque de datos de tipo Spectra
2285 2285 '''
2286 2286
2287 2287 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
2288 2288 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
2289 2289 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
2290 2290 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
2291 2291 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
2292 2292 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
2293 2293
2294 2294 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
2295 2295 datatypeValueList = [1,2,4,8,4,8]
2296 2296 for index in range(len(dtypeList)):
2297 2297 if self.dataOut.dtype == dtypeList[index]:
2298 2298 datatypeValue = datatypeValueList[index]
2299 2299 break
2300 2300
2301 2301
2302 2302 pts2write = self.dataOut.nHeights * self.dataOut.nFFTPoints
2303 2303
2304 2304 pts2write_SelfSpectra = int(self.dataOut.nChannels * pts2write)
2305 2305 blocksize = (pts2write_SelfSpectra*datatypeValue)
2306 2306
2307 2307 if self.dataOut.data_cspc != None:
2308 2308 pts2write_CrossSpectra = int(self.dataOut.nPairs * pts2write)
2309 2309 blocksize += (pts2write_CrossSpectra*datatypeValue*2)
2310 2310
2311 2311 if self.dataOut.data_dc != None:
2312 2312 pts2write_DCchannels = int(self.dataOut.nChannels * self.dataOut.nHeights)
2313 2313 blocksize += (pts2write_DCchannels*datatypeValue*2)
2314 2314
2315 2315 blocksize = blocksize #* datatypeValue * 2 #CORREGIR ESTO
2316 2316
2317 2317 return blocksize
2318 2318
2319 2319 def getDataHeader(self):
2320 2320
2321 2321 """
2322 2322 Obtiene una copia del First Header
2323 2323
2324 2324 Affected:
2325 2325 self.systemHeaderObj
2326 2326 self.radarControllerHeaderObj
2327 2327 self.dtype
2328 2328
2329 2329 Return:
2330 2330 None
2331 2331 """
2332 2332
2333 2333 self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
2334 2334 self.systemHeaderObj.nChannels = self.dataOut.nChannels
2335 2335 self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()
2336 2336
2337 2337 self.getBasicHeader()
2338 2338
2339 2339 processingHeaderSize = 40 # bytes
2340 2340 self.processingHeaderObj.dtype = 0 # Voltage
2341 2341 self.processingHeaderObj.blockSize = self.__getBlockSize()
2342 2342 self.processingHeaderObj.profilesPerBlock = self.dataOut.nFFTPoints
2343 2343 self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
2344 2344 self.processingHeaderObj.nWindows = 1 #podria ser 1 o self.dataOut.processingHeaderObj.nWindows
2345 2345 self.processingHeaderObj.processFlags = self.__getProcessFlags()
2346 2346 self.processingHeaderObj.nCohInt = self.dataOut.nCohInt# Se requiere para determinar el valor de timeInterval
2347 2347 self.processingHeaderObj.nIncohInt = self.dataOut.nIncohInt
2348 2348 self.processingHeaderObj.totalSpectra = self.dataOut.nPairs + self.dataOut.nChannels
2349 2349
2350 2350 if self.processingHeaderObj.totalSpectra > 0:
2351 2351 channelList = []
2352 2352 for channel in range(self.dataOut.nChannels):
2353 2353 channelList.append(channel)
2354 2354 channelList.append(channel)
2355 2355
2356 2356 pairsList = []
2357 2357 for pair in self.dataOut.pairsList:
2358 2358 pairsList.append(pair[0])
2359 2359 pairsList.append(pair[1])
2360 2360 spectraComb = channelList + pairsList
2361 2361 spectraComb = numpy.array(spectraComb,dtype="u1")
2362 2362 self.processingHeaderObj.spectraComb = spectraComb
2363 2363 sizeOfSpcComb = len(spectraComb)
2364 2364 processingHeaderSize += sizeOfSpcComb
2365 2365
2366 2366 if self.dataOut.code != None:
2367 2367 self.processingHeaderObj.code = self.dataOut.code
2368 2368 self.processingHeaderObj.nCode = self.dataOut.nCode
2369 2369 self.processingHeaderObj.nBaud = self.dataOut.nBaud
2370 2370 nCodeSize = 4 # bytes
2371 2371 nBaudSize = 4 # bytes
2372 2372 codeSize = 4 # bytes
2373 2373 sizeOfCode = int(nCodeSize + nBaudSize + codeSize * self.dataOut.nCode * self.dataOut.nBaud)
2374 2374 processingHeaderSize += sizeOfCode
2375 2375
2376 2376 if self.processingHeaderObj.nWindows != 0:
2377 2377 self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
2378 2378 self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
2379 2379 self.processingHeaderObj.nHeights = self.dataOut.nHeights
2380 2380 self.processingHeaderObj.samplesWin = self.dataOut.nHeights
2381 2381 sizeOfFirstHeight = 4
2382 2382 sizeOfdeltaHeight = 4
2383 2383 sizeOfnHeights = 4
2384 2384 sizeOfWindows = (sizeOfFirstHeight + sizeOfdeltaHeight + sizeOfnHeights)*self.processingHeaderObj.nWindows
2385 2385 processingHeaderSize += sizeOfWindows
2386 2386
2387 2387 self.processingHeaderObj.size = processingHeaderSize
2388 2388
2389 2389 class SpectraHeisWriter():
2390 2390
2391 2391 i=0
2392 2392
2393 2393 def __init__(self, dataOut):
2394 2394
2395 2395 self.wrObj = FITS()
2396 2396 self.dataOut = dataOut
2397 2397
2398 2398 def isNumber(str):
2399 2399 """
2400 2400 Chequea si el conjunto de caracteres que componen un string puede ser convertido a un numero.
2401 2401
2402 2402 Excepciones:
2403 2403 Si un determinado string no puede ser convertido a numero
2404 2404 Input:
2405 2405 str, string que se analiza para determinar si es convertible a un numero o no
2406 2406
2407 2407 Return:
2408 2408 True : si el string es uno numerico
2409 2409 False : no es un string numerico
2410 2410 """
2411 2411 try:
2412 2412 float( str )
2413 2413 return True
2414 2414 except:
2415 2415 return False
2416 2416
2417 2417 def setup(self, wrpath,):
2418 2418
2419 2419 if not(os.path.exists(wrpath)):
2420 2420 os.mkdir(wrpath)
2421 2421
2422 2422 self.wrpath = wrpath
2423 2423 self.setFile = 0
2424 2424
2425 2425 def putData(self):
2426 2426 # self.wrObj.writeHeader(nChannels=self.dataOut.nChannels, nFFTPoints=self.dataOut.nFFTPoints)
2427 2427 #name = self.dataOut.utctime
2428 2428 name= time.localtime( self.dataOut.utctime)
2429 2429 ext=".fits"
2430 2430 #folder='D%4.4d%3.3d'%(name.tm_year,name.tm_yday)
2431 2431 subfolder = 'D%4.4d%3.3d' % (name.tm_year,name.tm_yday)
2432 2432
2433 2433 doypath = os.path.join( self.wrpath, subfolder )
2434 2434 if not( os.path.exists(doypath) ):
2435 2435 os.mkdir(doypath)
2436 2436 self.setFile += 1
2437 2437 file = 'D%4.4d%3.3d%3.3d%s' % (name.tm_year,name.tm_yday,self.setFile,ext)
2438 2438
2439 2439 filename = os.path.join(self.wrpath,subfolder, file)
2440 2440
2441 2441 # print self.dataOut.ippSeconds
2442 2442 freq=numpy.arange(-1*self.dataOut.nHeights/2.,self.dataOut.nHeights/2.)/(2*self.dataOut.ippSeconds)
2443 2443
2444 2444 col1=self.wrObj.setColF(name="freq", format=str(self.dataOut.nFFTPoints)+'E', array=freq)
2445 2445 col2=self.wrObj.writeData(name="P_Ch1",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[0,:]))
2446 2446 col3=self.wrObj.writeData(name="P_Ch2",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[1,:]))
2447 2447 col4=self.wrObj.writeData(name="P_Ch3",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[2,:]))
2448 2448 col5=self.wrObj.writeData(name="P_Ch4",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[3,:]))
2449 2449 col6=self.wrObj.writeData(name="P_Ch5",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[4,:]))
2450 2450 col7=self.wrObj.writeData(name="P_Ch6",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[5,:]))
2451 2451 col8=self.wrObj.writeData(name="P_Ch7",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[6,:]))
2452 2452 col9=self.wrObj.writeData(name="P_Ch8",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[7,:]))
2453 2453 #n=numpy.arange((100))
2454 2454 n=self.dataOut.data_spc[6,:]
2455 2455 a=self.wrObj.cFImage(n)
2456 2456 b=self.wrObj.Ctable(col1,col2,col3,col4,col5,col6,col7,col8,col9)
2457 2457 self.wrObj.CFile(a,b)
2458 2458 self.wrObj.wFile(filename)
2459 2459 return 1
2460 2460
2461 2461 class FITS:
2462 2462
2463 2463 name=None
2464 2464 format=None
2465 2465 array =None
2466 2466 data =None
2467 2467 thdulist=None
2468 2468
2469 2469 def __init__(self):
2470 2470
2471 2471 pass
2472 2472
2473 2473 def setColF(self,name,format,array):
2474 2474 self.name=name
2475 2475 self.format=format
2476 2476 self.array=array
2477 2477 a1=numpy.array([self.array],dtype=numpy.float32)
2478 2478 self.col1 = pyfits.Column(name=self.name, format=self.format, array=a1)
2479 2479 return self.col1
2480 2480
2481 2481 # def setColP(self,name,format,data):
2482 2482 # self.name=name
2483 2483 # self.format=format
2484 2484 # self.data=data
2485 2485 # a2=numpy.array([self.data],dtype=numpy.float32)
2486 2486 # self.col2 = pyfits.Column(name=self.name, format=self.format, array=a2)
2487 2487 # return self.col2
2488 2488
2489 2489 def writeHeader(self,):
2490 2490 pass
2491 2491
2492 2492 def writeData(self,name,format,data):
2493 2493 self.name=name
2494 2494 self.format=format
2495 2495 self.data=data
2496 2496 a2=numpy.array([self.data],dtype=numpy.float32)
2497 2497 self.col2 = pyfits.Column(name=self.name, format=self.format, array=a2)
2498 2498 return self.col2
2499 2499
2500 2500 def cFImage(self,n):
2501 2501 self.hdu= pyfits.PrimaryHDU(n)
2502 2502 return self.hdu
2503 2503
2504 2504 def Ctable(self,col1,col2,col3,col4,col5,col6,col7,col8,col9):
2505 2505 self.cols=pyfits.ColDefs( [col1,col2,col3,col4,col5,col6,col7,col8,col9])
2506 2506 self.tbhdu = pyfits.new_table(self.cols)
2507 2507 return self.tbhdu
2508 2508
2509 2509 def CFile(self,hdu,tbhdu):
2510 2510 self.thdulist=pyfits.HDUList([hdu,tbhdu])
2511 2511
2512 2512 def wFile(self,filename):
2513 2513 self.thdulist.writeto(filename) No newline at end of file
@@ -1,505 +1,511
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JROHeaderIO.py 151 2012-10-31 19:00:51Z murco $
5 5 '''
6
6 import sys
7 7 import numpy
8 8 import copy
9 9
10 10 class Header:
11 11
12 12 def __init__(self):
13 13 raise
14 14
15 15 def copy(self):
16 16 return copy.deepcopy(self)
17 17
18 18 def read():
19 19 pass
20 20
21 21 def write():
22 22 pass
23 23
24 24 class BasicHeader(Header):
25 25
26 26 size = None
27 27 version = None
28 28 dataBlock = None
29 29 utc = None
30 30 miliSecond = None
31 31 timeZone = None
32 32 dstFlag = None
33 33 errorCount = None
34 34 struct = None
35 35
36 36 def __init__(self):
37 37
38 38 self.size = 0
39 39 self.version = 0
40 40 self.dataBlock = 0
41 41 self.utc = 0
42 42 self.miliSecond = 0
43 43 self.timeZone = 0
44 44 self.dstFlag = 0
45 45 self.errorCount = 0
46 46 self.struct = numpy.dtype([
47 47 ('nSize','<u4'),
48 48 ('nVersion','<u2'),
49 49 ('nDataBlockId','<u4'),
50 50 ('nUtime','<u4'),
51 51 ('nMilsec','<u2'),
52 52 ('nTimezone','<i2'),
53 53 ('nDstflag','<i2'),
54 54 ('nErrorCount','<u4')
55 55 ])
56 56
57 57
58 58 def read(self, fp):
59 59 try:
60 60 header = numpy.fromfile(fp, self.struct,1)
61 61 self.size = int(header['nSize'][0])
62 62 self.version = int(header['nVersion'][0])
63 63 self.dataBlock = int(header['nDataBlockId'][0])
64 64 self.utc = int(header['nUtime'][0])
65 65 self.miliSecond = int(header['nMilsec'][0])
66 66 self.timeZone = int(header['nTimezone'][0])
67 67 self.dstFlag = int(header['nDstflag'][0])
68 68 self.errorCount = int(header['nErrorCount'][0])
69 except:
69
70 except Exception, e:
71 print "BasicHeader: " + e
70 72 return 0
71 73
72 74 return 1
73 75
74 76 def write(self, fp):
75 77 headerTuple = (self.size,self.version,self.dataBlock,self.utc,self.miliSecond,self.timeZone,self.dstFlag,self.errorCount)
76 78 header = numpy.array(headerTuple,self.struct)
77 79 header.tofile(fp)
78 80
79 81 return 1
80 82
81 83 class SystemHeader(Header):
82 84
83 85 size = None
84 86 nSamples = None
85 87 nProfiles = None
86 88 nChannels = None
87 89 adcResolution = None
88 90 pciDioBusWidth = None
89 91 struct = None
90 92
91 93 def __init__(self):
92 94 self.size = 0
93 95 self.nSamples = 0
94 96 self.nProfiles = 0
95 97 self.nChannels = 0
96 98 self.adcResolution = 0
97 99 self.pciDioBusWidth = 0
98 100 self.struct = numpy.dtype([
99 101 ('nSize','<u4'),
100 102 ('nNumSamples','<u4'),
101 103 ('nNumProfiles','<u4'),
102 104 ('nNumChannels','<u4'),
103 105 ('nADCResolution','<u4'),
104 106 ('nPCDIOBusWidth','<u4'),
105 107 ])
106 108
107 109
108 110 def read(self, fp):
109 111 try:
110 112 header = numpy.fromfile(fp,self.struct,1)
111 113 self.size = header['nSize'][0]
112 114 self.nSamples = header['nNumSamples'][0]
113 115 self.nProfiles = header['nNumProfiles'][0]
114 116 self.nChannels = header['nNumChannels'][0]
115 117 self.adcResolution = header['nADCResolution'][0]
116 118 self.pciDioBusWidth = header['nPCDIOBusWidth'][0]
117 except:
119
120 except Exception, e:
121 print "SystemHeader: " + e
118 122 return 0
119 123
120 124 return 1
121 125
122 126 def write(self, fp):
123 127 headerTuple = (self.size,self.nSamples,self.nProfiles,self.nChannels,self.adcResolution,self.pciDioBusWidth)
124 128 header = numpy.array(headerTuple,self.struct)
125 129 header.tofile(fp)
126 130
127 131 return 1
128 132
129 133 class RadarControllerHeader(Header):
130 134
131 135 size = None
132 136 expType = None
133 137 nTx = None
134 138 ipp = None
135 139 txA = None
136 140 txB = None
137 141 nWindows = None
138 142 numTaus = None
139 143 codeType = None
140 144 line6Function = None
141 145 line5Function = None
142 146 fClock = None
143 147 prePulseBefore = None
144 148 prePulserAfter = None
145 149 rangeIpp = None
146 150 rangeTxA = None
147 151 rangeTxB = None
148 152 struct = None
149 153
150 154 def __init__(self):
151 155 self.size = 0
152 156 self.expType = 0
153 157 self.nTx = 0
154 158 self.ipp = 0
155 159 self.txA = 0
156 160 self.txB = 0
157 161 self.nWindows = 0
158 162 self.numTaus = 0
159 163 self.codeType = 0
160 164 self.line6Function = 0
161 165 self.line5Function = 0
162 166 self.fClock = 0
163 167 self.prePulseBefore = 0
164 168 self.prePulserAfter = 0
165 169 self.rangeIpp = 0
166 170 self.rangeTxA = 0
167 171 self.rangeTxB = 0
168 172 self.struct = numpy.dtype([
169 173 ('nSize','<u4'),
170 174 ('nExpType','<u4'),
171 175 ('nNTx','<u4'),
172 176 ('fIpp','<f4'),
173 177 ('fTxA','<f4'),
174 178 ('fTxB','<f4'),
175 179 ('nNumWindows','<u4'),
176 180 ('nNumTaus','<u4'),
177 181 ('nCodeType','<u4'),
178 182 ('nLine6Function','<u4'),
179 183 ('nLine5Function','<u4'),
180 184 ('fClock','<f4'),
181 185 ('nPrePulseBefore','<u4'),
182 186 ('nPrePulseAfter','<u4'),
183 187 ('sRangeIPP','<a20'),
184 188 ('sRangeTxA','<a20'),
185 189 ('sRangeTxB','<a20'),
186 190 ])
187 191
188 192 self.samplingWindowStruct = numpy.dtype([('h0','<f4'),('dh','<f4'),('nsa','<u4')])
189 193
190 194 self.samplingWindow = None
191 195 self.nHeights = None
192 196 self.firstHeight = None
193 197 self.deltaHeight = None
194 198 self.samplesWin = None
195 199
196 200 self.nCode = None
197 201 self.nBaud = None
198 202 self.code = None
199 203 self.flip1 = None
200 204 self.flip2 = None
201 205
202 206 self.dynamic = numpy.array([],numpy.dtype('byte'))
203 207
204 208
205 209 def read(self, fp):
206 210 try:
207 211 startFp = fp.tell()
208 212 header = numpy.fromfile(fp,self.struct,1)
209 213 self.size = int(header['nSize'][0])
210 214 self.expType = int(header['nExpType'][0])
211 215 self.nTx = int(header['nNTx'][0])
212 216 self.ipp = float(header['fIpp'][0])
213 217 self.txA = float(header['fTxA'][0])
214 218 self.txB = float(header['fTxB'][0])
215 219 self.nWindows = int(header['nNumWindows'][0])
216 220 self.numTaus = int(header['nNumTaus'][0])
217 221 self.codeType = int(header['nCodeType'][0])
218 222 self.line6Function = int(header['nLine6Function'][0])
219 223 self.line5Function = int(header['nLine5Function'][0])
220 224 self.fClock = float(header['fClock'][0])
221 225 self.prePulseBefore = int(header['nPrePulseBefore'][0])
222 226 self.prePulserAfter = int(header['nPrePulseAfter'][0])
223 227 self.rangeIpp = header['sRangeIPP'][0]
224 228 self.rangeTxA = header['sRangeTxA'][0]
225 229 self.rangeTxB = header['sRangeTxB'][0]
226 230 # jump Dynamic Radar Controller Header
227 231 jumpFp = self.size - 116
228 232 self.dynamic = numpy.fromfile(fp,numpy.dtype('byte'),jumpFp)
229 233 #pointer backward to dynamic header and read
230 234 backFp = fp.tell() - jumpFp
231 235 fp.seek(backFp)
232 236
233 237 self.samplingWindow = numpy.fromfile(fp,self.samplingWindowStruct,self.nWindows)
234 238 self.nHeights = int(numpy.sum(self.samplingWindow['nsa']))
235 239 self.firstHeight = self.samplingWindow['h0']
236 240 self.deltaHeight = self.samplingWindow['dh']
237 241 self.samplesWin = self.samplingWindow['nsa']
238 242
239 243 self.Taus = numpy.fromfile(fp,'<f4',self.numTaus)
240 244
241 245 if self.codeType != 0:
242 246 self.nCode = int(numpy.fromfile(fp,'<u4',1))
243 247 self.nBaud = int(numpy.fromfile(fp,'<u4',1))
244 248 self.code = numpy.empty([self.nCode,self.nBaud],dtype='u1')
245 249 tempList = []
246 250 for ic in range(self.nCode):
247 temp = numpy.fromfile(fp,'u1',4*numpy.ceil(self.nBaud/32.))
251 temp = numpy.fromfile(fp,'u1',4*int(numpy.ceil(self.nBaud/32.)))
248 252 tempList.append(temp)
249 253 self.code[ic] = numpy.unpackbits(temp[::-1])[-1*self.nBaud:]
250 254 self.code = 2.0*self.code - 1.0
251 255
252 256 if self.line5Function == RCfunction.FLIP:
253 257 self.flip1 = numpy.fromfile(fp,'<u4',1)
254 258
255 259 if self.line6Function == RCfunction.FLIP:
256 260 self.flip2 = numpy.fromfile(fp,'<u4',1)
257 261
258 262 endFp = self.size + startFp
259 263 jumpFp = endFp - fp.tell()
260 264 if jumpFp > 0:
261 265 fp.seek(jumpFp)
262
263 except:
266
267 except Exception, e:
268 print "RadarControllerHeader: " + e
264 269 return 0
265 270
266 271 return 1
267 272
268 273 def write(self, fp):
269 274 headerTuple = (self.size,
270 275 self.expType,
271 276 self.nTx,
272 277 self.ipp,
273 278 self.txA,
274 279 self.txB,
275 280 self.nWindows,
276 281 self.numTaus,
277 282 self.codeType,
278 283 self.line6Function,
279 284 self.line5Function,
280 285 self.fClock,
281 286 self.prePulseBefore,
282 287 self.prePulserAfter,
283 288 self.rangeIpp,
284 289 self.rangeTxA,
285 290 self.rangeTxB)
286 291
287 292 header = numpy.array(headerTuple,self.struct)
288 293 header.tofile(fp)
289 294
290 295 dynamic = self.dynamic
291 296 dynamic.tofile(fp)
292 297
293 298 return 1
294 299
295 300
296 301
297 302 class ProcessingHeader(Header):
298 303
299 304 size = None
300 305 dtype = None
301 306 blockSize = None
302 307 profilesPerBlock = None
303 308 dataBlocksPerFile = None
304 309 nWindows = None
305 310 processFlags = None
306 311 nCohInt = None
307 312 nIncohInt = None
308 313 totalSpectra = None
309 314 struct = None
310 315 flag_dc = None
311 316 flag_cspc = None
312 317
313 318 def __init__(self):
314 319 self.size = 0
315 320 self.dtype = 0
316 321 self.blockSize = 0
317 322 self.profilesPerBlock = 0
318 323 self.dataBlocksPerFile = 0
319 324 self.nWindows = 0
320 325 self.processFlags = 0
321 326 self.nCohInt = 0
322 327 self.nIncohInt = 0
323 328 self.totalSpectra = 0
324 329 self.struct = numpy.dtype([
325 330 ('nSize','<u4'),
326 331 ('nDataType','<u4'),
327 332 ('nSizeOfDataBlock','<u4'),
328 333 ('nProfilesperBlock','<u4'),
329 334 ('nDataBlocksperFile','<u4'),
330 335 ('nNumWindows','<u4'),
331 336 ('nProcessFlags','<u4'),
332 337 ('nCoherentIntegrations','<u4'),
333 338 ('nIncoherentIntegrations','<u4'),
334 339 ('nTotalSpectra','<u4')
335 340 ])
336 341 self.samplingWindow = 0
337 342 self.structSamplingWindow = numpy.dtype([('h0','<f4'),('dh','<f4'),('nsa','<u4')])
338 343 self.nHeights = 0
339 344 self.firstHeight = 0
340 345 self.deltaHeight = 0
341 346 self.samplesWin = 0
342 347 self.spectraComb = 0
343 348 self.nCode = None
344 349 self.code = None
345 350 self.nBaud = None
346 351 self.shif_fft = False
347 352 self.flag_dc = False
348 353 self.flag_cspc = False
349 354
350 355 def read(self, fp):
351 356 try:
352 357 header = numpy.fromfile(fp,self.struct,1)
353 358 self.size = int(header['nSize'][0])
354 359 self.dtype = int(header['nDataType'][0])
355 360 self.blockSize = int(header['nSizeOfDataBlock'][0])
356 361 self.profilesPerBlock = int(header['nProfilesperBlock'][0])
357 362 self.dataBlocksPerFile = int(header['nDataBlocksperFile'][0])
358 363 self.nWindows = int(header['nNumWindows'][0])
359 364 self.processFlags = int(header['nProcessFlags'])
360 365 self.nCohInt = int(header['nCoherentIntegrations'][0])
361 366 self.nIncohInt = int(header['nIncoherentIntegrations'][0])
362 367 self.totalSpectra = int(header['nTotalSpectra'][0])
363 368 self.samplingWindow = numpy.fromfile(fp,self.structSamplingWindow,self.nWindows)
364 369 self.nHeights = int(numpy.sum(self.samplingWindow['nsa']))
365 370 self.firstHeight = int(self.samplingWindow['h0'][0])
366 371 self.deltaHeight = int(self.samplingWindow['dh'][0])
367 372 self.samplesWin = self.samplingWindow['nsa']
368 373 self.spectraComb = numpy.fromfile(fp,'u1',2*self.totalSpectra)
369 374
370 375 if ((self.processFlags & PROCFLAG.DEFINE_PROCESS_CODE) == PROCFLAG.DEFINE_PROCESS_CODE):
371 376 self.nCode = int(numpy.fromfile(fp,'<u4',1))
372 377 self.nBaud = int(numpy.fromfile(fp,'<u4',1))
373 378 self.code = numpy.fromfile(fp,'<f4',self.nCode*self.nBaud).reshape(self.nBaud,self.nCode)
374 379
375 380 if ((self.processFlags & PROCFLAG.SHIFT_FFT_DATA) == PROCFLAG.SHIFT_FFT_DATA):
376 381 self.shif_fft = True
377 382 else:
378 383 self.shif_fft = False
379 384
380 385 if ((self.processFlags & PROCFLAG.SAVE_CHANNELS_DC) == PROCFLAG.SAVE_CHANNELS_DC):
381 386 self.flag_dc = True
382 387
383 388 nChannels = 0
384 389 nPairs = 0
385 390 pairList = []
386 391
387 392 for i in range( 0, self.totalSpectra*2, 2 ):
388 393 if self.spectraComb[i] == self.spectraComb[i+1]:
389 394 nChannels = nChannels + 1 #par de canales iguales
390 395 else:
391 396 nPairs = nPairs + 1 #par de canales diferentes
392 397 pairList.append( (self.spectraComb[i], self.spectraComb[i+1]) )
393 398
394 399 self.flag_cspc = False
395 400 if nPairs > 0:
396 401 self.flag_cspc = True
397 402
398 except:
403 except Exception, e:
404 print "ProcessingHeader: " + e
399 405 return 0
400 406
401 407 return 1
402 408
403 409 def write(self, fp):
404 410 headerTuple = (self.size,
405 411 self.dtype,
406 412 self.blockSize,
407 413 self.profilesPerBlock,
408 414 self.dataBlocksPerFile,
409 415 self.nWindows,
410 416 self.processFlags,
411 417 self.nCohInt,
412 418 self.nIncohInt,
413 419 self.totalSpectra)
414 420
415 421 header = numpy.array(headerTuple,self.struct)
416 422 header.tofile(fp)
417 423
418 424 if self.nWindows != 0:
419 425 sampleWindowTuple = (self.firstHeight,self.deltaHeight,self.samplesWin)
420 426 samplingWindow = numpy.array(sampleWindowTuple,self.structSamplingWindow)
421 427 samplingWindow.tofile(fp)
422 428
423 429
424 430 if self.totalSpectra != 0:
425 431 spectraComb = numpy.array([],numpy.dtype('u1'))
426 432 spectraComb = self.spectraComb
427 433 spectraComb.tofile(fp)
428 434
429 435
430 436 if self.processFlags & PROCFLAG.DEFINE_PROCESS_CODE == PROCFLAG.DEFINE_PROCESS_CODE:
431 437 nCode = self.nCode #Probar con un dato que almacene codigo, hasta el momento no se hizo la prueba
432 438 nCode.tofile(fp)
433 439
434 440 nBaud = self.nBaud
435 441 nBaud.tofile(fp)
436 442
437 443 code = self.code.reshape(nCode*nBaud)
438 444 code.tofile(fp)
439 445
440 446 return 1
441 447
442 448 class RCfunction:
443 449 NONE=0
444 450 FLIP=1
445 451 CODE=2
446 452 SAMPLING=3
447 453 LIN6DIV256=4
448 454 SYNCHRO=5
449 455
450 456 class nCodeType:
451 457 NONE=0
452 458 USERDEFINE=1
453 459 BARKER2=2
454 460 BARKER3=3
455 461 BARKER4=4
456 462 BARKER5=5
457 463 BARKER7=6
458 464 BARKER11=7
459 465 BARKER13=8
460 466 AC128=9
461 467 COMPLEMENTARYCODE2=10
462 468 COMPLEMENTARYCODE4=11
463 469 COMPLEMENTARYCODE8=12
464 470 COMPLEMENTARYCODE16=13
465 471 COMPLEMENTARYCODE32=14
466 472 COMPLEMENTARYCODE64=15
467 473 COMPLEMENTARYCODE128=16
468 474 CODE_BINARY28=17
469 475
470 476 class PROCFLAG:
471 477 COHERENT_INTEGRATION = numpy.uint32(0x00000001)
472 478 DECODE_DATA = numpy.uint32(0x00000002)
473 479 SPECTRA_CALC = numpy.uint32(0x00000004)
474 480 INCOHERENT_INTEGRATION = numpy.uint32(0x00000008)
475 481 POST_COHERENT_INTEGRATION = numpy.uint32(0x00000010)
476 482 SHIFT_FFT_DATA = numpy.uint32(0x00000020)
477 483
478 484 DATATYPE_CHAR = numpy.uint32(0x00000040)
479 485 DATATYPE_SHORT = numpy.uint32(0x00000080)
480 486 DATATYPE_LONG = numpy.uint32(0x00000100)
481 487 DATATYPE_INT64 = numpy.uint32(0x00000200)
482 488 DATATYPE_FLOAT = numpy.uint32(0x00000400)
483 489 DATATYPE_DOUBLE = numpy.uint32(0x00000800)
484 490
485 491 DATAARRANGE_CONTIGUOUS_CH = numpy.uint32(0x00001000)
486 492 DATAARRANGE_CONTIGUOUS_H = numpy.uint32(0x00002000)
487 493 DATAARRANGE_CONTIGUOUS_P = numpy.uint32(0x00004000)
488 494
489 495 SAVE_CHANNELS_DC = numpy.uint32(0x00008000)
490 496 DEFLIP_DATA = numpy.uint32(0x00010000)
491 497 DEFINE_PROCESS_CODE = numpy.uint32(0x00020000)
492 498
493 499 ACQ_SYS_NATALIA = numpy.uint32(0x00040000)
494 500 ACQ_SYS_ECHOTEK = numpy.uint32(0x00080000)
495 501 ACQ_SYS_ADRXD = numpy.uint32(0x000C0000)
496 502 ACQ_SYS_JULIA = numpy.uint32(0x00100000)
497 503 ACQ_SYS_XXXXXX = numpy.uint32(0x00140000)
498 504
499 505 EXP_NAME_ESP = numpy.uint32(0x00200000)
500 506 CHANNEL_NAMES_ESP = numpy.uint32(0x00400000)
501 507
502 508 OPERATION_MASK = numpy.uint32(0x0000003F)
503 509 DATATYPE_MASK = numpy.uint32(0x00000FC0)
504 510 DATAARRANGE_MASK = numpy.uint32(0x00007000)
505 511 ACQ_SYS_MASK = numpy.uint32(0x001C0000) No newline at end of file
General Comments 0
You need to be logged in to leave comments. Login now