Minor changes
Miguel Valdez -
r754:c1673fc8e139
@@ -1,1646 +1,1646 @@
1 1 '''
2 2 Created on Jul 2, 2014
3 3
4 4 @author: roj-idl71
5 5 '''
6 6 import os
7 7 import sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import time, datetime
13 13 #import h5py
14 14 import traceback
15 15
16 16 try:
17 17 from gevent import sleep
18 18 except:
19 19 from time import sleep
20 20
21 21 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
22 22 from schainpy.model.data.jroheaderIO import get_dtype_index, get_numpy_dtype, get_procflag_dtype, get_dtype_width
23 23
24 24 LOCALTIME = True
25 25
26 26 def isNumber(cad):
27 27 """
28 28 Checks whether the characters that make up a string can be converted to a number.
29 29 
30 30 Exceptions:
31 31 If a given string cannot be converted to a number
32 32 Input:
33 33 cad, string to be analyzed to determine whether or not it can be converted to a number
34 34 
35 35 Return:
36 36 True : if the string is numeric
37 37 False : if the string is not numeric
38 38 """
39 39 try:
40 40 float( cad )
41 41 return True
42 42 except:
43 43 return False
44 44
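# Editor's sketch (illustrative only, not part of the original file): a minimal usage
# example of isNumber; it accepts anything float() accepts, including signs and exponents.
def _example_isNumber_usage():
    assert isNumber("2015") is True
    assert isNumber("-3.5e2") is True
    assert isNumber("d2015305") is False
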
45 45 def isFileInEpoch(filename, startUTSeconds, endUTSeconds):
46 46 """
47 47 This function determines whether or not a data file falls within the specified date range.
48 48 
49 49 Inputs:
50 50 filename : full name of the data file in Jicamarca format (.r)
51 51 
52 52 startUTSeconds : start date of the selected range. The date is given in
53 53 seconds counted from 01/01/1970.
54 54 endUTSeconds : end date of the selected range. The date is given in
55 55 seconds counted from 01/01/1970.
56 56 
57 57 Return:
58 58 Boolean : returns True if the data file contains data within the specified
59 59 date range, otherwise it returns False.
60 60 
61 61 Exceptions:
62 62 If the file does not exist or cannot be opened
63 63 If the header cannot be read.
64 64
65 65 """
66 66 basicHeaderObj = BasicHeader(LOCALTIME)
67 67
68 68 try:
69 69 fp = open(filename,'rb')
70 70 except IOError:
71 71 print "The file %s can't be opened" %(filename)
72 72 return 0
73 73
74 74 sts = basicHeaderObj.read(fp)
75 75 fp.close()
76 76
77 77 if not(sts):
78 78 print "Skipping the file %s because it does not have a valid header" %(filename)
79 79 return 0
80 80
81 81 if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
82 82 return 0
83 83
84 84 return 1
85 85
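# Editor's sketch (illustrative only, not part of the original file): the epoch bounds are
# plain seconds since 01/01/1970. calendar.timegm converts a UTC datetime; time.mktime would
# be the local-time counterpart (see the LOCALTIME flag above). The date values are hypothetical.
def _example_isFileInEpoch_usage(filename):
    import calendar
    startUTSeconds = calendar.timegm(datetime.datetime(2014, 7, 1, 0, 0, 0).timetuple())
    endUTSeconds = calendar.timegm(datetime.datetime(2014, 7, 2, 23, 59, 59).timetuple())
    return isFileInEpoch(filename, startUTSeconds, endUTSeconds)
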
86 86 def isFileInTimeRange(filename, startDate, endDate, startTime, endTime):
87 87 """
88 88 Returns the datetime of the data file if it falls within the specified time range, otherwise None.
89 89 
90 90 Inputs:
91 91 filename : full name of the data file in Jicamarca format (.r)
92 92 
93 93 startDate : start date of the selected range as a datetime.date
94 94 
95 95 endDate : end date of the selected range as a datetime.date
96 96 
97 97 startTime : start time of the selected range as a datetime.time
98 98 
99 99 endTime : end time of the selected range as a datetime.time
100 100 
101 101 Return:
102 102 datetime : returns the datetime of the file if it contains data within the specified
103 103 time range, otherwise it returns None.
104 104 
105 105 Exceptions:
106 106 If the file does not exist or cannot be opened
107 107 If the header cannot be read.
108 108 
109 109 """
110 110
111 111
112 112 try:
113 113 fp = open(filename,'rb')
114 114 except IOError:
115 115 print "The file %s can't be opened" %(filename)
116 116 return None
117 117
118 118 basicHeaderObj = BasicHeader(LOCALTIME)
119 119 sts = basicHeaderObj.read(fp)
120 120 fp.close()
121 121
122 122 if not(sts):
123 123 print "Skipping the file %s because it does not have a valid header" %(filename)
124 124 return None
125 125 
126 126 thisDatetime = basicHeaderObj.datatime
127 127 thisDate = thisDatetime.date()
128 128 thisTime = thisDatetime.time()
129 129
130 130 #General case
131 131 # o>>>>>>>>>>>>>><<<<<<<<<<<<<<o
132 132 #-----------o----------------------------o-----------
133 133 # startTime endTime
134 134
135 135 if endTime >= startTime:
136 136 if (thisTime < startTime) or (thisTime > endTime):
137 137 return None
138 138
139 139 return thisDatetime
140 140
141 141 #If endTime < startTime then endTime belongs to the next day
142 142
143 143
144 144 #<<<<<<<<<<<o o>>>>>>>>>>>
145 145 #-----------o----------------------------o-----------
146 146 # endTime startTime
147 147
148 148 if (thisDate == startDate) and (thisTime < startTime):
149 149 return None
150 150
151 151 if (thisDate == endDate) and (thisTime > endTime):
152 152 return None
153 153
154 154 if (thisTime < startTime) and (thisTime > endTime):
155 155 return None
156 156
157 157 return thisDatetime
158 158
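# Editor's sketch (illustrative only, not part of the original file): when endTime < startTime
# the time window is treated as wrapping past midnight, e.g. 22:00 through 02:00 of the next
# day, which is the second case handled above. The values are hypothetical.
def _example_isFileInTimeRange_usage(filename):
    startDate = datetime.date(2014, 7, 1)
    endDate = datetime.date(2014, 7, 2)
    startTime = datetime.time(22, 0, 0)   # overnight window: 22:00 ...
    endTime = datetime.time(2, 0, 0)      # ... to 02:00 of the next day
    return isFileInTimeRange(filename, startDate, endDate, startTime, endTime)
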
159 159 def isFolderInDateRange(folder, startDate=None, endDate=None):
160 160 """
161 161 Returns 1 if the data folder falls within the specified date range.
162 162 
163 163 Inputs:
164 164 folder : full name of the directory.
165 165 Its format should be "/path_root/?YYYYDDD"
166 166 
167 167 where:
168 168 YYYY : year (e.g. 2015)
169 169 DDD : day of the year (e.g. 305)
170 170 
171 171 startDate : start date of the selected range as a datetime.date
172 172 
173 173 endDate : end date of the selected range as a datetime.date
174 174 
175 175 Return:
176 176 Boolean : returns True if the folder contains data within the specified
177 177 date range, otherwise it returns False.
178 178 Exceptions:
179 179 If the directory does not have the proper format
180 180 """
181 181
182 182 basename = os.path.basename(folder)
183 183
184 184 if not isRadarFolder(basename):
185 185 print "The folder %s does not have the right format" %folder
186 186 return 0
187 187
188 188 if startDate and endDate:
189 189 thisDate = getDateFromRadarFolder(basename)
190 190
191 191 if thisDate < startDate:
192 192 return 0
193 193
194 194 if thisDate > endDate:
195 195 return 0
196 196
197 197 return 1
198 198
199 199 def isFileInDateRange(filename, startDate=None, endDate=None):
200 200 """
201 201 Returns 1 if the data file falls within the specified date range.
202 202 
203 203 Inputs:
204 204 filename : full name of the data file in Jicamarca format (.r)
205 205 
206 206 Its format should be "?YYYYDDDsss"
207 207 
208 208 where:
209 209 YYYY : year (e.g. 2015)
210 210 DDD : day of the year (e.g. 305)
211 211 sss : set
212 212 
213 213 startDate : start date of the selected range as a datetime.date
214 214 
215 215 endDate : end date of the selected range as a datetime.date
216 216 
217 217 Return:
218 218 Boolean : returns True if the data file contains data within the specified
219 219 date range, otherwise it returns False.
220 220 Exceptions:
221 221 If the file does not have the proper format
222 222 """
223 223
224 224 basename = os.path.basename(filename)
225 225
226 226 if not isRadarFile(basename):
227 227 print "The filename %s does not have the right format" %filename
228 228 return 0
229 229
230 230 if startDate and endDate:
231 231 thisDate = getDateFromRadarFile(basename)
232 232
233 233 if thisDate < startDate:
234 234 return 0
235 235
236 236 if thisDate > endDate:
237 237 return 0
238 238
239 239 return 1
240 240
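# Editor's sketch (illustrative only, not part of the original file): isFileInDateRange can be
# used directly to filter a list of ?YYYYDDDsss file names; the dates below are hypothetical.
def _example_isFileInDateRange_usage(filenames):
    startDate = datetime.date(2014, 7, 1)
    endDate = datetime.date(2014, 7, 31)
    return [thisFile for thisFile in filenames
            if isFileInDateRange(thisFile, startDate, endDate)]
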
241 241 def getFileFromSet(path, ext, set):
242 242 validFilelist = []
243 243 fileList = os.listdir(path)
244 244
245 245 # 0 1234 567 89A BCDE
246 246 # H YYYY DDD SSS .ext
247 247
248 248 for thisFile in fileList:
249 249 try:
250 250 year = int(thisFile[1:5])
251 251 doy = int(thisFile[5:8])
252 252 except:
253 253 continue
254 254
255 255 if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
256 256 continue
257 257
258 258 validFilelist.append(thisFile)
259 259
260 260 myfile = fnmatch.filter(validFilelist,'*%4.4d%3.3d%3.3d*'%(year,doy,set))
261 261
262 262 if len(myfile)!= 0:
263 263 return myfile[0]
264 264 else:
265 265 filename = '*%4.4d%3.3d%3.3d%s'%(year,doy,set,ext.lower())
266 266 print 'The filename %s does not exist'%filename
267 267 print '...falling back to the last file:'
268 268
269 269 if validFilelist:
270 270 validFilelist = sorted( validFilelist, key=str.lower )
271 271 return validFilelist[-1]
272 272
273 273 return None
274 274
275 275 def getlastFileFromPath(path, ext):
276 276 """
277 277 Filters the file list, keeping only the files that match the "PYYYYDDDSSS.ext" format,
278 278 and returns the last file of the filtered list.
279 279 
280 280 Input:
281 281 path : folder whose files (without path) will be listed and filtered
282 282 ext : extension of the files contained in the folder
283 283 
284 284 Return:
285 285 The last file of the given folder, without the path.
286 286 """
287 287 validFilelist = []
288 288 fileList = os.listdir(path)
289 289
290 290 # 0 1234 567 89A BCDE
291 291 # H YYYY DDD SSS .ext
292 292
293 293 for thisFile in fileList:
294 294
295 295 year = thisFile[1:5]
296 296 if not isNumber(year):
297 297 continue
298 298
299 299 doy = thisFile[5:8]
300 300 if not isNumber(doy):
301 301 continue
302 302
303 303 year = int(year)
304 304 doy = int(doy)
305 305
306 306 if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
307 307 continue
308 308
309 309 validFilelist.append(thisFile)
310 310
311 311 if validFilelist:
312 312 validFilelist = sorted( validFilelist, key=str.lower )
313 313 return validFilelist[-1]
314 314
315 315 return None
316 316
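# Editor's sketch (illustrative only, not part of the original file): getFileFromSet and
# getlastFileFromPath scan a flat folder of xYYYYDDDSSS.ext files. The throwaway folder
# below only illustrates the expected naming convention.
def _example_set_lookup():
    import tempfile, shutil
    folder = tempfile.mkdtemp()
    try:
        for thisSet in range(3):
            open(os.path.join(folder, "d2014183%03d.r" % thisSet), 'wb').close()
        assert getlastFileFromPath(folder, ".r") == "d2014183002.r"
        assert getFileFromSet(folder, ".r", 1) == "d2014183001.r"
    finally:
        shutil.rmtree(folder)
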
317 317 def checkForRealPath(path, foldercounter, year, doy, set, ext):
318 318 """
319 319 Because Linux is case sensitive, checkForRealPath finds the correct name of a path.
320 320 It tries several upper/lowercase name combinations in order to determine
321 321 the exact path of a given file.
322 322 
323 323 Example :
324 324 the correct file name is .../.../D2009307/P2009307367.ext
325 325 
326 326 The function then tries the following combinations
327 327 .../.../y2009307367.ext
328 328 .../.../Y2009307367.ext
329 329 .../.../x2009307/y2009307367.ext
330 330 .../.../x2009307/Y2009307367.ext
331 331 .../.../X2009307/y2009307367.ext
332 332 .../.../X2009307/Y2009307367.ext
333 333 in this case the last combination of letters is identical to the file being searched for
334 334 
335 335 Return:
336 336 If the right combination is found, it returns the full path and the file name;
337 337 otherwise it returns None as the path and the last uppercase name combination
338 338 as the filename
339 339 """
340 340 fullfilename = None
341 341 find_flag = False
342 342 filename = None
343 343
344 344 prefixDirList = [None,'d','D']
345 345 if ext.lower() == ".r": #voltage
346 346 prefixFileList = ['d','D']
347 347 elif ext.lower() == ".pdata": #spectra
348 348 prefixFileList = ['p','P']
349 349 else:
350 350 return None, filename
351 351
352 352 #sweep over the possible prefix combinations
353 353 for prefixDir in prefixDirList:
354 354 thispath = path
355 355 if prefixDir != None:
356 356 #build the directory name xYYYYDDD (x=d or x=D)
357 357 if foldercounter == 0:
358 358 thispath = os.path.join(path, "%s%04d%03d" % ( prefixDir, year, doy ))
359 359 else:
360 360 thispath = os.path.join(path, "%s%04d%03d_%02d" % ( prefixDir, year, doy , foldercounter))
361 361 for prefixFile in prefixFileList: #sweep over the two possible cases of the file prefix
362 362 filename = "%s%04d%03d%03d%s" % ( prefixFile, year, doy, set, ext ) #build the file name xYYYYDDDSSS.ext
363 363 fullfilename = os.path.join( thispath, filename ) #build the full path
364 364
365 365 if os.path.exists( fullfilename ): #check that it exists
366 366 find_flag = True
367 367 break
368 368 if find_flag:
369 369 break
370 370
371 371 if not(find_flag):
372 372 return None, filename
373 373
374 374 return fullfilename, filename
375 375
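# Editor's sketch (illustrative only, not part of the original file): checkForRealPath only
# probes the known upper/lowercase combinations on disk, it creates nothing. The path and
# numbers below are hypothetical.
def _example_checkForRealPath_usage():
    fullfilename, filename = checkForRealPath("/data", foldercounter=0,
                                              year=2014, doy=183, set=5, ext=".r")
    if fullfilename is None:
        print("[Reading] file %s was not found" % filename)
    return fullfilename
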
376 376 def isRadarFolder(folder):
377 377 try:
378 378 year = int(folder[1:5])
379 379 doy = int(folder[5:8])
380 380 except:
381 381 return 0
382 382
383 383 return 1
384 384
385 385 def isRadarFile(file):
386 386 try:
387 387 year = int(file[1:5])
388 388 doy = int(file[5:8])
389 389 set = int(file[8:11])
390 390 except:
391 391 return 0
392 392
393 393 return 1
394 394
395 395 def getDateFromRadarFile(file):
396 396 try:
397 397 year = int(file[1:5])
398 398 doy = int(file[5:8])
399 399 set = int(file[8:11])
400 400 except:
401 401 return None
402 402
403 403 thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy-1)
404 404 return thisDate
405 405
406 406 def getDateFromRadarFolder(folder):
407 407 try:
408 408 year = int(folder[1:5])
409 409 doy = int(folder[5:8])
410 410 except:
411 411 return None
412 412
413 413 thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy-1)
414 414 return thisDate
415 415
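# Editor's sketch (illustrative only, not part of the original file): Jicamarca names encode
# the date as year plus day of the year, so "d2014183" (folder) and "d2014183005.r" (file)
# both resolve to 2014-07-02 via datetime.date(year, 1, 1) + datetime.timedelta(doy - 1).
def _example_radar_name_parsing():
    assert isRadarFolder("d2014183") == 1
    assert isRadarFile("d2014183005.r") == 1
    assert getDateFromRadarFolder("d2014183") == datetime.date(2014, 7, 2)
    assert getDateFromRadarFile("d2014183005.r") == datetime.date(2014, 7, 2)
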
416 416 class JRODataIO:
417 417
418 418 c = 3E8
419 419
420 420 isConfig = False
421 421
422 422 basicHeaderObj = None
423 423
424 424 systemHeaderObj = None
425 425
426 426 radarControllerHeaderObj = None
427 427
428 428 processingHeaderObj = None
429 429
430 430 dtype = None
431 431
432 432 pathList = []
433 433
434 434 filenameList = []
435 435
436 436 filename = None
437 437
438 438 ext = None
439 439
440 440 flagIsNewFile = 1
441 441
442 442 flagDiscontinuousBlock = 0
443 443
444 444 flagIsNewBlock = 0
445 445
446 446 fp = None
447 447
448 448 firstHeaderSize = 0
449 449
450 450 basicHeaderSize = 24
451 451
452 452 versionFile = 1103
453 453
454 454 fileSize = None
455 455
456 456 # ippSeconds = None
457 457
458 458 fileSizeByHeader = None
459 459
460 460 fileIndex = None
461 461
462 462 profileIndex = None
463 463
464 464 blockIndex = None
465 465
466 466 nTotalBlocks = None
467 467
468 468 maxTimeStep = 30
469 469
470 470 lastUTTime = None
471 471
472 472 datablock = None
473 473
474 474 dataOut = None
475 475
476 476 blocksize = None
477 477
478 478 getByBlock = False
479 479
480 480 def __init__(self):
481 481
482 482 raise NotImplementedError
483 483
484 484 def run(self):
485 485
486 486 raise NotImplementedError
487 487
488 488 def getDtypeWidth(self):
489 489
490 490 dtype_index = get_dtype_index(self.dtype)
491 491 dtype_width = get_dtype_width(dtype_index)
492 492
493 493 return dtype_width
494 494
495 495 class JRODataReader(JRODataIO):
496 496
497 497
498 498 online = 0
499 499
500 500 realtime = 0
501 501
502 502 nReadBlocks = 0
503 503
504 504 delay = 10 #number of seconds waiting a new file
505 505
506 506 nTries = 3 #quantity tries
507 507
508 508 nFiles = 3 #number of files for searching
509 509
510 510 path = None
511 511
512 512 foldercounter = 0
513 513
514 514 flagNoMoreFiles = 0
515 515
516 516 datetimeList = []
517 517
518 518 __isFirstTimeOnline = 1
519 519
520 520 __printInfo = True
521 521
522 522 profileIndex = None
523 523
524 524 nTxs = 1
525 525
526 526 txIndex = None
527 527
528 528 def __init__(self):
529 529
530 530 """
531 531 This class is used to find data files
532 532
533 533 Example:
534 534 reader = JRODataReader()
535 535 fileList = reader.findDataFiles()
536 536
537 537 """
538 538 pass
539 539
540 540
541 541 def createObjByDefault(self):
542 542 """
543 543
544 544 """
545 545 raise NotImplementedError
546 546
547 547 def getBlockDimension(self):
548 548
549 549 raise NotImplementedError
550 550
551 551 def __searchFilesOffLine(self,
552 552 path,
553 553 startDate=None,
554 554 endDate=None,
555 555 startTime=datetime.time(0,0,0),
556 556 endTime=datetime.time(23,59,59),
557 557 set=None,
558 558 expLabel='',
559 559 ext='.r',
560 560 walk=True):
561 561
562 562 self.filenameList = []
563 563 self.datetimeList = []
564 564
565 565 pathList = []
566 566
567 567 dateList, pathList = self.findDatafiles(path, startDate, endDate, expLabel, ext, walk, include_path=True)
568 568
569 569 if dateList == []:
570 570 # print "[Reading] No *%s files in %s from %s to %s)"%(ext, path,
571 571 # datetime.datetime.combine(startDate,startTime).ctime(),
572 572 # datetime.datetime.combine(endDate,endTime).ctime())
573 573
574 574 return None, None
575 575
576 576 if len(dateList) > 1:
577 print "[Reading] %d days were found in date range: %s - %s" %(len(dateList), startDate, endDate)
577 print "[Reading] Data found: total days = %d, date range = %s - %s" %(len(dateList), startDate, endDate)
578 578 else:
579 print "[Reading] data was found for the date %s" %(dateList[0])
579 print "[Reading] Data found: date = %s" %(dateList[0])
580 580
581 581 filenameList = []
582 582 datetimeList = []
583 583
584 584 for thisPath in pathList:
585 585 # thisPath = pathList[pathDict[file]]
586 586
587 587 fileList = glob.glob1(thisPath, "*%s" %ext)
588 588 fileList.sort()
589 589
590 590 for file in fileList:
591 591
592 592 filename = os.path.join(thisPath,file)
593 593
594 594 if not isFileInDateRange(filename, startDate, endDate):
595 595 continue
596 596
597 597 thisDatetime = isFileInTimeRange(filename, startDate, endDate, startTime, endTime)
598 598
599 599 if not(thisDatetime):
600 600 continue
601 601
602 602 filenameList.append(filename)
603 603 datetimeList.append(thisDatetime)
604 604
605 605 if not(filenameList):
606 606 print "[Reading] No files were found in the time range %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
607 607 return None, None
608 608
609 609 print "[Reading] %d file(s) found in time range: %s - %s" %(len(filenameList), startTime, endTime)
610 610 print
611 611
612 612 for i in range(len(filenameList)):
613 613 print "[Reading] %s -> [%s]" %(filenameList[i], datetimeList[i].ctime())
614 614
615 615 self.filenameList = filenameList
616 616 self.datetimeList = datetimeList
617 617
618 618 return pathList, filenameList
619 619
620 620 def __searchFilesOnLine(self, path, expLabel = "", ext = None, walk=True, set=None):
621 621
622 622 """
623 623 Looks for the last file of the last folder (whether or not determined by startDateTime) and
624 624 returns the file found, along with other data.
625 625 
626 626 Input:
627 627 path : folder containing the data files
628 628 
629 629 expLabel : name of the sub-experiment (subfolder)
630 630 
631 631 ext : extension of the files
632 632 
633 633 walk : if enabled, the search also descends into the day subdirectories (doypath)
634 634 
635 635 Return:
636 636 directory : the directory where the file was found
637 637 filename : the last file of the given folder
638 638 year : the year
639 639 doy : the day of the year
640 640 set : the file set
641 641
642 642
643 643 """
644 644 if not os.path.isdir(path):
645 645 return None, None, None, None, None, None
646 646
647 647 dirList = []
648 648
649 649 if not walk:
650 650 fullpath = path
651 651 foldercounter = 0
652 652 else:
653 653 #keep only the directories
654 654 for thisPath in os.listdir(path):
655 655 if not os.path.isdir(os.path.join(path,thisPath)):
656 656 continue
657 657 if not isRadarFolder(thisPath):
658 658 continue
659 659
660 660 dirList.append(thisPath)
661 661
662 662 if not(dirList):
663 663 return None, None, None, None, None, None
664 664
665 665 dirList = sorted( dirList, key=str.lower )
666 666
667 667 doypath = dirList[-1]
668 668 foldercounter = int(doypath.split('_')[1]) if len(doypath.split('_'))>1 else 0
669 669 fullpath = os.path.join(path, doypath, expLabel)
670 670
671 671
672 672 print "[Reading] %s folder was found: " %(fullpath )
673 673
674 674 if set == None:
675 675 filename = getlastFileFromPath(fullpath, ext)
676 676 else:
677 677 filename = getFileFromSet(fullpath, ext, set)
678 678
679 679 if not(filename):
680 680 return None, None, None, None, None, None
681 681
682 682 print "[Reading] %s file was found" %(filename)
683 683
684 684 if not(self.__verifyFile(os.path.join(fullpath, filename))):
685 685 return None, None, None, None, None, None
686 686
687 687 year = int( filename[1:5] )
688 688 doy = int( filename[5:8] )
689 689 set = int( filename[8:11] )
690 690
691 691 return fullpath, foldercounter, filename, year, doy, set
692 692
693 693 def __setNextFileOffline(self):
694 694
695 695 idFile = self.fileIndex
696 696
697 697 while (True):
698 698 idFile += 1
699 699 if not(idFile < len(self.filenameList)):
700 700 self.flagNoMoreFiles = 1
701 701 # print "[Reading] No more Files"
702 702 return 0
703 703
704 704 filename = self.filenameList[idFile]
705 705
706 706 if not(self.__verifyFile(filename)):
707 707 continue
708 708
709 709 fileSize = os.path.getsize(filename)
710 710 fp = open(filename,'rb')
711 711 break
712 712
713 713 self.flagIsNewFile = 1
714 714 self.fileIndex = idFile
715 715 self.filename = filename
716 716 self.fileSize = fileSize
717 717 self.fp = fp
718 718
719 719 # print "[Reading] Setting the file: %s"%self.filename
720 720
721 721 return 1
722 722
723 723 def __setNextFileOnline(self):
724 724 """
725 725 Looks for the next file with enough data to be read, within a specific folder; if
726 726 no valid file is found it waits for a given time and then searches among the next n
727 727 possible files.
728 728
729 729 Affected:
730 730 self.flagIsNewFile
731 731 self.filename
732 732 self.fileSize
733 733 self.fp
734 734 self.set
735 735 self.flagNoMoreFiles
736 736
737 737 Return:
738 738 0 : if, after the search, the next valid file could not be found
739 739 1 : if the file was opened successfully and is ready to be read
740 740
741 741 Exceptions:
742 742 If a given file cannot be opened
743 743 """
744 744 nFiles = 0
745 745 fileOk_flag = False
746 746 firstTime_flag = True
747 747
748 748 self.set += 1
749 749
750 750 if self.set > 999:
751 751 self.set = 0
752 752 self.foldercounter += 1
753 753
754 754 #look for the first available file
755 755 fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
756 756 if fullfilename:
757 757 if self.__verifyFile(fullfilename, False):
758 758 fileOk_flag = True
759 759
760 760 #if no file is found, wait and then search again
761 761 if not(fileOk_flag):
762 762 for nFiles in range(self.nFiles+1): #search among the next self.nFiles+1 possible files
763 763
764 764 if firstTime_flag: #on the first pass the loop runs self.nTries times
765 765 tries = self.nTries
766 766 else:
767 767 tries = 1 #otherwise it tries only once
768 768
769 769 for nTries in range( tries ):
770 770 if firstTime_flag:
771 771 print "\t[Reading] Waiting %0.2f sec for the next file: \"%s\" , try %03d ..." % ( self.delay, filename, nTries+1 )
772 772 sleep( self.delay )
773 773 else:
774 774 print "\t[Reading] Searching the next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext)
775 775
776 776 fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
777 777 if fullfilename:
778 778 if self.__verifyFile(fullfilename):
779 779 fileOk_flag = True
780 780 break
781 781
782 782 if fileOk_flag:
783 783 break
784 784
785 785 firstTime_flag = False
786 786
787 787 print "\t[Reading] Skipping the file \"%s\" because it does not exist" % filename
788 788 self.set += 1
789 789
790 790 if nFiles == (self.nFiles-1): #if the searched file is not found, move to the next folder and search there
791 791 self.set = 0
792 792 self.doy += 1
793 793 self.foldercounter = 0
794 794
795 795 if fileOk_flag:
796 796 self.fileSize = os.path.getsize( fullfilename )
797 797 self.filename = fullfilename
798 798 self.flagIsNewFile = 1
799 799 if self.fp != None: self.fp.close()
800 800 self.fp = open(fullfilename, 'rb')
801 801 self.flagNoMoreFiles = 0
802 802 # print '[Reading] Setting the file: %s' % fullfilename
803 803 else:
804 804 self.fileSize = 0
805 805 self.filename = None
806 806 self.flagIsNewFile = 0
807 807 self.fp = None
808 808 self.flagNoMoreFiles = 1
809 809 # print '[Reading] No more files to read'
810 810
811 811 return fileOk_flag
812 812
813 813 def setNextFile(self):
814 814 if self.fp != None:
815 815 self.fp.close()
816 816
817 817 if self.online:
818 818 newFile = self.__setNextFileOnline()
819 819 else:
820 820 newFile = self.__setNextFileOffline()
821 821
822 822 if not(newFile):
823 823 print '[Reading] No more files to read'
824 824 return 0
825 825
826 826 print '[Reading] Setting the file: %s' % self.filename
827 827
828 828 self.__readFirstHeader()
829 829 self.nReadBlocks = 0
830 830 return 1
831 831
832 832 def __waitNewBlock(self):
833 833 """
834 834 Returns 1 if a new data block was found, 0 otherwise.
835 835 
836 836 If the reading mode is offline it always returns 0.
837 837 """
838 838 if not self.online:
839 839 return 0
840 840
841 841 if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
842 842 return 0
843 843
844 844 currentPointer = self.fp.tell()
845 845
846 846 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
847 847
848 848 for nTries in range( self.nTries ):
849 849
850 850 self.fp.close()
851 851 self.fp = open( self.filename, 'rb' )
852 852 self.fp.seek( currentPointer )
853 853
854 854 self.fileSize = os.path.getsize( self.filename )
855 855 currentSize = self.fileSize - currentPointer
856 856
857 857 if ( currentSize >= neededSize ):
858 858 self.basicHeaderObj.read(self.fp)
859 859 return 1
860 860
861 861 if self.fileSize == self.fileSizeByHeader:
862 862 # self.flagEoF = True
863 863 return 0
864 864
865 865 print "[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
866 866 sleep( self.delay )
867 867
868 868
869 869 return 0
870 870
871 871 def waitDataBlock(self,pointer_location):
872 872
873 873 currentPointer = pointer_location
874 874
875 875 neededSize = self.processingHeaderObj.blockSize #+ self.basicHeaderSize
876 876
877 877 for nTries in range( self.nTries ):
878 878 self.fp.close()
879 879 self.fp = open( self.filename, 'rb' )
880 880 self.fp.seek( currentPointer )
881 881
882 882 self.fileSize = os.path.getsize( self.filename )
883 883 currentSize = self.fileSize - currentPointer
884 884
885 885 if ( currentSize >= neededSize ):
886 886 return 1
887 887
888 888 print "[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
889 889 sleep( self.delay )
890 890
891 891 return 0
892 892
893 893 def __jumpToLastBlock(self):
894 894
895 895 if not(self.__isFirstTimeOnline):
896 896 return
897 897
898 898 csize = self.fileSize - self.fp.tell()
899 899 blocksize = self.processingHeaderObj.blockSize
900 900
901 901 #skip the first data block
902 902 if csize > self.processingHeaderObj.blockSize:
903 903 self.fp.seek(self.fp.tell() + blocksize)
904 904 else:
905 905 return
906 906
907 907 csize = self.fileSize - self.fp.tell()
908 908 neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
909 909 while True:
910 910
911 911 if self.fp.tell()<self.fileSize:
912 912 self.fp.seek(self.fp.tell() + neededsize)
913 913 else:
914 914 self.fp.seek(self.fp.tell() - neededsize)
915 915 break
916 916
917 917 # csize = self.fileSize - self.fp.tell()
918 918 # neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
919 919 # factor = int(csize/neededsize)
920 920 # if factor > 0:
921 921 # self.fp.seek(self.fp.tell() + factor*neededsize)
922 922
923 923 self.flagIsNewFile = 0
924 924 self.__isFirstTimeOnline = 0
925 925
926 926 def __setNewBlock(self):
927 927
928 928 if self.fp == None:
929 929 return 0
930 930
931 931 # if self.online:
932 932 # self.__jumpToLastBlock()
933 933
934 934 if self.flagIsNewFile:
935 935 self.lastUTTime = self.basicHeaderObj.utc
936 936 return 1
937 937
938 938 if self.realtime:
939 939 self.flagDiscontinuousBlock = 1
940 940 if not(self.setNextFile()):
941 941 return 0
942 942 else:
943 943 return 1
944 944
945 945 currentSize = self.fileSize - self.fp.tell()
946 946 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
947 947
948 948 if (currentSize >= neededSize):
949 949 self.basicHeaderObj.read(self.fp)
950 950 self.lastUTTime = self.basicHeaderObj.utc
951 951 return 1
952 952
953 953 if self.__waitNewBlock():
954 954 self.lastUTTime = self.basicHeaderObj.utc
955 955 return 1
956 956
957 957 if not(self.setNextFile()):
958 958 return 0
959 959
960 960 deltaTime = self.basicHeaderObj.utc - self.lastUTTime #
961 961 self.lastUTTime = self.basicHeaderObj.utc
962 962
963 963 self.flagDiscontinuousBlock = 0
964 964
965 965 if deltaTime > self.maxTimeStep:
966 966 self.flagDiscontinuousBlock = 1
967 967
968 968 return 1
969 969
970 970 def readNextBlock(self):
971 971
972 972 if not(self.__setNewBlock()):
973 973 return 0
974 974
975 975 if not(self.readBlock()):
976 976 return 0
977 977
978 978 self.getBasicHeader()
979 979
980 980 print "[Reading] Block No. %d/%d -> %s" %(self.nReadBlocks,
981 981 self.processingHeaderObj.dataBlocksPerFile,
982 982 self.dataOut.datatime.ctime())
983 983 return 1
984 984
985 985 def __readFirstHeader(self):
986 986
987 987 self.basicHeaderObj.read(self.fp)
988 988 self.systemHeaderObj.read(self.fp)
989 989 self.radarControllerHeaderObj.read(self.fp)
990 990 self.processingHeaderObj.read(self.fp)
991 991
992 992 self.firstHeaderSize = self.basicHeaderObj.size
993 993
994 994 datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
995 995 if datatype == 0:
996 996 datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
997 997 elif datatype == 1:
998 998 datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')])
999 999 elif datatype == 2:
1000 1000 datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')])
1001 1001 elif datatype == 3:
1002 1002 datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')])
1003 1003 elif datatype == 4:
1004 1004 datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')])
1005 1005 elif datatype == 5:
1006 1006 datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')])
1007 1007 else:
1008 1008 raise ValueError, 'Data type was not defined'
1009 1009
1010 1010 self.dtype = datatype_str
1011 1011 #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
1012 1012 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1)
1013 1013 # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
1014 1014 # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
1015 1015 self.getBlockDimension()
1016 1016
1017 1017 def __verifyFile(self, filename, msgFlag=True):
1018 1018
1019 1019 msg = None
1020 1020
1021 1021 try:
1022 1022 fp = open(filename, 'rb')
1023 1023 except IOError:
1024 1024
1025 1025 if msgFlag:
1026 1026 print "[Reading] File %s can't be opened" % (filename)
1027 1027
1028 1028 return False
1029 1029
1030 1030 currentPosition = fp.tell()
1031 1031 neededSize = self.processingHeaderObj.blockSize + self.firstHeaderSize
1032 1032
1033 1033 if neededSize == 0:
1034 1034 basicHeaderObj = BasicHeader(LOCALTIME)
1035 1035 systemHeaderObj = SystemHeader()
1036 1036 radarControllerHeaderObj = RadarControllerHeader()
1037 1037 processingHeaderObj = ProcessingHeader()
1038 1038
1039 1039 if not( basicHeaderObj.read(fp) ):
1040 1040 fp.close()
1041 1041 return False
1042 1042
1043 1043 if not( systemHeaderObj.read(fp) ):
1044 1044 fp.close()
1045 1045 return False
1046 1046
1047 1047 if not( radarControllerHeaderObj.read(fp) ):
1048 1048 fp.close()
1049 1049 return False
1050 1050
1051 1051 if not( processingHeaderObj.read(fp) ):
1052 1052 fp.close()
1053 1053 return False
1054 1054
1055 1055 neededSize = processingHeaderObj.blockSize + basicHeaderObj.size
1056 1056 else:
1057 1057 msg = "[Reading] Skipping the file %s because it does not have enough data" %filename
1058 1058
1059 1059 fp.close()
1060 1060
1061 1061 fileSize = os.path.getsize(filename)
1062 1062 currentSize = fileSize - currentPosition
1063 1063
1064 1064 if currentSize < neededSize:
1065 1065 if msgFlag and (msg != None):
1066 1066 print msg
1067 1067 return False
1068 1068
1069 1069 return True
1070 1070
1071 1071 def findDatafiles(self, path, startDate=None, endDate=None, expLabel='', ext='.r', walk=True, include_path=False):
1072 1072
1073 1073 dateList = []
1074 1074 pathList = []
1075 1075
1076 1076 multi_path = path.split(',')
1077 1077
1078 1078 if not walk:
1079 1079
1080 1080 for single_path in multi_path:
1081 1081
1082 1082 if not os.path.isdir(single_path):
1083 1083 continue
1084 1084
1085 1085 fileList = glob.glob1(single_path, "*"+ext)
1086 1086
1087 1087 if not fileList:
1088 1088 continue
1089 1089
1090 1090 fileList.sort()
1091 1091
1092 1092 for thisFile in fileList:
1093 1093
1094 1094 if not os.path.isfile(os.path.join(single_path, thisFile)):
1095 1095 continue
1096 1096
1097 1097 if not isRadarFile(thisFile):
1098 1098 continue
1099 1099
1100 1100 if not isFileInDateRange(thisFile, startDate, endDate):
1101 1101 continue
1102 1102
1103 1103 thisDate = getDateFromRadarFile(thisFile)
1104 1104
1105 1105 if thisDate in dateList:
1106 1106 continue
1107 1107
1108 1108 dateList.append(thisDate)
1109 1109 pathList.append(single_path)
1110 1110
1111 1111 else:
1112 1112 for single_path in multi_path:
1113 1113
1114 1114 if not os.path.isdir(single_path):
1115 1115 continue
1116 1116
1117 1117 dirList = []
1118 1118
1119 1119 for thisPath in os.listdir(single_path):
1120 1120
1121 1121 if not os.path.isdir(os.path.join(single_path,thisPath)):
1122 1122 continue
1123 1123
1124 1124 if not isRadarFolder(thisPath):
1125 1125 continue
1126 1126
1127 1127 if not isFolderInDateRange(thisPath, startDate, endDate):
1128 1128 continue
1129 1129
1130 1130 dirList.append(thisPath)
1131 1131
1132 1132 if not dirList:
1133 1133 continue
1134 1134
1135 1135 dirList.sort()
1136 1136
1137 1137 for thisDir in dirList:
1138 1138
1139 1139 datapath = os.path.join(single_path, thisDir, expLabel)
1140 1140 fileList = glob.glob1(datapath, "*"+ext)
1141 1141
1142 1142 if len(fileList) < 1:
1143 1143 continue
1144 1144
1145 1145 thisDate = getDateFromRadarFolder(thisDir)
1146 1146
1147 1147 pathList.append(datapath)
1148 1148 dateList.append(thisDate)
1149 1149
1150 1150 dateList.sort()
1151 1151
1152 1152 if walk:
1153 1153 pattern_path = os.path.join(multi_path[0], "[dYYYYDDD]", expLabel)
1154 1154 else:
1155 1155 pattern_path = multi_path[0]
1156 1156
1157 1157 if not dateList:
1158 1158 print "[Reading] No *%s files in %s from %s to %s" %(ext, pattern_path, startDate, endDate)
1159 1159
1160 1160 if include_path:
1161 1161 return dateList, pathList
1162 1162
1163 1163 return dateList
1164 1164
1165 1165 def setup(self,
1166 1166 path=None,
1167 1167 startDate=None,
1168 1168 endDate=None,
1169 1169 startTime=datetime.time(0,0,0),
1170 1170 endTime=datetime.time(23,59,59),
1171 1171 set=None,
1172 1172 expLabel = "",
1173 1173 ext = None,
1174 1174 online = False,
1175 1175 delay = 60,
1176 1176 walk = True,
1177 1177 getblock = False,
1178 1178 nTxs = 1,
1179 1179 realtime=False):
1180 1180
1181 1181 if path == None:
1182 1182 raise ValueError, "[Reading] The path is not valid"
1183 1183
1184 1184 if ext == None:
1185 1185 ext = self.ext
1186 1186
1187 1187 if online:
1188 1188 print "[Reading] Searching files in online mode..."
1189 1189
1190 1190 for nTries in range( self.nTries ):
1191 1191 fullpath, foldercounter, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext, walk=walk, set=set)
1192 1192
1193 1193 if fullpath:
1194 1194 break
1195 1195
1196 1196 print '[Reading] Waiting %0.2f sec for a valid file in %s: try %02d ...' % (self.delay, path, nTries+1)
1197 1197 sleep( self.delay )
1198 1198
1199 1199 if not(fullpath):
1200 1200 print "[Reading] There isn't any valid file in %s" % path
1201 1201 return
1202 1202
1203 1203 self.year = year
1204 1204 self.doy = doy
1205 1205 self.set = set - 1
1206 1206 self.path = path
1207 1207 self.foldercounter = foldercounter
1208 1208 last_set = None
1209 1209
1210 1210 else:
1211 1211 print "[Reading] Searching files in offline mode ..."
1212 1212 pathList, filenameList = self.__searchFilesOffLine(path, startDate=startDate, endDate=endDate,
1213 1213 startTime=startTime, endTime=endTime,
1214 1214 set=set, expLabel=expLabel, ext=ext,
1215 1215 walk=walk)
1216 1216
1217 1217 if not(pathList):
1218 1218 # print "[Reading] No *%s files in %s (%s - %s)"%(ext, path,
1219 1219 # datetime.datetime.combine(startDate,startTime).ctime(),
1220 1220 # datetime.datetime.combine(endDate,endTime).ctime())
1221 1221
1222 1222 # sys.exit(-1)
1223 1223
1224 1224 self.fileIndex = -1
1225 1225 self.pathList = []
1226 1226 self.filenameList = []
1227 1227 return
1228 1228
1229 1229 self.fileIndex = -1
1230 1230 self.pathList = pathList
1231 1231 self.filenameList = filenameList
1232 1232 file_name = os.path.basename(filenameList[-1])
1233 1233 basename, ext = os.path.splitext(file_name)
1234 1234 last_set = int(basename[-3:])
1235 1235
1236 1236 self.online = online
1237 1237 self.realtime = realtime
1238 1238 self.delay = delay
1239 1239 ext = ext.lower()
1240 1240 self.ext = ext
1241 1241 self.getByBlock = getblock
1242 1242 self.nTxs = nTxs
1243 1243
1244 1244 if not(self.setNextFile()):
1245 1245 if (startDate!=None) and (endDate!=None):
1246 1246 print "[Reading] No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
1247 1247 elif startDate != None:
1248 1248 print "[Reading] No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
1249 1249 else:
1250 1250 print "[Reading] No files"
1251 1251
1252 1252 self.fileIndex = -1
1253 1253 self.pathList = []
1254 1254 self.filenameList = []
1255 1255 return
1256 1256
1257 1257 # self.getBasicHeader()
1258 1258
1259 1259 if last_set != None:
1260 1260 self.dataOut.last_block = last_set * self.processingHeaderObj.dataBlocksPerFile + self.basicHeaderObj.dataBlock
1261 1261 return
1262 1262
1263 1263 def getBasicHeader(self):
1264 1264
1265 1265 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000. + self.profileIndex * self.radarControllerHeaderObj.ippSeconds
1266 1266
1267 1267 self.dataOut.flagDiscontinuousBlock = self.flagDiscontinuousBlock
1268 1268
1269 1269 self.dataOut.timeZone = self.basicHeaderObj.timeZone
1270 1270
1271 1271 self.dataOut.dstFlag = self.basicHeaderObj.dstFlag
1272 1272
1273 1273 self.dataOut.errorCount = self.basicHeaderObj.errorCount
1274 1274
1275 1275 self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime
1276 1276
1277 1277 self.dataOut.ippSeconds = self.radarControllerHeaderObj.ippSeconds/self.nTxs
1278 1278
1279 1279 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock*self.nTxs
1280 1280
1281 1281
1282 1282 def getFirstHeader(self):
1283 1283
1284 1284 raise NotImplementedError
1285 1285
1286 1286 def getData(self):
1287 1287
1288 1288 raise NotImplementedError
1289 1289
1290 1290 def hasNotDataInBuffer(self):
1291 1291
1292 1292 raise NotImplementedError
1293 1293
1294 1294 def readBlock(self):
1295 1295
1296 1296 raise NotImplementedError
1297 1297
1298 1298 def isEndProcess(self):
1299 1299
1300 1300 return self.flagNoMoreFiles
1301 1301
1302 1302 def printReadBlocks(self):
1303 1303
1304 1304 print "[Reading] Number of read blocks per file %04d" %self.nReadBlocks
1305 1305
1306 1306 def printTotalBlocks(self):
1307 1307
1308 1308 print "[Reading] Number of read blocks %04d" %self.nTotalBlocks
1309 1309
1310 1310 def printNumberOfBlock(self):
1311 1311
1312 1312 if self.flagIsNewBlock:
1313 1313 print "[Reading] Block No. %d/%d -> %s" %(self.nReadBlocks,
1314 1314 self.processingHeaderObj.dataBlocksPerFile,
1315 1315 self.dataOut.datatime.ctime())
1316 1316
1317 1317 def printInfo(self):
1318 1318
1319 1319 if self.__printInfo == False:
1320 1320 return
1321 1321
1322 1322 self.basicHeaderObj.printInfo()
1323 1323 self.systemHeaderObj.printInfo()
1324 1324 self.radarControllerHeaderObj.printInfo()
1325 1325 self.processingHeaderObj.printInfo()
1326 1326
1327 1327 self.__printInfo = False
1328 1328
1329 1329
1330 1330 def run(self, **kwargs):
1331 1331
1332 1332 if not(self.isConfig):
1333 1333
1334 1334 # self.dataOut = dataOut
1335 1335 self.setup(**kwargs)
1336 1336 self.isConfig = True
1337 1337
1338 1338 self.getData()
1339 1339
1340 1340 class JRODataWriter(JRODataIO):
1341 1341
1342 1342 """
1343 1343 This class allows writing data to processed files (.r or .pdata). The data is
1344 1344 always written in blocks.
1345 1345 """
1346 1346
1347 1347 blockIndex = 0
1348 1348
1349 1349 path = None
1350 1350
1351 1351 setFile = None
1352 1352
1353 1353 profilesPerBlock = None
1354 1354
1355 1355 blocksPerFile = None
1356 1356
1357 1357 nWriteBlocks = 0
1358 1358
1359 1359 fileDate = None
1360 1360
1361 1361 def __init__(self, dataOut=None):
1362 1362 raise NotImplementedError
1363 1363
1364 1364
1365 1365 def hasAllDataInBuffer(self):
1366 1366 raise NotImplementedError
1367 1367
1368 1368
1369 1369 def setBlockDimension(self):
1370 1370 raise NotImplementedError
1371 1371
1372 1372
1373 1373 def writeBlock(self):
1374 1374 raise NotImplementedError
1375 1375
1376 1376
1377 1377 def putData(self):
1378 1378 raise NotImplementedError
1379 1379
1380 1380
1381 1381 def getProcessFlags(self):
1382 1382
1383 1383 processFlags = 0
1384 1384
1385 1385 dtype_index = get_dtype_index(self.dtype)
1386 1386 procflag_dtype = get_procflag_dtype(dtype_index)
1387 1387
1388 1388 processFlags += procflag_dtype
1389 1389
1390 1390 if self.dataOut.flagDecodeData:
1391 1391 processFlags += PROCFLAG.DECODE_DATA
1392 1392
1393 1393 if self.dataOut.flagDeflipData:
1394 1394 processFlags += PROCFLAG.DEFLIP_DATA
1395 1395
1396 1396 if self.dataOut.code is not None:
1397 1397 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
1398 1398
1399 1399 if self.dataOut.nCohInt > 1:
1400 1400 processFlags += PROCFLAG.COHERENT_INTEGRATION
1401 1401
1402 1402 if self.dataOut.type == "Spectra":
1403 1403 if self.dataOut.nIncohInt > 1:
1404 1404 processFlags += PROCFLAG.INCOHERENT_INTEGRATION
1405 1405
1406 1406 if self.dataOut.data_dc is not None:
1407 1407 processFlags += PROCFLAG.SAVE_CHANNELS_DC
1408 1408
1409 1409 if self.dataOut.flagShiftFFT:
1410 1410 processFlags += PROCFLAG.SHIFT_FFT_DATA
1411 1411
1412 1412 return processFlags
1413 1413
1414 1414 def setBasicHeader(self):
1415 1415
1416 1416 self.basicHeaderObj.size = self.basicHeaderSize #bytes
1417 1417 self.basicHeaderObj.version = self.versionFile
1418 1418 self.basicHeaderObj.dataBlock = self.nTotalBlocks
1419 1419
1420 1420 utc = numpy.floor(self.dataOut.utctime)
1421 1421 milisecond = (self.dataOut.utctime - utc)* 1000.0
1422 1422
1423 1423 self.basicHeaderObj.utc = utc
1424 1424 self.basicHeaderObj.miliSecond = milisecond
1425 1425 self.basicHeaderObj.timeZone = self.dataOut.timeZone
1426 1426 self.basicHeaderObj.dstFlag = self.dataOut.dstFlag
1427 1427 self.basicHeaderObj.errorCount = self.dataOut.errorCount
1428 1428
1429 1429 def setFirstHeader(self):
1430 1430 """
1431 1431 Gets a copy of the First Header
1432 1432
1433 1433 Affected:
1434 1434
1435 1435 self.basicHeaderObj
1436 1436 self.systemHeaderObj
1437 1437 self.radarControllerHeaderObj
1438 1438 self.processingHeaderObj
1439 1439
1440 1440 Return:
1441 1441 None
1442 1442 """
1443 1443
1444 1444 raise NotImplementedError
1445 1445
1446 1446 def __writeFirstHeader(self):
1447 1447 """
1448 1448 Writes the first header of the file, that is, the Basic Header and the Long Header (SystemHeader, RadarControllerHeader, ProcessingHeader)
1449 1449
1450 1450 Affected:
1451 1451 __dataType
1452 1452
1453 1453 Return:
1454 1454 None
1455 1455 """
1456 1456
1457 1457 # COMPUTE PARAMETERS
1458 1458
1459 1459 sizeLongHeader = self.systemHeaderObj.size + self.radarControllerHeaderObj.size + self.processingHeaderObj.size
1460 1460 self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader
1461 1461
1462 1462 self.basicHeaderObj.write(self.fp)
1463 1463 self.systemHeaderObj.write(self.fp)
1464 1464 self.radarControllerHeaderObj.write(self.fp)
1465 1465 self.processingHeaderObj.write(self.fp)
1466 1466
1467 1467 def __setNewBlock(self):
1468 1468 """
1469 1469 If this is a new file it writes the First Header, otherwise it writes only the Basic Header
1470 1470 
1471 1471 Return:
1472 1472 0 : if nothing could be written
1473 1473 1 : if the Basic Header or the First Header was written
1474 1474 """
1475 1475 if self.fp == None:
1476 1476 self.setNextFile()
1477 1477
1478 1478 if self.flagIsNewFile:
1479 1479 return 1
1480 1480
1481 1481 if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
1482 1482 self.basicHeaderObj.write(self.fp)
1483 1483 return 1
1484 1484
1485 1485 if not( self.setNextFile() ):
1486 1486 return 0
1487 1487
1488 1488 return 1
1489 1489
1490 1490
1491 1491 def writeNextBlock(self):
1492 1492 """
1493 1493 Selects the next data block and writes it to a file
1494 1494 
1495 1495 Return:
1496 1496 0 : if the data block could not be written
1497 1497 1 : if the data block was written successfully
1498 1498 """
1499 1499 if not( self.__setNewBlock() ):
1500 1500 return 0
1501 1501
1502 1502 self.writeBlock()
1503 1503
1504 1504 print "[Writing] Block No. %d/%d" %(self.blockIndex,
1505 1505 self.processingHeaderObj.dataBlocksPerFile)
1506 1506
1507 1507 return 1
1508 1508
1509 1509 def setNextFile(self):
1510 1510 """
1511 1511 Determines the next file to be written
1512 1512
1513 1513 Affected:
1514 1514 self.filename
1515 1515 self.subfolder
1516 1516 self.fp
1517 1517 self.setFile
1518 1518 self.flagIsNewFile
1519 1519
1520 1520 Return:
1521 1521 0 : if the file cannot be written
1522 1522 1 : if the file is ready to be written
1523 1523 """
1524 1524 ext = self.ext
1525 1525 path = self.path
1526 1526
1527 1527 if self.fp != None:
1528 1528 self.fp.close()
1529 1529
1530 1530 timeTuple = time.localtime( self.dataOut.utctime)
1531 1531 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
1532 1532
1533 1533 fullpath = os.path.join( path, subfolder )
1534 1534 setFile = self.setFile
1535 1535
1536 1536 if not( os.path.exists(fullpath) ):
1537 1537 os.mkdir(fullpath)
1538 1538 setFile = -1 #initialize the set counter
1539 1539 else:
1540 1540 filesList = os.listdir( fullpath )
1541 1541 if len( filesList ) > 0:
1542 1542 filesList = sorted( filesList, key=str.lower )
1543 1543 filen = filesList[-1]
1544 1544 # the filename should have the following format
1545 1545 # 0 1234 567 89A BCDE (hex)
1546 1546 # x YYYY DDD SSS .ext
1547 1547 if isNumber( filen[8:11] ):
1548 1548 setFile = int( filen[8:11] ) #initialize the set counter from the set of the last file
1549 1549 else:
1550 1550 setFile = -1
1551 1551 else:
1552 1552 setFile = -1 #initialize the set counter
1553 1553
1554 1554 setFile += 1
1555 1555
1556 1556 #If this is a new day it resets some values
1557 1557 if self.dataOut.datatime.date() > self.fileDate:
1558 1558 setFile = 0
1559 1559 self.nTotalBlocks = 0
1560 1560
1561 1561 filen = '%s%4.4d%3.3d%3.3d%s' % (self.optchar, timeTuple.tm_year, timeTuple.tm_yday, setFile, ext )
1562 1562
1563 1563 filename = os.path.join( path, subfolder, filen )
1564 1564
1565 1565 fp = open( filename,'wb' )
1566 1566
1567 1567 self.blockIndex = 0
1568 1568
1569 1569 #save attributes
1570 1570 self.filename = filename
1571 1571 self.subfolder = subfolder
1572 1572 self.fp = fp
1573 1573 self.setFile = setFile
1574 1574 self.flagIsNewFile = 1
1575 1575 self.fileDate = self.dataOut.datatime.date()
1576 1576
1577 1577 self.setFirstHeader()
1578 1578
1579 1579 print '[Writing] Opening file: %s'%self.filename
1580 1580
1581 1581 self.__writeFirstHeader()
1582 1582
1583 1583 return 1
1584 1584
1585 1585 def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=64, set=None, ext=None, datatype=4):
1586 1586 """
1587 1587 Sets the format in which the data will be saved and writes the First Header
1588 1588
1589 1589 Inputs:
1590 1590 path : directory where data will be saved
1591 1591 profilesPerBlock : number of profiles per block
1592 1592 set : initial file set
1593 1593 datatype : An integer number that defines data type:
1594 1594 0 : int8 (1 byte)
1595 1595 1 : int16 (2 bytes)
1596 1596 2 : int32 (4 bytes)
1597 1597 3 : int64 (8 bytes)
1598 1598 4 : float32 (4 bytes)
1599 1599 5 : double64 (8 bytes)
1600 1600
1601 1601 Return:
1602 1602 0 : if the setup was not successful
1603 1603 1 : if the setup was successful
1604 1604 """
1605 1605
1606 1606 if ext == None:
1607 1607 ext = self.ext
1608 1608
1609 1609 self.ext = ext.lower()
1610 1610
1611 1611 self.path = path
1612 1612
1613 1613 if set is None:
1614 1614 self.setFile = -1
1615 1615 else:
1616 1616 self.setFile = set - 1
1617 1617
1618 1618 self.blocksPerFile = blocksPerFile
1619 1619
1620 1620 self.profilesPerBlock = profilesPerBlock
1621 1621
1622 1622 self.dataOut = dataOut
1623 1623 self.fileDate = self.dataOut.datatime.date()
1624 1624 #By default
1625 1625 self.dtype = self.dataOut.dtype
1626 1626
1627 1627 if datatype is not None:
1628 1628 self.dtype = get_numpy_dtype(datatype)
1629 1629
1630 1630 if not(self.setNextFile()):
1631 1631 print "[Writing] There isn't a next file"
1632 1632 return 0
1633 1633
1634 1634 self.setBlockDimension()
1635 1635
1636 1636 return 1
1637 1637
1638 1638 def run(self, dataOut, **kwargs):
1639 1639
1640 1640 if not(self.isConfig):
1641 1641
1642 1642 self.setup(dataOut, **kwargs)
1643 1643 self.isConfig = True
1644 1644
1645 1645 self.putData()
1646 1646
@@ -1,1157 +1,1157 @@
1 1 import sys
2 2 import numpy
3 3
4 4 from jroproc_base import ProcessingUnit, Operation
5 5 from schainpy.model.data.jrodata import Voltage
6 6
7 7 class VoltageProc(ProcessingUnit):
8 8
9 9
10 10 def __init__(self):
11 11
12 12 ProcessingUnit.__init__(self)
13 13
14 14 # self.objectDict = {}
15 15 self.dataOut = Voltage()
16 16 self.flip = 1
17 17
18 18 def run(self):
19 19 if self.dataIn.type == 'AMISR':
20 20 self.__updateObjFromAmisrInput()
21 21
22 22 if self.dataIn.type == 'Voltage':
23 23 self.dataOut.copy(self.dataIn)
24 24
25 25 # self.dataOut.copy(self.dataIn)
26 26
27 27 def __updateObjFromAmisrInput(self):
28 28
29 29 self.dataOut.timeZone = self.dataIn.timeZone
30 30 self.dataOut.dstFlag = self.dataIn.dstFlag
31 31 self.dataOut.errorCount = self.dataIn.errorCount
32 32 self.dataOut.useLocalTime = self.dataIn.useLocalTime
33 33
34 34 self.dataOut.flagNoData = self.dataIn.flagNoData
35 35 self.dataOut.data = self.dataIn.data
36 36 self.dataOut.utctime = self.dataIn.utctime
37 37 self.dataOut.channelList = self.dataIn.channelList
38 38 # self.dataOut.timeInterval = self.dataIn.timeInterval
39 39 self.dataOut.heightList = self.dataIn.heightList
40 40 self.dataOut.nProfiles = self.dataIn.nProfiles
41 41
42 42 self.dataOut.nCohInt = self.dataIn.nCohInt
43 43 self.dataOut.ippSeconds = self.dataIn.ippSeconds
44 44 self.dataOut.frequency = self.dataIn.frequency
45 45
46 46 self.dataOut.azimuth = self.dataIn.azimuth
47 47 self.dataOut.zenith = self.dataIn.zenith
48 48
49 49 self.dataOut.beam.codeList = self.dataIn.beam.codeList
50 50 self.dataOut.beam.azimuthList = self.dataIn.beam.azimuthList
51 51 self.dataOut.beam.zenithList = self.dataIn.beam.zenithList
52 52 #
53 53 # pass#
54 54 #
55 55 # def init(self):
56 56 #
57 57 #
58 58 # if self.dataIn.type == 'AMISR':
59 59 # self.__updateObjFromAmisrInput()
60 60 #
61 61 # if self.dataIn.type == 'Voltage':
62 62 # self.dataOut.copy(self.dataIn)
63 63 # # There is no need to copy the dataIn attributes on every init();
64 64 # # the copy should be done for each new data block
65 65
66 66 def selectChannels(self, channelList):
67 67
68 68 channelIndexList = []
69 69
70 70 for channel in channelList:
71 71 if channel not in self.dataOut.channelList:
72 72 raise ValueError, "Channel %d is not in %s" %(channel, str(self.dataOut.channelList))
73 73
74 74 index = self.dataOut.channelList.index(channel)
75 75 channelIndexList.append(index)
76 76
77 77 self.selectChannelsByIndex(channelIndexList)
78 78
79 79 def selectChannelsByIndex(self, channelIndexList):
80 80 """
81 81 Selects a block of data by channel, according to channelIndexList
82 82 
83 83 Input:
84 84 channelIndexList : simple list of channel indexes to select, e.g. [2,3,7]
85 85
86 86 Affected:
87 87 self.dataOut.data
88 88 self.dataOut.channelIndexList
89 89 self.dataOut.nChannels
90 90 self.dataOut.m_ProcessingHeader.totalSpectra
91 91 self.dataOut.systemHeaderObj.numChannels
92 92 self.dataOut.m_ProcessingHeader.blockSize
93 93
94 94 Return:
95 95 None
96 96 """
97 97
98 98 for channelIndex in channelIndexList:
99 99 if channelIndex not in self.dataOut.channelIndexList:
100 100 print channelIndexList
101 101 raise ValueError, "The value %d in channelIndexList is not valid" %channelIndex
102 102
103 103 if self.dataOut.flagDataAsBlock:
104 104 """
105 105 If the data is read in blocks, its dimensions are [nChannels, nProfiles, nHeis]
106 106 """
107 107 data = self.dataOut.data[channelIndexList,:,:]
108 108 else:
109 109 data = self.dataOut.data[channelIndexList,:]
110 110
111 111 self.dataOut.data = data
112 112 self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
113 113 # self.dataOut.nChannels = nChannels
114 114
115 115 return 1
116 116
117 117 def selectHeights(self, minHei=None, maxHei=None):
118 118 """
119 119 Selects a block of data based on a group of height values within the range
120 120 minHei <= height <= maxHei
121 121 
122 122 Input:
123 123 minHei : minimum height value to consider
124 124 maxHei : maximum height value to consider
125 125 
126 126 Affected:
127 127 Several values are changed indirectly through the selectHeightsByIndex method
128 128 
129 129 Return:
130 130 1 if the method executed successfully, otherwise it returns 0
131 131 """
132 132
133 133 if minHei == None:
134 134 minHei = self.dataOut.heightList[0]
135 135
136 136 if maxHei == None:
137 137 maxHei = self.dataOut.heightList[-1]
138 138
139 139 if (minHei < self.dataOut.heightList[0]):
140 140 minHei = self.dataOut.heightList[0]
141 141
142 142 if (maxHei > self.dataOut.heightList[-1]):
143 143 maxHei = self.dataOut.heightList[-1]
144 144
145 145 minIndex = 0
146 146 maxIndex = 0
147 147 heights = self.dataOut.heightList
148 148
149 149 inda = numpy.where(heights >= minHei)
150 150 indb = numpy.where(heights <= maxHei)
151 151
152 152 try:
153 153 minIndex = inda[0][0]
154 154 except:
155 155 minIndex = 0
156 156
157 157 try:
158 158 maxIndex = indb[0][-1]
159 159 except:
160 160 maxIndex = len(heights)
161 161
162 162 self.selectHeightsByIndex(minIndex, maxIndex)
163 163
164 164 return 1
165 165
166 166
167 167 def selectHeightsByIndex(self, minIndex, maxIndex):
168 168 """
169 169 Selects a block of data based on a group of height indexes within the range
170 170 minIndex <= index <= maxIndex
171 171
172 172 Input:
173 173 minIndex : minimum height index to consider
174 174 maxIndex : maximum height index to consider
175 175
176 176 Affected:
177 177 self.dataOut.data
178 178 self.dataOut.heightList
179 179
180 180 Return:
181 181 1 if the method executed successfully, otherwise it returns 0
182 182 """
183 183
184 184 if (minIndex < 0) or (minIndex > maxIndex):
185 185 raise ValueError, "Height index range (%d,%d) is not valid" % (minIndex, maxIndex)
186 186
187 187 if (maxIndex >= self.dataOut.nHeights):
188 188 maxIndex = self.dataOut.nHeights
189 189
190 190 #voltage
191 191 if self.dataOut.flagDataAsBlock:
192 192 """
193 193 If the data is read in blocks, its dimensions are [nChannels, nProfiles, nHeis]
194 194 """
195 195 data = self.dataOut.data[:,:, minIndex:maxIndex]
196 196 else:
197 197 data = self.dataOut.data[:, minIndex:maxIndex]
198 198
199 199 # firstHeight = self.dataOut.heightList[minIndex]
200 200
201 201 self.dataOut.data = data
202 202 self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex]
203 203
204 204 if self.dataOut.nHeights <= 1:
205 205 raise ValueError, "selectHeights: Too few heights. Current number of heights is %d" %(self.dataOut.nHeights)
206 206
207 207 return 1
208 208
209 209
210 210 def filterByHeights(self, window):
211 211
212 212 deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
213 213
214 214 if window == None:
215 215 window = (self.dataOut.radarControllerHeaderObj.txA/self.dataOut.radarControllerHeaderObj.nBaud) / deltaHeight
216 216
217 217 newdelta = deltaHeight * window
218 218 r = self.dataOut.nHeights % window
219 219 newheights = (self.dataOut.nHeights-r)/window
220 220
221 221 if newheights <= 1:
222 222 raise ValueError, "filterByHeights: Too few heights. Current number of heights is %d and window is %d" %(self.dataOut.nHeights, window)
223 223
224 224 if self.dataOut.flagDataAsBlock:
225 225 """
226 226 If the data was read as a block, dimension = [nChannels, nProfiles, nHeis]
227 227 """
228 228 buffer = self.dataOut.data[:, :, 0:self.dataOut.nHeights-r]
229 229 buffer = buffer.reshape(self.dataOut.nChannels,self.dataOut.nProfiles,self.dataOut.nHeights/window,window)
230 230 buffer = numpy.sum(buffer,3)
231 231
232 232 else:
233 233 buffer = self.dataOut.data[:,0:self.dataOut.nHeights-r]
234 234 buffer = buffer.reshape(self.dataOut.nChannels,self.dataOut.nHeights/window,window)
235 235 buffer = numpy.sum(buffer,2)
236 236
237 237 self.dataOut.data = buffer
238 238 self.dataOut.heightList = self.dataOut.heightList[0] + numpy.arange( newheights )*newdelta
239 239 self.dataOut.windowOfFilter = window
240 240
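# A minimal standalone sketch (hypothetical array, not part of the processing
# chain) of the decimation done by filterByHeights: groups of `window`
# adjacent heights are summed after dropping the remainder r.
# >>> nChannels, nHeights, window = 2, 10, 3
# >>> r = nHeights % window # -> 1 trailing height is dropped
# >>> data = numpy.arange(nChannels*nHeights).reshape(nChannels, nHeights)
# >>> buff = data[:, 0:nHeights-r].reshape(nChannels, (nHeights-r)/window, window)
# >>> decimated = numpy.sum(buff, 2) # shape (2, 3)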
241 241 def setH0(self, h0, deltaHeight = None):
242 242
243 243 if not deltaHeight:
244 244 deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
245 245
246 246 nHeights = self.dataOut.nHeights
247 247
248 248 newHeiRange = h0 + numpy.arange(nHeights)*deltaHeight
249 249
250 250 self.dataOut.heightList = newHeiRange
251 251
252 252 def deFlip(self, channelList = []):
253 253
254 254 data = self.dataOut.data.copy()
255 255
256 256 if self.dataOut.flagDataAsBlock:
257 257 flip = self.flip
258 258 profileList = range(self.dataOut.nProfiles)
259 259
260 260 if not channelList:
261 261 for thisProfile in profileList:
262 262 data[:,thisProfile,:] = data[:,thisProfile,:]*flip
263 263 flip *= -1.0
264 264 else:
265 265 for thisChannel in channelList:
266 266 if thisChannel not in self.dataOut.channelList:
267 267 continue
268 268
269 269 for thisProfile in profileList:
270 270 data[thisChannel,thisProfile,:] = data[thisChannel,thisProfile,:]*flip
271 271 flip *= -1.0
272 272
273 273 self.flip = flip
274 274
275 275 else:
276 276 if not channelList:
277 277 data[:,:] = data[:,:]*self.flip
278 278 else:
279 279 for thisChannel in channelList:
280 280 if thisChannel not in self.dataOut.channelList:
281 281 continue
282 282
283 283 data[thisChannel,:] = data[thisChannel,:]*self.flip
284 284
285 285 self.flip *= -1.
286 286
287 287 self.dataOut.data = data
288 288
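# A minimal standalone sketch (hypothetical profiles, not part of the
# processing chain) of what deFlip does for block data: successive profiles
# are multiplied by +1/-1 to undo a transmitted phase flip.
# >>> flip = 1.0
# >>> block = numpy.ones((2, 4, 8), dtype=numpy.complex) # [channels, profiles, heights]
# >>> for p in range(block.shape[1]):
# ...     block[:, p, :] *= flip
# ...     flip *= -1.0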
289 289 def setRadarFrequency(self, frequency=None):
290 290
291 291 if frequency != None:
292 292 self.dataOut.frequency = frequency
293 293
294 294 return 1
295 295
296 296 class CohInt(Operation):
297 297
298 298 isConfig = False
299 299
300 300 __profIndex = 0
301 301 __withOverapping = False
302 302
303 303 __byTime = False
304 304 __initime = None
305 305 __lastdatatime = None
306 306 __integrationtime = None
307 307
308 308 __buffer = None
309 309
310 310 __dataReady = False
311 311
312 312 n = None
313 313
314 314
315 315 def __init__(self):
316 316
317 317 Operation.__init__(self)
318 318
319 319 # self.isConfig = False
320 320
321 321 def setup(self, n=None, timeInterval=None, overlapping=False, byblock=False):
322 322 """
323 323 Set the parameters of the integration class.
324 324
325 325 Inputs:
326 326
327 327 n : Number of coherent integrations
328 328 timeInterval : Integration time in seconds. Ignored when the parameter "n" is given
329 329 overlapping : If True, keep a sliding buffer with the last n profiles so every new profile produces an integrated output
330 330
331 331 """
332 332
333 333 self.__initime = None
334 334 self.__lastdatatime = 0
335 335 self.__buffer = None
336 336 self.__dataReady = False
337 337 self.byblock = byblock
338 338
339 339 if n == None and timeInterval == None:
340 340 raise ValueError, "n or timeInterval should be specified ..."
341 341
342 342 if n != None:
343 343 self.n = n
344 344 self.__byTime = False
345 345 else:
346 346 self.__integrationtime = timeInterval #* 60. #if (type(timeInterval)!=integer) -> change this line
347 347 self.n = 9999
348 348 self.__byTime = True
349 349
350 350 if overlapping:
351 351 self.__withOverapping = True
352 352 self.__buffer = None
353 353 else:
354 354 self.__withOverapping = False
355 355 self.__buffer = 0
356 356
357 357 self.__profIndex = 0
358 358
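# A minimal standalone sketch (hypothetical profiles, not part of the
# processing chain): coherent integration is a complex sum of n consecutive
# profiles along the profile axis.
# >>> n, nChannels, nHeis = 4, 2, 8
# >>> profiles = numpy.ones((n, nChannels, nHeis), dtype=numpy.complex)
# >>> integrated = profiles.sum(axis=0) # shape (nChannels, nHeis)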
359 359 def putData(self, data):
360 360
361 361 """
362 362 Add a profile to the __buffer and increase __profIndex by one
363 363
364 364 """
365 365
366 366 if not self.__withOverapping:
367 367 self.__buffer += data.copy()
368 368 self.__profIndex += 1
369 369 return
370 370
371 371 #Overlapping data
372 372 nChannels, nHeis = data.shape
373 373 data = numpy.reshape(data, (1, nChannels, nHeis))
374 374
375 375 #If the buffer is empty then it takes the data value
376 376 if self.__buffer is None:
377 377 self.__buffer = data
378 378 self.__profIndex += 1
379 379 return
380 380
381 381 #If the buffer holds fewer than n profiles then stack the new data onto it
382 382 if self.__profIndex < self.n:
383 383 self.__buffer = numpy.vstack((self.__buffer, data))
384 384 self.__profIndex += 1
385 385 return
386 386
387 387 #If the buffer already holds n profiles then drop the oldest profile and append the new data
388 388 self.__buffer = numpy.roll(self.__buffer, -1, axis=0)
389 389 self.__buffer[self.n-1] = data
390 390 self.__profIndex = self.n
391 391 return
392 392
393 393
394 394 def pushData(self):
395 395 """
396 396 Return the sum of the buffered profiles and the number of profiles used in the sum.
397 397
398 398 Affected:
399 399
400 400 self.__profIndex
401 401
402 402 """
403 403
404 404 if not self.__withOverapping:
405 405 data = self.__buffer
406 406 n = self.__profIndex
407 407
408 408 self.__buffer = 0
409 409 self.__profIndex = 0
410 410
411 411 return data, n
412 412
413 413 #Integration with Overlapping
414 414 data = numpy.sum(self.__buffer, axis=0)
415 415 n = self.__profIndex
416 416
417 417 return data, n
418 418
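# A minimal standalone sketch (hypothetical buffer, not part of the processing
# chain) of the overlapping mode: the last n profiles are kept in a rolling
# stack, so each new profile yields a fresh n-profile sum.
# >>> buff = numpy.zeros((4, 2, 8), dtype=numpy.complex) # n=4 profiles kept
# >>> new_profile = numpy.ones((2, 8), dtype=numpy.complex)
# >>> buff = numpy.roll(buff, -1, axis=0)
# >>> buff[-1] = new_profile
# >>> integrated = numpy.sum(buff, axis=0)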
419 419 def byProfiles(self, data):
420 420
421 421 self.__dataReady = False
422 422 avgdata = None
423 423 # n = None
424 424
425 425 self.putData(data)
426 426
427 427 if self.__profIndex == self.n:
428 428
429 429 avgdata, n = self.pushData()
430 430 self.__dataReady = True
431 431
432 432 return avgdata
433 433
434 434 def byTime(self, data, datatime):
435 435
436 436 self.__dataReady = False
437 437 avgdata = None
438 438 n = None
439 439
440 440 self.putData(data)
441 441
442 442 if (datatime - self.__initime) >= self.__integrationtime:
443 443 avgdata, n = self.pushData()
444 444 self.n = n
445 445 self.__dataReady = True
446 446
447 447 return avgdata
448 448
449 449 def integrate(self, data, datatime=None):
450 450
451 451 if self.__initime == None:
452 452 self.__initime = datatime
453 453
454 454 if self.__byTime:
455 455 avgdata = self.byTime(data, datatime)
456 456 else:
457 457 avgdata = self.byProfiles(data)
458 458
459 459
460 460 self.__lastdatatime = datatime
461 461
462 462 if avgdata is None:
463 463 return None, None
464 464
465 465 avgdatatime = self.__initime
466 466
467 467 deltatime = datatime -self.__lastdatatime
468 468
469 469 if not self.__withOverapping:
470 470 self.__initime = datatime
471 471 else:
472 472 self.__initime += deltatime
473 473
474 474 return avgdata, avgdatatime
475 475
476 476 def integrateByBlock(self, dataOut):
477 477
478 478 times = int(dataOut.data.shape[1]/self.n)
479 479 avgdata = numpy.zeros((dataOut.nChannels, times, dataOut.nHeights), dtype=numpy.complex)
480 480
481 481 id_min = 0
482 482 id_max = self.n
483 483
484 484 for i in range(times):
485 485 junk = dataOut.data[:,id_min:id_max,:]
486 486 avgdata[:,i,:] = junk.sum(axis=1)
487 487 id_min += self.n
488 488 id_max += self.n
489 489
490 490 timeInterval = dataOut.ippSeconds*self.n
491 491 avgdatatime = (times - 1) * timeInterval + dataOut.utctime
492 492 self.__dataReady = True
493 493 return avgdata, avgdatatime
494 494
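# A minimal standalone sketch (hypothetical block, not part of the processing
# chain), equivalent to the loop in integrateByBlock: consecutive groups of n
# profiles are summed along the profile axis.
# >>> nChannels, nProfiles, nHeis, n = 2, 8, 16, 4
# >>> block = numpy.ones((nChannels, nProfiles, nHeis), dtype=numpy.complex)
# >>> times = nProfiles / n
# >>> avgdata = block[:, :times*n, :].reshape(nChannels, times, n, nHeis).sum(axis=2)
# >>> avgdata.shape # -> (2, 2, 16)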
495 495 def run(self, dataOut, **kwargs):
496 496
497 497 if not self.isConfig:
498 498 self.setup(**kwargs)
499 499 self.isConfig = True
500 500
501 501 if dataOut.flagDataAsBlock:
502 502 """
503 503 If the data was read as a block, dimension = [nChannels, nProfiles, nHeis]
504 504 """
505 505 avgdata, avgdatatime = self.integrateByBlock(dataOut)
506 506 dataOut.nProfiles /= self.n
507 507 else:
508 508 avgdata, avgdatatime = self.integrate(dataOut.data, dataOut.utctime)
509 509
510 510 # dataOut.timeInterval *= n
511 511 dataOut.flagNoData = True
512 512
513 513 if self.__dataReady:
514 514 dataOut.data = avgdata
515 515 dataOut.nCohInt *= self.n
516 516 dataOut.utctime = avgdatatime
517 517 # dataOut.timeInterval = dataOut.ippSeconds * dataOut.nCohInt
518 518 dataOut.flagNoData = False
519 519
520 520 class Decoder(Operation):
521 521
522 522 isConfig = False
523 523 __profIndex = 0
524 524
525 525 code = None
526 526
527 527 nCode = None
528 528 nBaud = None
529 529
530 530
531 531 def __init__(self):
532 532
533 533 Operation.__init__(self)
534 534
535 535 self.times = None
536 536 self.osamp = None
537 537 # self.__setValues = False
538 538 self.isConfig = False
539 539
540 540 def setup(self, code, osamp, dataOut):
541 541
542 542 self.__profIndex = 0
543 543
544 544 self.code = code
545 545
546 546 self.nCode = len(code)
547 547 self.nBaud = len(code[0])
548 548
549 549 if (osamp != None) and (osamp >1):
550 550 self.osamp = osamp
551 551 self.code = numpy.repeat(code, repeats=self.osamp, axis=1)
552 552 self.nBaud = self.nBaud*self.osamp
553 553
554 554 self.__nChannels = dataOut.nChannels
555 555 self.__nProfiles = dataOut.nProfiles
556 556 self.__nHeis = dataOut.nHeights
557 557
558 558 if self.__nHeis < self.nBaud:
559 559 raise ValueError, 'Number of heights (%d) should be greater than or equal to the number of bauds (%d)' %(self.__nHeis, self.nBaud)
560 560
561 561 #Frequency
562 562 __codeBuffer = numpy.zeros((self.nCode, self.__nHeis), dtype=numpy.complex)
563 563
564 564 __codeBuffer[:,0:self.nBaud] = self.code
565 565
566 566 self.fft_code = numpy.conj(numpy.fft.fft(__codeBuffer, axis=1))
567 567
568 568 if dataOut.flagDataAsBlock:
569 569
570 570 self.ndatadec = self.__nHeis #- self.nBaud + 1
571 571
572 572 self.datadecTime = numpy.zeros((self.__nChannels, self.__nProfiles, self.ndatadec), dtype=numpy.complex)
573 573
574 574 else:
575 575
576 576 #Time
577 577 self.ndatadec = self.__nHeis #- self.nBaud + 1
578 578
579 579 self.datadecTime = numpy.zeros((self.__nChannels, self.ndatadec), dtype=numpy.complex)
580 580
581 581 def __convolutionInFreq(self, data):
582 582
583 583 fft_code = self.fft_code[self.__profIndex].reshape(1,-1)
584 584
585 585 fft_data = numpy.fft.fft(data, axis=1)
586 586
587 587 conv = fft_data*fft_code
588 588
589 589 data = numpy.fft.ifft(conv,axis=1)
590 590
591 591 return data
592 592
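# A minimal standalone sketch (hypothetical code and signal, not part of the
# processing chain) of frequency-domain decoding: multiply the signal spectrum
# by the conjugated, zero-padded code spectrum and transform back.
# >>> nHeis = 64
# >>> code = numpy.array([1., 1., 1., -1., 1.]) # hypothetical 5-baud code
# >>> code_buffer = numpy.zeros(nHeis, dtype=numpy.complex)
# >>> code_buffer[:code.size] = code
# >>> fft_code = numpy.conj(numpy.fft.fft(code_buffer))
# >>> signal = numpy.ones(nHeis, dtype=numpy.complex)
# >>> decoded = numpy.fft.ifft(numpy.fft.fft(signal)*fft_code)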
593 593 def __convolutionInFreqOpt(self, data):
594 594
595 595 raise NotImplementedError
596 596
597 597 def __convolutionInTime(self, data):
598 598
599 599 code = self.code[self.__profIndex]
600 600
601 601 for i in range(self.__nChannels):
602 602 self.datadecTime[i,:] = numpy.correlate(data[i,:], code, mode='full')[self.nBaud-1:]
603 603
604 604 return self.datadecTime
605 605
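# A minimal standalone sketch (hypothetical code and signal, not part of the
# processing chain) of time-domain decoding: correlate the signal with the
# code and keep the last nHeis lags, matching the [nBaud-1:] slice used above.
# >>> code = numpy.array([1., 1., 1., -1., 1.]) # hypothetical 5-baud code
# >>> signal = numpy.ones(64, dtype=numpy.complex)
# >>> decoded = numpy.correlate(signal, code, mode='full')[code.size-1:]
# >>> decoded.shape # -> (64,)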
606 606 def __convolutionByBlockInTime(self, data):
607 607
608 608 repetitions = self.__nProfiles / self.nCode
609 609
610 610 junk = numpy.lib.stride_tricks.as_strided(self.code, (repetitions, self.code.size), (0, self.code.itemsize))
611 611 junk = junk.flatten()
612 612 code_block = numpy.reshape(junk, (self.nCode*repetitions, self.nBaud))
613 613
614 614 for i in range(self.__nChannels):
615 615 for j in range(self.__nProfiles):
616 616 self.datadecTime[i,j,:] = numpy.correlate(data[i,j,:], code_block[j,:], mode='full')[self.nBaud-1:]
617 617
618 618 return self.datadecTime
619 619
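# A minimal standalone sketch (hypothetical code table, not part of the
# processing chain) of the zero-stride trick above: as_strided repeats the
# code table `repetitions` times without copying, before it is reshaped into
# one code row per profile.
# >>> code = numpy.array([[1., -1.], [1., 1.]]) # hypothetical nCode=2, nBaud=2
# >>> repetitions = 3
# >>> junk = numpy.lib.stride_tricks.as_strided(code, (repetitions, code.size), (0, code.itemsize))
# >>> code_block = junk.flatten().reshape(code.shape[0]*repetitions, code.shape[1])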
620 620 def __convolutionByBlockInFreq(self, data):
621 621
622 622 raise NotImplementedError, "Decoder by frequency for blocks is not implemented"
623 623
624 624
625 625 fft_code = self.fft_code[self.__profIndex].reshape(1,-1)
626 626
627 627 fft_data = numpy.fft.fft(data, axis=2)
628 628
629 629 conv = fft_data*fft_code
630 630
631 631 data = numpy.fft.ifft(conv,axis=2)
632 632
633 633 return data
634 634
635 635 def run(self, dataOut, code=None, nCode=None, nBaud=None, mode = 0, osamp=None, times=None):
636 636
637 637 if dataOut.flagDecodeData:
638 638 print "This data is already decoded, decoding it again ..."
639 639
640 640 if not self.isConfig:
641 641
642 642 if code is None:
643 643 if dataOut.code is None:
644 644 raise ValueError, "Code could not be read from %s instance. Enter a value in Code parameter" %dataOut.type
645 645
646 646 code = dataOut.code
647 647 else:
648 648 code = numpy.array(code).reshape(nCode,nBaud)
649 649
650 650 self.setup(code, osamp, dataOut)
651 651
652 652 self.isConfig = True
653 653
654 654 if mode == 3:
655 655 sys.stderr.write("Decoder Warning: mode=%d is not valid, using mode=0\n" %mode)
656 656
657 657 if times != None:
658 658 sys.stderr.write("Decoder Warning: Argument 'times' is not used anymore\n")
659 659
660 660 if self.code is None:
661 661 print "Fail decoding: Code is not defined."
662 662 return
663 663
664 664 datadec = None
665 665 if mode == 3:
666 666 mode = 0
667 667
668 668 if dataOut.flagDataAsBlock:
669 669 """
670 670 Decoding when the data has been read as a block
671 671 """
672 672
673 673 if mode == 0:
674 674 datadec = self.__convolutionByBlockInTime(dataOut.data)
675 675 if mode == 1:
676 676 datadec = self.__convolutionByBlockInFreq(dataOut.data)
677 677 else:
678 678 """
679 679 Decoding when the data has been read profile by profile
680 680 """
681 681 if mode == 0:
682 682 datadec = self.__convolutionInTime(dataOut.data)
683 683
684 684 if mode == 1:
685 685 datadec = self.__convolutionInFreq(dataOut.data)
686 686
687 687 if mode == 2:
688 688 datadec = self.__convolutionInFreqOpt(dataOut.data)
689 689
690 690 if datadec is None:
691 691 raise ValueError, "Codification mode selected is not valid: mode=%d. Try selecting 0 or 1" %mode
692 692
693 693 dataOut.code = self.code
694 694 dataOut.nCode = self.nCode
695 695 dataOut.nBaud = self.nBaud
696 696
697 697 dataOut.data = datadec
698 698
699 699 dataOut.heightList = dataOut.heightList[0:datadec.shape[-1]]
700 700
701 701 dataOut.flagDecodeData = True # assume the data is now decoded
702 702
703 703 if self.__profIndex == self.nCode-1:
704 704 self.__profIndex = 0
705 705 return 1
706 706
707 707 self.__profIndex += 1
708 708
709 709 return 1
710 710 # dataOut.flagDeflipData = True # assume the data has not been de-flipped
711 711
712 712
713 713 class ProfileConcat(Operation):
714 714
715 715 isConfig = False
716 716 buffer = None
717 717
718 718 def __init__(self):
719 719
720 720 Operation.__init__(self)
721 721 self.profileIndex = 0
722 722
723 723 def reset(self):
724 724 self.buffer = numpy.zeros_like(self.buffer)
725 725 self.start_index = 0
726 726 self.times = 1
727 727
728 728 def setup(self, data, m, n=1):
729 729 self.buffer = numpy.zeros((data.shape[0],data.shape[1]*m),dtype=type(data[0,0]))
730 730 self.nHeights = data.shape[1]
731 731 self.start_index = 0
732 732 self.times = 1
733 733
734 734 def concat(self, data):
735 735
736 736 self.buffer[:,self.start_index:self.start_index + self.nHeights] = data.copy()
737 737 self.start_index = self.start_index + self.nHeights
738 738
739 739 def run(self, dataOut, m):
740 740
741 741 dataOut.flagNoData = True
742 742
743 743 if not self.isConfig:
744 744 self.setup(dataOut.data, m, 1)
745 745 self.isConfig = True
746 746
747 747 if dataOut.flagDataAsBlock:
748 748 raise ValueError, "ProfileConcat can only be used when voltage data has been read profile by profile (getBlock = False)"
749 749
750 750 else:
751 751 self.concat(dataOut.data)
752 752 self.times += 1
753 753 if self.times > m:
754 754 dataOut.data = self.buffer
755 755 self.reset()
756 756 dataOut.flagNoData = False
757 757 # more header and dataOut attributes should be updated here, e.g. the heights
758 758 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
759 759 xf = dataOut.heightList[0] + dataOut.nHeights * deltaHeight * m
760 760 dataOut.heightList = numpy.arange(dataOut.heightList[0], xf, deltaHeight)
761 761 dataOut.ippSeconds *= m
762 762
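# A minimal standalone sketch (hypothetical profiles, not part of the
# processing chain): ProfileConcat is equivalent to stacking m consecutive
# profiles along the height axis.
# >>> m, nChannels, nHeis = 3, 2, 4
# >>> profiles = [numpy.ones((nChannels, nHeis), dtype=numpy.complex)*k for k in range(m)]
# >>> concatenated = numpy.hstack(profiles) # shape (2, 12)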
763 763 class ProfileSelector(Operation):
764 764
765 765 profileIndex = None
766 766 # Total number of profiles
767 767 nProfiles = None
768 768
769 769 def __init__(self):
770 770
771 771 Operation.__init__(self)
772 772 self.profileIndex = 0
773 773
774 def incIndex(self):
774 def incProfileIndex(self):
775 775
776 776 self.profileIndex += 1
777 777
778 778 if self.profileIndex >= self.nProfiles:
779 779 self.profileIndex = 0
780 780
781 781 def isThisProfileInRange(self, profileIndex, minIndex, maxIndex):
782 782
783 783 if profileIndex < minIndex:
784 784 return False
785 785
786 786 if profileIndex > maxIndex:
787 787 return False
788 788
789 789 return True
790 790
791 791 def isThisProfileInList(self, profileIndex, profileList):
792 792
793 793 if profileIndex not in profileList:
794 794 return False
795 795
796 796 return True
797 797
798 798 def run(self, dataOut, profileList=None, profileRangeList=None, beam=None, byblock=False, rangeList = None, nProfiles=None):
799 799
800 800 """
801 801 ProfileSelector:
802 802
803 803 Inputs:
804 804 profileList : Index of profiles selected. Example: profileList = (0,1,2,7,8)
805 805
806 806 profileRangeList : Minimum and maximum profile indexes. Example: profileRangeList = (4, 30)
807 807
808 808 rangeList : List of profile ranges. Example: rangeList = ((4, 30), (32, 64), (128, 256))
809 809
810 810 """
811 811
812 812 dataOut.flagNoData = True
813 813
814 814 if dataOut.flagDataAsBlock:
815 815 """
816 816 data dimension = [nChannels, nProfiles, nHeis]
817 817 """
818 818 if profileList != None:
819 819 dataOut.data = dataOut.data[:,profileList,:]
820 820
821 821 if profileRangeList != None:
822 822 minIndex = profileRangeList[0]
823 823 maxIndex = profileRangeList[1]
824 824 profileList = range(minIndex, maxIndex+1)
825 825
826 826 dataOut.data = dataOut.data[:,minIndex:maxIndex+1,:]
827 827
828 828 if rangeList != None:
829 829
830 830 profileList = []
831 831
832 832 for thisRange in rangeList:
833 833 minIndex = thisRange[0]
834 834 maxIndex = thisRange[1]
835 835
836 836 profileList.extend(range(minIndex, maxIndex+1))
837 837
838 838 dataOut.data = dataOut.data[:,profileList,:]
839 839
840 840 dataOut.nProfiles = len(profileList)
841 841 dataOut.profileIndex = dataOut.nProfiles - 1
842 842 dataOut.flagNoData = False
843 843
844 844 return True
845 845
846 846 """
847 847 data dimension = [nChannels, nHeis]
848 848 """
849 849
850 850 if profileList != None:
851 851
852 852 if self.isThisProfileInList(dataOut.profileIndex, profileList):
853 853
854 854 self.nProfiles = len(profileList)
855 855 dataOut.nProfiles = self.nProfiles
856 856 dataOut.profileIndex = self.profileIndex
857 857 dataOut.flagNoData = False
858 858
859 self.incIndex()
859 self.incProfileIndex()
860 860 return True
861 861
862 862 if profileRangeList != None:
863 863
864 864 minIndex = profileRangeList[0]
865 865 maxIndex = profileRangeList[1]
866 866
867 867 if self.isThisProfileInRange(dataOut.profileIndex, minIndex, maxIndex):
868 868
869 869 self.nProfiles = maxIndex - minIndex + 1
870 870 dataOut.nProfiles = self.nProfiles
871 871 dataOut.profileIndex = self.profileIndex
872 872 dataOut.flagNoData = False
873 873
874 self.incIndex()
874 self.incProfileIndex()
875 875 return True
876 876
877 877 if rangeList != None:
878 878
879 879 nProfiles = 0
880 880
881 881 for thisRange in rangeList:
882 882 minIndex = thisRange[0]
883 883 maxIndex = thisRange[1]
884 884
885 885 nProfiles += maxIndex - minIndex + 1
886 886
887 887 for thisRange in rangeList:
888 888
889 889 minIndex = thisRange[0]
890 890 maxIndex = thisRange[1]
891 891
892 892 if self.isThisProfileInRange(dataOut.profileIndex, minIndex, maxIndex):
893 893
894 894 self.nProfiles = nProfiles
895 895 dataOut.nProfiles = self.nProfiles
896 896 dataOut.profileIndex = self.profileIndex
897 897 dataOut.flagNoData = False
898 898
899 self.incIndex()
899 self.incProfileIndex()
900 900
901 901 break
902 902
903 903 return True
904 904
905 905
906 906 if beam != None: #beam is only for AMISR data
907 907 if self.isThisProfileInList(dataOut.profileIndex, dataOut.beamRangeDict[beam]):
908 908 dataOut.flagNoData = False
909 909 dataOut.profileIndex = self.profileIndex
910 910
911 self.incIndex()
911 self.incProfileIndex()
912 912
913 913 return True
914 914
915 915 raise ValueError, "ProfileSelector needs profileList, profileRangeList or rangeList parameter"
916 916
917 917 return False
918 918
919 919
920 920
921 921 class Reshaper(Operation):
922 922
923 923 def __init__(self):
924 924
925 925 Operation.__init__(self)
926 926
927 927 self.__buffer = None
928 928 self.__nitems = 0
929 929
930 930 def __appendProfile(self, dataOut, nTxs):
931 931
932 932 if self.__buffer is None:
933 933 shape = (dataOut.nChannels, int(dataOut.nHeights/nTxs) )
934 934 self.__buffer = numpy.empty(shape, dtype = dataOut.data.dtype)
935 935
936 936 ini = dataOut.nHeights * self.__nitems
937 937 end = ini + dataOut.nHeights
938 938
939 939 self.__buffer[:, ini:end] = dataOut.data
940 940
941 941 self.__nitems += 1
942 942
943 943 return int(self.__nitems*nTxs)
944 944
945 945 def __getBuffer(self):
946 946
947 947 if self.__nitems == int(1./self.__nTxs):
948 948
949 949 self.__nitems = 0
950 950
951 951 return self.__buffer.copy()
952 952
953 953 return None
954 954
955 955 def __checkInputs(self, dataOut, shape, nTxs):
956 956
957 957 if shape is None and nTxs is None:
958 958 raise ValueError, "Reshaper: shape or nTxs factor should be defined"
959 959
960 960 if nTxs:
961 961 if nTxs < 0:
962 962 raise ValueError, "nTxs should be greater than 0"
963 963
964 964 if nTxs < 1 and dataOut.nProfiles % (1./nTxs) != 0:
965 965 raise ValueError, "nProfiles= %d is not divisible by (1./nTxs) = %f" %(dataOut.nProfiles, (1./nTxs))
966 966
967 967 shape = [dataOut.nChannels, dataOut.nProfiles*nTxs, dataOut.nHeights/nTxs]
968 968
969 969 if len(shape) != 2 and len(shape) != 3:
970 970 raise ValueError, "shape dimension should be equal to 2 or 3. shape = (nProfiles, nHeis) or (nChannels, nProfiles, nHeis). Actually shape = (%d, %d, %d)" %(dataOut.nChannels, dataOut.nProfiles, dataOut.nHeights)
971 971
972 972 if len(shape) == 2:
973 973 shape_tuple = [dataOut.nChannels]
974 974 shape_tuple.extend(shape)
975 975 else:
976 976 shape_tuple = list(shape)
977 977
978 978 if not nTxs:
979 979 nTxs = int(shape_tuple[1]/dataOut.nProfiles)
980 980
981 981 return shape_tuple, nTxs
982 982
983 983 def run(self, dataOut, shape=None, nTxs=None):
984 984
985 985 shape_tuple, self.__nTxs = self.__checkInputs(dataOut, shape, nTxs)
986 986
987 987 dataOut.flagNoData = True
988 988 profileIndex = None
989 989
990 990 if dataOut.flagDataAsBlock:
991 991
992 992 dataOut.data = numpy.reshape(dataOut.data, shape_tuple)
993 993 dataOut.flagNoData = False
994 994
995 995 profileIndex = int(dataOut.nProfiles*self.__nTxs) - 1
996 996
997 997 else:
998 998
999 999 if self.__nTxs < 1:
1000 1000
1001 1001 self.__appendProfile(dataOut, self.__nTxs)
1002 1002 new_data = self.__getBuffer()
1003 1003
1004 1004 if new_data is not None:
1005 1005 dataOut.data = new_data
1006 1006 dataOut.flagNoData = False
1007 1007
1008 1008 profileIndex = dataOut.profileIndex*self.__nTxs
1009 1009
1010 1010 else:
1011 1011 raise ValueError, "nTxs should be greater than 0 and lower than 1, or use VoltageReader(..., getblock=True)"
1012 1012
1013 1013 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1014 1014
1015 1015 dataOut.heightList = numpy.arange(dataOut.nHeights/self.__nTxs) * deltaHeight + dataOut.heightList[0]
1016 1016
1017 1017 dataOut.nProfiles = int(dataOut.nProfiles*self.__nTxs)
1018 1018
1019 1019 dataOut.profileIndex = profileIndex
1020 1020
1021 1021 dataOut.ippSeconds /= self.__nTxs
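# A minimal standalone sketch (hypothetical block, not part of the processing
# chain) of the block reshape above: with nTxs=2 a (2, 4, 8) block becomes
# (2, 8, 4), doubling the profiles and halving the heights.
# >>> block = numpy.arange(2*4*8).reshape(2, 4, 8)
# >>> reshaped = numpy.reshape(block, (2, 4*2, 8/2))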
1022 1022 #
1023 1023 # import collections
1024 1024 # from scipy.stats import mode
1025 1025 #
1026 1026 # class Synchronize(Operation):
1027 1027 #
1028 1028 # isConfig = False
1029 1029 # __profIndex = 0
1030 1030 #
1031 1031 # def __init__(self):
1032 1032 #
1033 1033 # Operation.__init__(self)
1034 1034 # # self.isConfig = False
1035 1035 # self.__powBuffer = None
1036 1036 # self.__startIndex = 0
1037 1037 # self.__pulseFound = False
1038 1038 #
1039 1039 # def __findTxPulse(self, dataOut, channel=0, pulse_with = None):
1040 1040 #
1041 1041 # #Read data
1042 1042 #
1043 1043 # powerdB = dataOut.getPower(channel = channel)
1044 1044 # noisedB = dataOut.getNoise(channel = channel)[0]
1045 1045 #
1046 1046 # self.__powBuffer.extend(powerdB.flatten())
1047 1047 #
1048 1048 # dataArray = numpy.array(self.__powBuffer)
1049 1049 #
1050 1050 # filteredPower = numpy.correlate(dataArray, dataArray[0:self.__nSamples], "same")
1051 1051 #
1052 1052 # maxValue = numpy.nanmax(filteredPower)
1053 1053 #
1054 1054 # if maxValue < noisedB + 10:
1055 1055 # #No transmission pulse was found
1056 1056 # return None
1057 1057 #
1058 1058 # maxValuesIndex = numpy.where(filteredPower > maxValue - 0.1*abs(maxValue))[0]
1059 1059 #
1060 1060 # if len(maxValuesIndex) < 2:
1061 1061 # #Only a single one-baud transmission pulse was found, waiting for the next TX
1062 1062 # return None
1063 1063 #
1064 1064 # phasedMaxValuesIndex = maxValuesIndex - self.__nSamples
1065 1065 #
1066 1066 # #Keep only values spaced by nSamples
1067 1067 # pulseIndex = numpy.intersect1d(maxValuesIndex, phasedMaxValuesIndex)
1068 1068 #
1069 1069 # if len(pulseIndex) < 2:
1070 1070 # #Only one transmission pulse wider than one baud was found
1071 1071 # return None
1072 1072 #
1073 1073 # spacing = pulseIndex[1:] - pulseIndex[:-1]
1074 1074 #
1075 1075 # #remove signals spaced less than 10 units or samples apart
1076 1076 # #(no IPP should be shorter than 10 units)
1077 1077 #
1078 1078 # realIndex = numpy.where(spacing > 10 )[0]
1079 1079 #
1080 1080 # if len(realIndex) < 2:
1081 1081 # #Only one transmission pulse wider than one baud was found
1082 1082 # return None
1083 1083 #
1084 1084 # #Remove wide pulses (keep only the difference between IPPs)
1085 1085 # realPulseIndex = pulseIndex[realIndex]
1086 1086 #
1087 1087 # period = mode(realPulseIndex[1:] - realPulseIndex[:-1])[0][0]
1088 1088 #
1089 1089 # print "IPP = %d samples" %period
1090 1090 #
1091 1091 # self.__newNSamples = dataOut.nHeights #int(period)
1092 1092 # self.__startIndex = int(realPulseIndex[0])
1093 1093 #
1094 1094 # return 1
1095 1095 #
1096 1096 #
1097 1097 # def setup(self, nSamples, nChannels, buffer_size = 4):
1098 1098 #
1099 1099 # self.__powBuffer = collections.deque(numpy.zeros( buffer_size*nSamples,dtype=numpy.float),
1100 1100 # maxlen = buffer_size*nSamples)
1101 1101 #
1102 1102 # bufferList = []
1103 1103 #
1104 1104 # for i in range(nChannels):
1105 1105 # bufferByChannel = collections.deque(numpy.zeros( buffer_size*nSamples, dtype=numpy.complex) + numpy.NAN,
1106 1106 # maxlen = buffer_size*nSamples)
1107 1107 #
1108 1108 # bufferList.append(bufferByChannel)
1109 1109 #
1110 1110 # self.__nSamples = nSamples
1111 1111 # self.__nChannels = nChannels
1112 1112 # self.__bufferList = bufferList
1113 1113 #
1114 1114 # def run(self, dataOut, channel = 0):
1115 1115 #
1116 1116 # if not self.isConfig:
1117 1117 # nSamples = dataOut.nHeights
1118 1118 # nChannels = dataOut.nChannels
1119 1119 # self.setup(nSamples, nChannels)
1120 1120 # self.isConfig = True
1121 1121 #
1122 1122 # #Append new data to internal buffer
1123 1123 # for thisChannel in range(self.__nChannels):
1124 1124 # bufferByChannel = self.__bufferList[thisChannel]
1125 1125 # bufferByChannel.extend(dataOut.data[thisChannel])
1126 1126 #
1127 1127 # if self.__pulseFound:
1128 1128 # self.__startIndex -= self.__nSamples
1129 1129 #
1130 1130 # #Finding Tx Pulse
1131 1131 # if not self.__pulseFound:
1132 1132 # indexFound = self.__findTxPulse(dataOut, channel)
1133 1133 #
1134 1134 # if indexFound == None:
1135 1135 # dataOut.flagNoData = True
1136 1136 # return
1137 1137 #
1138 1138 # self.__arrayBuffer = numpy.zeros((self.__nChannels, self.__newNSamples), dtype = numpy.complex)
1139 1139 # self.__pulseFound = True
1140 1140 # self.__startIndex = indexFound
1141 1141 #
1142 1142 # #If pulse was found ...
1143 1143 # for thisChannel in range(self.__nChannels):
1144 1144 # bufferByChannel = self.__bufferList[thisChannel]
1145 1145 # #print self.__startIndex
1146 1146 # x = numpy.array(bufferByChannel)
1147 1147 # self.__arrayBuffer[thisChannel] = x[self.__startIndex:self.__startIndex+self.__newNSamples]
1148 1148 #
1149 1149 # deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1150 1150 # dataOut.heightList = numpy.arange(self.__newNSamples)*deltaHeight
1151 1151 # # dataOut.ippSeconds = (self.__newNSamples / deltaHeight)/1e6
1152 1152 #
1153 1153 # dataOut.data = self.__arrayBuffer
1154 1154 #
1155 1155 # self.__startIndex += self.__newNSamples
1156 1156 #
1157 1157 # return