##// END OF EJS Templates
remove prints
Juan C. Espinoza -
r995:39af0b6e404b
parent child
Show More
@@ -1,1816 +1,1813
1 1 '''
2 2 Created on Jul 2, 2014
3 3
4 4 @author: roj-idl71
5 5 '''
6 6 import os
7 7 import sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import inspect
13 13 import time, datetime
14 14 import traceback
15 15 import zmq
16 16
17 17 try:
18 18 from gevent import sleep
19 19 except:
20 20 from time import sleep
21 21
22 22 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
23 23 from schainpy.model.data.jroheaderIO import get_dtype_index, get_numpy_dtype, get_procflag_dtype, get_dtype_width
24 24
25 25 LOCALTIME = True
26 26
def isNumber(cad):
    """Return True if the string `cad` can be parsed as a number.

    Any value accepted by float() counts as numeric; everything else
    (including None or malformed text) yields False.
    """
    try:
        float(cad)
    except:
        # float() rejected it (ValueError, TypeError, ...): not numeric
        return False
    return True
45 45
def isFileInEpoch(filename, startUTSeconds, endUTSeconds):
    """Check whether a Jicamarca data file holds data inside a UTC range.

    Reads only the basic header of `filename` and compares its `utc`
    field against the half-open interval [startUTSeconds, endUTSeconds),
    both expressed as seconds since 01/01/1970.

    Returns 1 when the header time falls inside the range; 0 when the
    file cannot be opened, its header is invalid, or the time is out of
    range.
    """
    basicHeaderObj = BasicHeader(LOCALTIME)

    try:
        fp = open(filename, 'rb')
    except IOError:
        print("The file %s can't be opened" % (filename))
        return 0

    sts = basicHeaderObj.read(fp)
    fp.close()

    if not sts:
        print("Skipping the file %s because it has not a valid header" % (filename))
        return 0

    # in-range test: start inclusive, end exclusive
    if startUTSeconds <= basicHeaderObj.utc < endUTSeconds:
        return 1

    return 0
86 86
def isTimeInRange(thisTime, startTime, endTime):
    """Return 1 if thisTime lies inside [startTime, endTime], else 0.

    When endTime < startTime the window is assumed to wrap past
    midnight, so the valid interval becomes [startTime, 24h) U
    [0h, endTime].
    """
    if endTime >= startTime:
        # plain same-day window
        return 0 if (thisTime < startTime or thisTime > endTime) else 1

    # overnight window: only the gap between endTime and startTime is out
    if thisTime < startTime and thisTime > endTime:
        return 0
    return 1
99 99
def isFileInTimeRange(filename, startDate, endDate, startTime, endTime):
    """Check whether a Jicamarca data file overlaps a date/time window.

    Reads the first basic header and the last basic header of the file
    (the latter located blockSize + 24 bytes before EOF) and compares
    their times against [startTime, endTime].  When endTime < startTime
    the window is taken to wrap past midnight into the next day.

    Parameters:
        filename  : full path of a Jicamarca data file (.r)
        startDate : datetime.date, first day of the selected range
        endDate   : datetime.date, last day of the selected range
        startTime : datetime.time, start of the daily window
        endTime   : datetime.time, end of the daily window

    Returns the datetime of the file's first block when the file holds
    data inside the window, otherwise None (also on open/header errors).

    Fix over the original: the file handle is now closed on every exit
    path (the original leaked `fp` on each early return).
    """
    try:
        fp = open(filename, 'rb')
    except IOError:
        print("The file %s can't be opened" % (filename))
        return None

    firstBasicHeaderObj = BasicHeader(LOCALTIME)
    systemHeaderObj = SystemHeader()
    radarControllerHeaderObj = RadarControllerHeader()
    processingHeaderObj = ProcessingHeader()

    lastBasicHeaderObj = BasicHeader(LOCALTIME)

    try:
        if not firstBasicHeaderObj.read(fp):
            print("[Reading] Skipping the file %s because it has not a valid header" % (filename))
            return None

        if not systemHeaderObj.read(fp):
            return None

        if not radarControllerHeaderObj.read(fp):
            return None

        if not processingHeaderObj.read(fp):
            return None

        filesize = os.path.getsize(filename)

        # last block plus its 24-byte basic header
        offset = processingHeaderObj.blockSize + 24

        if filesize <= offset:
            print("[Reading] %s: This file has not enough data" % filename)
            return None

        fp.seek(-offset, 2)
        lastBasicHeaderObj.read(fp)
    finally:
        fp.close()

    thisDatetime = lastBasicHeaderObj.datatime
    thisTime_last_block = thisDatetime.time()

    thisDatetime = firstBasicHeaderObj.datatime
    thisDate = thisDatetime.date()
    thisTime_first_block = thisDatetime.time()

    # General case
    #              o>>>>>>>>>>>>>><<<<<<<<<<<<<<o
    #-----------o----------------------------o-----------
    #        startTime                     endTime

    if endTime >= startTime:
        if (thisTime_last_block < startTime) or (thisTime_first_block > endTime):
            return None

        return thisDatetime

    # If endTime < startTime then endTime belongs to the next day

    #<<<<<<<<<<<o                               o>>>>>>>>>>>
    #-----------o----------------------------o-----------
    #         endTime                    startTime

    if (thisDate == startDate) and (thisTime_last_block < startTime):
        return None

    if (thisDate == endDate) and (thisTime_first_block > endTime):
        return None

    if (thisTime_last_block < startTime) and (thisTime_first_block > endTime):
        return None

    return thisDatetime
203 203
def isFolderInDateRange(folder, startDate=None, endDate=None):
    """Check whether a radar folder name falls inside a date range.

    `folder` should end in "?YYYYDDD" (YYYY = year, DDD = day of year).
    When both startDate and endDate (datetime.date) are given, the
    folder's decoded date must lie inside [startDate, endDate].

    Returns 1 when the folder qualifies, 0 when its name has the wrong
    format or its date is out of range.
    """
    basename = os.path.basename(folder)

    if not isRadarFolder(basename):
        print("The folder %s has not the rigth format" % folder)
        return 0

    if startDate and endDate:
        thisDate = getDateFromRadarFolder(basename)
        if thisDate < startDate or thisDate > endDate:
            return 0

    return 1
243 243
def isFileInDateRange(filename, startDate=None, endDate=None):
    """Check whether a radar data filename falls inside a date range.

    The basename should look like "?YYYYDDDsss" (YYYY = year, DDD = day
    of year, sss = set).  When both startDate and endDate
    (datetime.date) are given, the decoded date must lie inside
    [startDate, endDate].

    Returns 1 when the file qualifies, 0 when its name has the wrong
    format or its date is out of range.
    """
    basename = os.path.basename(filename)

    if not isRadarFile(basename):
        print("The filename %s has not the rigth format" % filename)
        return 0

    if startDate and endDate:
        thisDate = getDateFromRadarFile(basename)
        if thisDate < startDate or thisDate > endDate:
            return 0

    return 1
285 285
def getFileFromSet(path, ext, set):
    """Return the data file in `path` matching extension `ext` and set number `set`.

    Filenames are expected to follow the layout "H YYYY DDD SSS .ext"
    (positions 0, 1-4, 5-7, 8-10).  The year/doy used for the match are
    taken from the last listed file whose name parses.

    Falls back to the newest valid file (case-insensitive sort) when the
    exact set is missing; returns None when the folder holds no file
    with a parseable name.

    Fix over the original: when no filename parsed, `year`/`doy` were
    never assigned and the fnmatch call raised NameError; now the
    function returns None instead.
    """
    validFilelist = []
    fileList = os.listdir(path)

    year = None
    doy = None

    for thisFile in fileList:
        try:
            year = int(thisFile[1:5])
            doy = int(thisFile[5:8])
        except:
            continue

        if os.path.splitext(thisFile)[-1].lower() != ext.lower():
            continue

        validFilelist.append(thisFile)

    if year is None:
        # no file in this folder had a parseable YYYYDDD field
        return None

    myfile = fnmatch.filter(validFilelist, '*%4.4d%3.3d%3.3d*' % (year, doy, set))

    if len(myfile) != 0:
        return myfile[0]

    filename = '*%4.4d%3.3d%3.3d%s' % (year, doy, set, ext.lower())
    print('the filename %s does not exist' % filename)
    print('...going to the last file: ')

    if validFilelist:
        validFilelist = sorted(validFilelist, key=str.lower)
        return validFilelist[-1]

    return None
319 319
def getlastFileFromPath(path, ext):
    """Return the newest file in `path` that matches the radar-name layout.

    Keeps only files whose name follows "H YYYY DDD SSS .ext" (numeric
    year at positions 1-4, numeric day-of-year at 5-7) and whose
    extension equals `ext` (case-insensitive).  The survivors are sorted
    case-insensitively and the last one is returned; None when nothing
    qualifies.
    """
    fileList = os.listdir(path)

    validFilelist = []
    for thisFile in fileList:
        # reject names whose year or doy fields are not numeric
        if not isNumber(thisFile[1:5]):
            continue
        if not isNumber(thisFile[5:8]):
            continue
        if os.path.splitext(thisFile)[-1].lower() != ext.lower():
            continue

        validFilelist.append(thisFile)

    if not validFilelist:
        return None

    return sorted(validFilelist, key=str.lower)[-1]
361 361
def checkForRealPath(path, foldercounter, year, doy, set, ext):
    """Resolve the on-disk spelling of a Jicamarca data file.

    Linux paths are case sensitive, so this tries every combination of
    the folder prefix (no subfolder, 'd', 'D') and the file prefix
    ('d'/'D' for .r voltage files, 'p'/'P' for .pdata spectra files),
    e.g. for .../D2009307/P2009307367.ext it probes:
        .../y2009307367.ext, .../Y2009307367.ext,
        .../x2009307/y2009307367.ext, ... , .../X2009307/Y2009307367.ext
    A nonzero `foldercounter` appends "_NN" to the folder name.

    Returns (fullpath, filename) for the first existing combination;
    (None, last_filename_tried) when none exists; (None, None) for an
    unrecognized extension.
    """
    if ext.lower() == ".r":        # voltage data
        filePrefixes = ['d', 'D']
    elif ext.lower() == ".pdata":  # spectra data
        filePrefixes = ['p', 'P']
    else:
        return None, None

    filename = None

    for dirPrefix in [None, 'd', 'D']:
        if dirPrefix is None:
            thispath = path
        elif foldercounter == 0:
            # folder named xYYYYDDD (x = d or D)
            thispath = os.path.join(path, "%s%04d%03d" % (dirPrefix, year, doy))
        else:
            thispath = os.path.join(path, "%s%04d%03d_%02d" % (dirPrefix, year, doy, foldercounter))

        for filePrefix in filePrefixes:
            # candidate file xYYYYDDDSSS.ext
            filename = "%s%04d%03d%03d%s" % (filePrefix, year, doy, set, ext)
            fullfilename = os.path.join(thispath, filename)

            if os.path.exists(fullfilename):
                return fullfilename, filename

    return None, filename
420 420
def isRadarFolder(folder):
    """Return 1 when `folder` looks like ?YYYYDDD (numeric year and doy), else 0."""
    try:
        int(folder[1:5])
        int(folder[5:8])
    except:
        return 0
    return 1
429 429
def isRadarFile(file):
    """Return 1 when `file` looks like ?YYYYDDDSSS... (numeric year, doy and set), else 0."""
    try:
        int(file[1:5])
        int(file[5:8])
        int(file[8:11])
    except:
        return 0
    return 1
439 439
def getDateFromRadarFile(file):
    """Decode a ?YYYYDDDSSS... filename into a datetime.date; None if unparsable."""
    try:
        year = int(file[1:5])
        doy = int(file[5:8])
        int(file[8:11])  # set field: parsed only to validate the name
    except:
        return None

    # doy is 1-based: Jan 1st is day 1
    return datetime.date(year, 1, 1) + datetime.timedelta(doy - 1)
450 450
def getDateFromRadarFolder(folder):
    """Decode a ?YYYYDDD folder name into a datetime.date; None if unparsable."""
    try:
        year = int(folder[1:5])
        doy = int(folder[5:8])
    except:
        return None

    # doy is 1-based: Jan 1st is day 1
    return datetime.date(year, 1, 1) + datetime.timedelta(doy - 1)
460 460
class JRODataIO:
    """Base class holding the state shared by all JRO raw-data readers/writers."""

    # speed of light in m/s; subclasses use it for range/IPP conversions
    c = 3E8

    # set to True once the concrete reader/writer has been configured
    isConfig = False

    basicHeaderObj = None

    systemHeaderObj = None

    radarControllerHeaderObj = None

    processingHeaderObj = None

    # numpy dtype of one complex sample, derived from the processing header
    dtype = None

    pathList = []

    filenameList = []

    filename = None

    ext = None

    # 1 right after a new file is opened; cleared once reading proceeds
    flagIsNewFile = 1

    # 1 when the gap between consecutive blocks exceeds maxTimeStep seconds
    flagDiscontinuousBlock = 0

    flagIsNewBlock = 0

    # file object currently being read/written
    fp = None

    firstHeaderSize = 0

    # a basic header is always 24 bytes
    basicHeaderSize = 24

    versionFile = 1103

    fileSize = None

    # expected final size computed from the processing header
    fileSizeByHeader = None

    fileIndex = None

    profileIndex = None

    blockIndex = None

    nTotalBlocks = None

    # maximum seconds between blocks before flagging a discontinuity
    maxTimeStep = 30

    lastUTTime = None

    datablock = None

    dataOut = None

    blocksize = None

    getByBlock = False

    def __init__(self):
        # abstract base: concrete readers/writers must define their own __init__
        raise NotImplementedError

    def run(self):
        # entry point invoked by the processing framework; subclass-specific
        raise NotImplementedError

    def getDtypeWidth(self):
        """Return the width in bytes of one sample component for self.dtype."""
        dtype_index = get_dtype_index(self.dtype)
        dtype_width = get_dtype_width(dtype_index)

        return dtype_width

    def getAllowedArgs(self):
        """Return the argument names accepted by this object's run() method."""
        return inspect.getargspec(self.run).args
542 542
class JRODataReader(JRODataIO):
    """Base reader: locates JRO data files (offline or live) and walks their blocks."""

    # 1 when reading files while they are still being written (live acquisition)
    online = 0

    realtime = 0

    # data blocks read so far from the current file
    nReadBlocks = 0

    delay = 10   # number of seconds waiting a new file

    nTries = 3   # quantity tries

    nFiles = 3   # number of files for searching

    path = None

    # suffix counter for folders named xYYYYDDD_NN
    foldercounter = 0

    flagNoMoreFiles = 0

    datetimeList = []

    # online mode: 1 until the first file has been positioned
    __isFirstTimeOnline = 1

    __printInfo = True

    profileIndex = None

    nTxs = 1

    txIndex = None

    #Added--------------------

    selBlocksize = None

    selBlocktime = None


    def __init__(self):
        """
        This class is used to find data files

        Example:
            reader = JRODataReader()
            fileList = reader.findDataFiles()

        """
        pass
594 594
595 595
    def createObjByDefault(self):
        """Create the reader's default dataOut object; must be provided by subclasses."""
        raise NotImplementedError
601 601
    def getBlockDimension(self):
        # compute the shape/size of one data block; must be provided by subclasses
        raise NotImplementedError
605 605
    def __searchFilesOffLine(self,
                            path,
                            startDate=None,
                            endDate=None,
                            startTime=datetime.time(0,0,0),
                            endTime=datetime.time(23,59,59),
                            set=None,
                            expLabel='',
                            ext='.r',
                            queue=None,
                            cursor=None,
                            skip=None,
                            walk=True):
        """Collect every offline data file inside the date/time window.

        Uses self.findDatafiles() to enumerate candidate folders, then
        filters each folder's *ext files by date and by the daily time
        window.  `cursor`/`skip` select one slice of each folder's file
        list (for partitioning work among parallel readers); when
        skip == 0 the total file count is reported through `queue`
        instead.

        Fills self.filenameList / self.datetimeList and returns
        (pathList, filenameList), or (None, None) when nothing matches.
        """
        self.filenameList = []
        self.datetimeList = []

        pathList = []

        dateList, pathList = self.findDatafiles(path, startDate, endDate, expLabel, ext, walk, include_path=True)

        if dateList == []:
            # no folder matched the date range
            return None, None

        if len(dateList) > 1:
            print "[Reading] Data found for date range [%s - %s]: total days = %d" %(startDate, endDate, len(dateList))
        else:
            print "[Reading] Data found for date range [%s - %s]: date = %s" %(startDate, endDate, dateList[0])

        filenameList = []
        datetimeList = []

        for thisPath in pathList:

            fileList = glob.glob1(thisPath, "*%s" %ext)
            fileList.sort()

            skippedFileList = []

            if cursor is not None and skip is not None:

                if skip == 0:

                    if queue is not None:
                        # report how many files this folder holds
                        queue.put(len(fileList))

                    skippedFileList = []
                else:
                    # take this reader's slice of the folder
                    skippedFileList = fileList[cursor*skip: cursor*skip + skip]

            else:
                skippedFileList = fileList

            for file in skippedFileList:

                filename = os.path.join(thisPath,file)

                if not isFileInDateRange(filename, startDate, endDate):
                    continue

                # returns the first-block datetime, or None when out of window
                thisDatetime = isFileInTimeRange(filename, startDate, endDate, startTime, endTime)

                if not(thisDatetime):
                    continue

                filenameList.append(filename)
                datetimeList.append(thisDatetime)

        if not(filenameList):
            print "[Reading] Time range selected invalid [%s - %s]: No *%s files in %s)" %(startTime, endTime, ext, path)
            return None, None

        print "[Reading] %d file(s) was(were) found in time range: %s - %s" %(len(filenameList), startTime, endTime)
        print

        for i in range(len(filenameList)):
            print "[Reading] %s -> [%s]" %(filenameList[i], datetimeList[i].ctime())

        self.filenameList = filenameList
        self.datetimeList = datetimeList

        return pathList, filenameList
688 688
    def __searchFilesOnLine(self, path, expLabel = "", ext = None, walk=True, set=None):
        """
        Find the newest data file in the newest radar folder under `path`.

        Input:
            path     : folder that contains the data folders/files

            expLabel : sub-experiment name (subfolder)

            ext      : file extension to look for

            walk     : when True, descend into xYYYYDDD day subfolders;
                       when False, read `path` itself

            set      : when given, pick that set number instead of the
                       last file

        Return:
            (fullpath, foldercounter, filename, year, doy, set) for the
            file found, or six Nones when no usable file exists.
        """
        if not os.path.isdir(path):
            return None, None, None, None, None, None

        dirList = []

        if not walk:
            fullpath = path
            foldercounter = 0
        else:
            # keep only subdirectories whose name matches xYYYYDDD
            for thisPath in os.listdir(path):
                if not os.path.isdir(os.path.join(path,thisPath)):
                    continue
                if not isRadarFolder(thisPath):
                    continue

                dirList.append(thisPath)

            if not(dirList):
                return None, None, None, None, None, None

            dirList = sorted( dirList, key=str.lower )

            # newest folder; "_NN" suffix (if any) becomes the foldercounter
            doypath = dirList[-1]
            foldercounter = int(doypath.split('_')[1]) if len(doypath.split('_'))>1 else 0
            fullpath = os.path.join(path, doypath, expLabel)


        print "[Reading] %s folder was found: " %(fullpath )

        if set == None:
            filename = getlastFileFromPath(fullpath, ext)
        else:
            filename = getFileFromSet(fullpath, ext, set)

        if not(filename):
            return None, None, None, None, None, None

        print "[Reading] %s file was found" %(filename)

        if not(self.__verifyFile(os.path.join(fullpath, filename))):
            return None, None, None, None, None, None

        # decode xYYYYDDDSSS from the filename
        year = int( filename[1:5] )
        doy  = int( filename[5:8] )
        set  = int( filename[8:11] )

        return fullpath, foldercounter, filename, year, doy, set
761 761
762 762 def __setNextFileOffline(self):
763 763
764 764 idFile = self.fileIndex
765 765
766 766 while (True):
767 767 idFile += 1
768 768 if not(idFile < len(self.filenameList)):
769 769 self.flagNoMoreFiles = 1
770 770 # print "[Reading] No more Files"
771 771 return 0
772 772
773 773 filename = self.filenameList[idFile]
774 774
775 775 if not(self.__verifyFile(filename)):
776 776 continue
777 777
778 778 fileSize = os.path.getsize(filename)
779 779 fp = open(filename,'rb')
780 780 break
781 781
782 782 self.flagIsNewFile = 1
783 783 self.fileIndex = idFile
784 784 self.filename = filename
785 785 self.fileSize = fileSize
786 786 self.fp = fp
787 787
788 788 # print "[Reading] Setting the file: %s"%self.filename
789 789
790 790 return 1
791 791
    def __setNextFileOnline(self):
        """
        Find the next file with enough data to be read inside the
        current folder; when no valid file shows up, wait self.delay
        seconds and retry over the following self.nFiles candidate set
        numbers, rolling over to the next day/folder when they are
        exhausted.

        Affected:
            self.flagIsNewFile
            self.filename
            self.fileSize
            self.fp
            self.set
            self.flagNoMoreFiles

        Return:
            0 : no valid next file could be found
            1 : the file was opened successfully and is ready to be read

        Exceptions:
            If a given file cannot be opened
        """
        nFiles = 0
        fileOk_flag = False
        firstTime_flag = True

        self.set += 1

        if self.set > 999:
            # set numbers are three digits: wrap and bump the folder suffix
            self.set = 0
            self.foldercounter += 1

        # look for the first available file
        fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
        if fullfilename:
            if self.__verifyFile(fullfilename, False):
                fileOk_flag = True

        # if no file was found, wait and search again
        if not(fileOk_flag):
            for nFiles in range(self.nFiles+1):  # probe the next self.nFiles+1 candidate files

                if firstTime_flag:  # on the first candidate retry self.nTries times
                    tries = self.nTries
                else:
                    tries = 1  # afterwards only once per candidate

                for nTries in range( tries ):
                    if firstTime_flag:
                        print "\t[Reading] Waiting %0.2f sec for the next file: \"%s\" , try %03d ..." % ( self.delay, filename, nTries+1 )
                        sleep( self.delay )
                    else:
                        print "\t[Reading] Searching the next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext)

                    fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
                    if fullfilename:
                        if self.__verifyFile(fullfilename):
                            fileOk_flag = True
                            break

                if fileOk_flag:
                    break

                firstTime_flag = False

                print "\t[Reading] Skipping the file \"%s\" due to this file doesn't exist" % filename
                self.set += 1

                if nFiles == (self.nFiles-1):  # candidates exhausted: move on to the next day's folder
                    self.set = 0
                    self.doy += 1
                    self.foldercounter = 0

        if fileOk_flag:
            self.fileSize = os.path.getsize( fullfilename )
            self.filename = fullfilename
            self.flagIsNewFile = 1
            if self.fp != None: self.fp.close()
            self.fp = open(fullfilename, 'rb')
            self.flagNoMoreFiles = 0
        else:
            self.fileSize = 0
            self.filename = None
            self.flagIsNewFile = 0
            self.fp = None
            self.flagNoMoreFiles = 1

        return fileOk_flag
881 881
882 882 def setNextFile(self):
883 883 if self.fp != None:
884 884 self.fp.close()
885 885
886 886 if self.online:
887 887 newFile = self.__setNextFileOnline()
888 888 else:
889 889 newFile = self.__setNextFileOffline()
890 890
891 891 if not(newFile):
892 892 print '[Reading] No more files to read'
893 893 return 0
894 894
895 895 if self.verbose:
896 896 print '[Reading] Setting the file: %s' % self.filename
897 897
898 898 self.__readFirstHeader()
899 899 self.nReadBlocks = 0
900 900 return 1
901 901
    def __waitNewBlock(self):
        """
        Return 1 if a new data block appeared in the current file, 0 otherwise.

        Offline mode always returns 0 immediately.  Online mode re-opens
        the file up to self.nTries times, sleeping self.delay seconds
        between attempts, until the file has grown by one block plus a
        basic header.
        """
        if not self.online:
            return 0

        if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
            # this file has already delivered every block it will contain
            return 0

        currentPointer = self.fp.tell()

        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        for nTries in range( self.nTries ):

            # re-open so the OS refreshes the file size/contents
            self.fp.close()
            self.fp = open( self.filename, 'rb' )
            self.fp.seek( currentPointer )

            self.fileSize = os.path.getsize( self.filename )
            currentSize = self.fileSize - currentPointer

            if ( currentSize >= neededSize ):
                self.basicHeaderObj.read(self.fp)
                return 1

            if self.fileSize == self.fileSizeByHeader:
                # file reached its final size: no more blocks will arrive
                return 0

            print "[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
            sleep( self.delay )

        return 0
940 940
941 941 def waitDataBlock(self,pointer_location):
942 942
943 943 currentPointer = pointer_location
944 944
945 945 neededSize = self.processingHeaderObj.blockSize #+ self.basicHeaderSize
946 946
947 947 for nTries in range( self.nTries ):
948 948 self.fp.close()
949 949 self.fp = open( self.filename, 'rb' )
950 950 self.fp.seek( currentPointer )
951 951
952 952 self.fileSize = os.path.getsize( self.filename )
953 953 currentSize = self.fileSize - currentPointer
954 954
955 955 if ( currentSize >= neededSize ):
956 956 return 1
957 957
958 958 print "[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
959 959 sleep( self.delay )
960 960
961 961 return 0
962 962
    def __jumpToLastBlock(self):
        """Seek forward to the last complete block of the current file.

        Only acts the first time an online file is positioned
        (self.__isFirstTimeOnline); afterwards it is a no-op.
        """
        if not(self.__isFirstTimeOnline):
            return

        csize = self.fileSize - self.fp.tell()
        blocksize = self.processingHeaderObj.blockSize

        # skip the first data block (its basic header was already read)
        if csize > self.processingHeaderObj.blockSize:
            self.fp.seek(self.fp.tell() + blocksize)
        else:
            return

        csize = self.fileSize - self.fp.tell()
        neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
        while True:

            if self.fp.tell()<self.fileSize:
                self.fp.seek(self.fp.tell() + neededsize)
            else:
                # overshot the end: step back one block and stop there
                self.fp.seek(self.fp.tell() - neededsize)
                break

        self.flagIsNewFile = 0
        self.__isFirstTimeOnline = 0
995 995
996 996 def __setNewBlock(self):
997 997 #if self.server is None:
998 998 if self.fp == None:
999 999 return 0
1000 1000
1001 1001 # if self.online:
1002 # self.__jumpToLastBlock()
1003 print 'xxxx'
1002 # self.__jumpToLastBlock()
1004 1003
1005 1004 if self.flagIsNewFile:
1006 1005 self.lastUTTime = self.basicHeaderObj.utc
1007 1006 return 1
1008 1007
1009 1008 if self.realtime:
1010 1009 self.flagDiscontinuousBlock = 1
1011 1010 if not(self.setNextFile()):
1012 1011 return 0
1013 1012 else:
1014 return 1
1015 print 'xxxx'
1013 return 1
1016 1014 #if self.server is None:
1017 1015 currentSize = self.fileSize - self.fp.tell()
1018 1016 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
1019 1017 if (currentSize >= neededSize):
1020 1018 self.basicHeaderObj.read(self.fp)
1021 1019 self.lastUTTime = self.basicHeaderObj.utc
1022 1020 return 1
1023 1021 # else:
1024 1022 # self.basicHeaderObj.read(self.zHeader)
1025 1023 # self.lastUTTime = self.basicHeaderObj.utc
1026 1024 # return 1
1027 1025 if self.__waitNewBlock():
1028 1026 self.lastUTTime = self.basicHeaderObj.utc
1029 1027 return 1
1030 1028 #if self.server is None:
1031 1029 if not(self.setNextFile()):
1032 1030 return 0
1033 1031
1034 1032 deltaTime = self.basicHeaderObj.utc - self.lastUTTime #
1035 1033 self.lastUTTime = self.basicHeaderObj.utc
1036 1034
1037 1035 self.flagDiscontinuousBlock = 0
1038 1036
1039 1037 if deltaTime > self.maxTimeStep:
1040 1038 self.flagDiscontinuousBlock = 1
1041 1039
1042 1040 return 1
1043 1041
1044 1042 def readNextBlock(self):
1045 1043
1046 1044 #Skip block out of startTime and endTime
1047 while True:
1048 print 'cxxxx'
1045 while True:
1049 1046 if not(self.__setNewBlock()):
1050 1047 print 'returning'
1051 1048 return 0
1052 print 'dxxx'
1049
1053 1050 if not(self.readBlock()):
1054 1051 return 0
1055 1052
1056 1053 self.getBasicHeader()
1057 1054
1058 1055 if not isTimeInRange(self.dataOut.datatime.time(), self.startTime, self.endTime):
1059 1056
1060 1057 print "[Reading] Block No. %d/%d -> %s [Skipping]" %(self.nReadBlocks,
1061 1058 self.processingHeaderObj.dataBlocksPerFile,
1062 1059 self.dataOut.datatime.ctime())
1063 1060 continue
1064 1061
1065 1062 break
1066 1063
1067 1064 if self.verbose:
1068 1065 print "[Reading] Block No. %d/%d -> %s" %(self.nReadBlocks,
1069 1066 self.processingHeaderObj.dataBlocksPerFile,
1070 1067 self.dataOut.datatime.ctime())
1071 1068 return 1
1072 1069
1073 1070 def __readFirstHeader(self):
1074 1071
1075 1072 self.basicHeaderObj.read(self.fp)
1076 1073 self.systemHeaderObj.read(self.fp)
1077 1074 self.radarControllerHeaderObj.read(self.fp)
1078 1075 self.processingHeaderObj.read(self.fp)
1079 1076
1080 1077 self.firstHeaderSize = self.basicHeaderObj.size
1081 1078
1082 1079 datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
1083 1080 if datatype == 0:
1084 1081 datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
1085 1082 elif datatype == 1:
1086 1083 datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')])
1087 1084 elif datatype == 2:
1088 1085 datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')])
1089 1086 elif datatype == 3:
1090 1087 datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')])
1091 1088 elif datatype == 4:
1092 1089 datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')])
1093 1090 elif datatype == 5:
1094 1091 datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')])
1095 1092 else:
1096 1093 raise ValueError, 'Data type was not defined'
1097 1094
1098 1095 self.dtype = datatype_str
1099 1096 #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
1100 1097 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1)
1101 1098 # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
1102 1099 # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
1103 1100 self.getBlockDimension()
1104 1101
1105 1102 def __verifyFile(self, filename, msgFlag=True):
1106 1103
1107 1104 msg = None
1108 1105
1109 1106 try:
1110 1107 fp = open(filename, 'rb')
1111 1108 except IOError:
1112 1109
1113 1110 if msgFlag:
1114 1111 print "[Reading] File %s can't be opened" % (filename)
1115 1112
1116 1113 return False
1117 1114
1118 1115 currentPosition = fp.tell()
1119 1116 neededSize = self.processingHeaderObj.blockSize + self.firstHeaderSize
1120 1117
1121 1118 if neededSize == 0:
1122 1119 basicHeaderObj = BasicHeader(LOCALTIME)
1123 1120 systemHeaderObj = SystemHeader()
1124 1121 radarControllerHeaderObj = RadarControllerHeader()
1125 1122 processingHeaderObj = ProcessingHeader()
1126 1123
1127 1124 if not( basicHeaderObj.read(fp) ):
1128 1125 fp.close()
1129 1126 return False
1130 1127
1131 1128 if not( systemHeaderObj.read(fp) ):
1132 1129 fp.close()
1133 1130 return False
1134 1131
1135 1132 if not( radarControllerHeaderObj.read(fp) ):
1136 1133 fp.close()
1137 1134 return False
1138 1135
1139 1136 if not( processingHeaderObj.read(fp) ):
1140 1137 fp.close()
1141 1138 return False
1142 1139
1143 1140 neededSize = processingHeaderObj.blockSize + basicHeaderObj.size
1144 1141 else:
1145 1142 msg = "[Reading] Skipping the file %s due to it hasn't enough data" %filename
1146 1143
1147 1144 fp.close()
1148 1145
1149 1146 fileSize = os.path.getsize(filename)
1150 1147 currentSize = fileSize - currentPosition
1151 1148
1152 1149 if currentSize < neededSize:
1153 1150 if msgFlag and (msg != None):
1154 1151 print msg
1155 1152 return False
1156 1153
1157 1154 return True
1158 1155
1159 1156 def findDatafiles(self, path, startDate=None, endDate=None, expLabel='', ext='.r', walk=True, include_path=False):
1160 1157
1161 1158 path_empty = True
1162 1159
1163 1160 dateList = []
1164 1161 pathList = []
1165 1162
1166 1163 multi_path = path.split(',')
1167 1164
1168 1165 if not walk:
1169 1166
1170 1167 for single_path in multi_path:
1171 1168
1172 1169 if not os.path.isdir(single_path):
1173 1170 continue
1174 1171
1175 1172 fileList = glob.glob1(single_path, "*"+ext)
1176 1173
1177 1174 if not fileList:
1178 1175 continue
1179 1176
1180 1177 path_empty = False
1181 1178
1182 1179 fileList.sort()
1183 1180
1184 1181 for thisFile in fileList:
1185 1182
1186 1183 if not os.path.isfile(os.path.join(single_path, thisFile)):
1187 1184 continue
1188 1185
1189 1186 if not isRadarFile(thisFile):
1190 1187 continue
1191 1188
1192 1189 if not isFileInDateRange(thisFile, startDate, endDate):
1193 1190 continue
1194 1191
1195 1192 thisDate = getDateFromRadarFile(thisFile)
1196 1193
1197 1194 if thisDate in dateList:
1198 1195 continue
1199 1196
1200 1197 dateList.append(thisDate)
1201 1198 pathList.append(single_path)
1202 1199
1203 1200 else:
1204 1201 for single_path in multi_path:
1205 1202
1206 1203 if not os.path.isdir(single_path):
1207 1204 continue
1208 1205
1209 1206 dirList = []
1210 1207
1211 1208 for thisPath in os.listdir(single_path):
1212 1209
1213 1210 if not os.path.isdir(os.path.join(single_path,thisPath)):
1214 1211 continue
1215 1212
1216 1213 if not isRadarFolder(thisPath):
1217 1214 continue
1218 1215
1219 1216 if not isFolderInDateRange(thisPath, startDate, endDate):
1220 1217 continue
1221 1218
1222 1219 dirList.append(thisPath)
1223 1220
1224 1221 if not dirList:
1225 1222 continue
1226 1223
1227 1224 dirList.sort()
1228 1225
1229 1226 for thisDir in dirList:
1230 1227
1231 1228 datapath = os.path.join(single_path, thisDir, expLabel)
1232 1229 fileList = glob.glob1(datapath, "*"+ext)
1233 1230
1234 1231 if not fileList:
1235 1232 continue
1236 1233
1237 1234 path_empty = False
1238 1235
1239 1236 thisDate = getDateFromRadarFolder(thisDir)
1240 1237
1241 1238 pathList.append(datapath)
1242 1239 dateList.append(thisDate)
1243 1240
1244 1241 dateList.sort()
1245 1242
1246 1243 if walk:
1247 1244 pattern_path = os.path.join(multi_path[0], "[dYYYYDDD]", expLabel)
1248 1245 else:
1249 1246 pattern_path = multi_path[0]
1250 1247
1251 1248 if path_empty:
1252 1249 print "[Reading] No *%s files in %s for %s to %s" %(ext, pattern_path, startDate, endDate)
1253 1250 else:
1254 1251 if not dateList:
1255 1252 print "[Reading] Date range selected invalid [%s - %s]: No *%s files in %s)" %(startDate, endDate, ext, path)
1256 1253
1257 1254 if include_path:
1258 1255 return dateList, pathList
1259 1256
1260 1257 return dateList
1261 1258
1262 1259 def setup(self,
1263 1260 path=None,
1264 1261 startDate=None,
1265 1262 endDate=None,
1266 1263 startTime=datetime.time(0,0,0),
1267 1264 endTime=datetime.time(23,59,59),
1268 1265 set=None,
1269 1266 expLabel = "",
1270 1267 ext = None,
1271 1268 online = False,
1272 1269 delay = 60,
1273 1270 walk = True,
1274 1271 getblock = False,
1275 1272 nTxs = 1,
1276 1273 realtime=False,
1277 1274 blocksize=None,
1278 1275 blocktime=None,
1279 1276 queue=None,
1280 1277 skip=None,
1281 1278 cursor=None,
1282 1279 warnings=True,
1283 1280 verbose=True,
1284 1281 server=None):
1285 1282 if server is not None:
1286 1283 if 'tcp://' in server:
1287 1284 address = server
1288 1285 else:
1289 1286 address = 'ipc:///tmp/%s' % server
1290 1287 self.server = address
1291 1288 self.context = zmq.Context()
1292 1289 self.receiver = self.context.socket(zmq.PULL)
1293 1290 self.receiver.connect(self.server)
1294 1291 time.sleep(0.5)
1295 1292 print '[Starting] ReceiverData from {}'.format(self.server)
1296 1293 else:
1297 1294 self.server = None
1298 1295 if path == None:
1299 1296 raise ValueError, "[Reading] The path is not valid"
1300 1297
1301 1298 if ext == None:
1302 1299 ext = self.ext
1303 1300
1304 1301 if online:
1305 1302 print "[Reading] Searching files in online mode..."
1306 1303
1307 1304 for nTries in range( self.nTries ):
1308 1305 fullpath, foldercounter, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext, walk=walk, set=set)
1309 1306
1310 1307 if fullpath:
1311 1308 break
1312 1309
1313 1310 print '[Reading] Waiting %0.2f sec for an valid file in %s: try %02d ...' % (self.delay, path, nTries+1)
1314 1311 sleep( self.delay )
1315 1312
1316 1313 if not(fullpath):
1317 1314 print "[Reading] There 'isn't any valid file in %s" % path
1318 1315 return
1319 1316
1320 1317 self.year = year
1321 1318 self.doy = doy
1322 1319 self.set = set - 1
1323 1320 self.path = path
1324 1321 self.foldercounter = foldercounter
1325 1322 last_set = None
1326 1323 else:
1327 1324 print "[Reading] Searching files in offline mode ..."
1328 1325 pathList, filenameList = self.__searchFilesOffLine(path, startDate=startDate, endDate=endDate,
1329 1326 startTime=startTime, endTime=endTime,
1330 1327 set=set, expLabel=expLabel, ext=ext,
1331 1328 walk=walk, cursor=cursor,
1332 1329 skip=skip, queue=queue)
1333 1330
1334 1331 if not(pathList):
1335 1332 # print "[Reading] No *%s files in %s (%s - %s)"%(ext, path,
1336 1333 # datetime.datetime.combine(startDate,startTime).ctime(),
1337 1334 # datetime.datetime.combine(endDate,endTime).ctime())
1338 1335
1339 1336 # sys.exit(-1)
1340 1337
1341 1338 self.fileIndex = -1
1342 1339 self.pathList = []
1343 1340 self.filenameList = []
1344 1341 return
1345 1342
1346 1343 self.fileIndex = -1
1347 1344 self.pathList = pathList
1348 1345 self.filenameList = filenameList
1349 1346 file_name = os.path.basename(filenameList[-1])
1350 1347 basename, ext = os.path.splitext(file_name)
1351 1348 last_set = int(basename[-3:])
1352 1349
1353 1350 self.online = online
1354 1351 self.realtime = realtime
1355 1352 self.delay = delay
1356 1353 ext = ext.lower()
1357 1354 self.ext = ext
1358 1355 self.getByBlock = getblock
1359 1356 self.nTxs = nTxs
1360 1357 self.startTime = startTime
1361 1358 self.endTime = endTime
1362 1359
1363 1360 #Added-----------------
1364 1361 self.selBlocksize = blocksize
1365 1362 self.selBlocktime = blocktime
1366 1363
1367 1364 # Verbose-----------
1368 1365 self.verbose = verbose
1369 1366 self.warnings = warnings
1370 1367
1371 1368 if not(self.setNextFile()):
1372 1369 if (startDate!=None) and (endDate!=None):
1373 1370 print "[Reading] No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
1374 1371 elif startDate != None:
1375 1372 print "[Reading] No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
1376 1373 else:
1377 1374 print "[Reading] No files"
1378 1375
1379 1376 self.fileIndex = -1
1380 1377 self.pathList = []
1381 1378 self.filenameList = []
1382 1379 return
1383 1380
1384 1381 # self.getBasicHeader()
1385 1382
1386 1383 if last_set != None:
1387 1384 self.dataOut.last_block = last_set * self.processingHeaderObj.dataBlocksPerFile + self.basicHeaderObj.dataBlock
1388 1385 return
1389 1386
1390 1387 def getBasicHeader(self):
1391 1388
1392 1389 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000. + self.profileIndex * self.radarControllerHeaderObj.ippSeconds
1393 1390
1394 1391 self.dataOut.flagDiscontinuousBlock = self.flagDiscontinuousBlock
1395 1392
1396 1393 self.dataOut.timeZone = self.basicHeaderObj.timeZone
1397 1394
1398 1395 self.dataOut.dstFlag = self.basicHeaderObj.dstFlag
1399 1396
1400 1397 self.dataOut.errorCount = self.basicHeaderObj.errorCount
1401 1398
1402 1399 self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime
1403 1400
1404 1401 self.dataOut.ippSeconds = self.radarControllerHeaderObj.ippSeconds/self.nTxs
1405 1402
1406 1403 # self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock*self.nTxs
1407 1404
1408 1405
1409 1406 def getFirstHeader(self):
1410 1407
1411 1408 raise NotImplementedError
1412 1409
1413 1410 def getData(self):
1414 1411
1415 1412 raise NotImplementedError
1416 1413
1417 1414 def hasNotDataInBuffer(self):
1418 1415
1419 1416 raise NotImplementedError
1420 1417
1421 1418 def readBlock(self):
1422 1419
1423 1420 raise NotImplementedError
1424 1421
1425 1422 def isEndProcess(self):
1426 1423
1427 1424 return self.flagNoMoreFiles
1428 1425
1429 1426 def printReadBlocks(self):
1430 1427
1431 1428 print "[Reading] Number of read blocks per file %04d" %self.nReadBlocks
1432 1429
1433 1430 def printTotalBlocks(self):
1434 1431
1435 1432 print "[Reading] Number of read blocks %04d" %self.nTotalBlocks
1436 1433
1437 1434 def printNumberOfBlock(self):
1438 1435
1439 1436 if self.flagIsNewBlock:
1440 1437 print "[Reading] Block No. %d/%d -> %s" %(self.nReadBlocks,
1441 1438 self.processingHeaderObj.dataBlocksPerFile,
1442 1439 self.dataOut.datatime.ctime())
1443 1440
1444 1441 def printInfo(self):
1445 1442
1446 1443 if self.__printInfo == False:
1447 1444 return
1448 1445
1449 1446 self.basicHeaderObj.printInfo()
1450 1447 self.systemHeaderObj.printInfo()
1451 1448 self.radarControllerHeaderObj.printInfo()
1452 1449 self.processingHeaderObj.printInfo()
1453 1450
1454 1451 self.__printInfo = False
1455 1452
1456 1453
1457 1454 def run(self,
1458 1455 path=None,
1459 1456 startDate=None,
1460 1457 endDate=None,
1461 1458 startTime=datetime.time(0,0,0),
1462 1459 endTime=datetime.time(23,59,59),
1463 1460 set=None,
1464 1461 expLabel = "",
1465 1462 ext = None,
1466 1463 online = False,
1467 1464 delay = 60,
1468 1465 walk = True,
1469 1466 getblock = False,
1470 1467 nTxs = 1,
1471 1468 realtime=False,
1472 1469 blocksize=None,
1473 1470 blocktime=None,
1474 1471 queue=None,
1475 1472 skip=None,
1476 1473 cursor=None,
1477 1474 warnings=True,
1478 1475 server=None,
1479 1476 verbose=True, **kwargs):
1480 1477
1481 1478 if not(self.isConfig):
1482 1479 # self.dataOut = dataOut
1483 1480 self.setup( path=path,
1484 1481 startDate=startDate,
1485 1482 endDate=endDate,
1486 1483 startTime=startTime,
1487 1484 endTime=endTime,
1488 1485 set=set,
1489 1486 expLabel=expLabel,
1490 1487 ext=ext,
1491 1488 online=online,
1492 1489 delay=delay,
1493 1490 walk=walk,
1494 1491 getblock=getblock,
1495 1492 nTxs=nTxs,
1496 1493 realtime=realtime,
1497 1494 blocksize=blocksize,
1498 1495 blocktime=blocktime,
1499 1496 queue=queue,
1500 1497 skip=skip,
1501 1498 cursor=cursor,
1502 1499 warnings=warnings,
1503 1500 server=server,
1504 1501 verbose=verbose)
1505 1502 self.isConfig = True
1506 1503 if server is None:
1507 1504 self.getData()
1508 1505 else:
1509 1506 self.getFromServer()
1510 1507
class JRODataWriter(JRODataIO):

    """
    Base class for writing processed-data files (.r or .pdata). Data is
    always written to disk one block at a time.
    """

    # index of the block being written inside the current file
    blockIndex = 0

    # output directory
    path = None

    # current file set number (the SSS field of the file name)
    setFile = None

    profilesPerBlock = None

    blocksPerFile = None

    nWriteBlocks = 0

    # date of the file currently open; used to detect day changes
    fileDate = None
1531 1528
1532 1529 def __init__(self, dataOut=None):
1533 1530 raise NotImplementedError
1534 1531
1535 1532
1536 1533 def hasAllDataInBuffer(self):
1537 1534 raise NotImplementedError
1538 1535
1539 1536
1540 1537 def setBlockDimension(self):
1541 1538 raise NotImplementedError
1542 1539
1543 1540
1544 1541 def writeBlock(self):
1545 1542 raise NotImplementedError
1546 1543
1547 1544
1548 1545 def putData(self):
1549 1546 raise NotImplementedError
1550 1547
1551 1548
1552 1549 def getProcessFlags(self):
1553 1550
1554 1551 processFlags = 0
1555 1552
1556 1553 dtype_index = get_dtype_index(self.dtype)
1557 1554 procflag_dtype = get_procflag_dtype(dtype_index)
1558 1555
1559 1556 processFlags += procflag_dtype
1560 1557
1561 1558 if self.dataOut.flagDecodeData:
1562 1559 processFlags += PROCFLAG.DECODE_DATA
1563 1560
1564 1561 if self.dataOut.flagDeflipData:
1565 1562 processFlags += PROCFLAG.DEFLIP_DATA
1566 1563
1567 1564 if self.dataOut.code is not None:
1568 1565 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
1569 1566
1570 1567 if self.dataOut.nCohInt > 1:
1571 1568 processFlags += PROCFLAG.COHERENT_INTEGRATION
1572 1569
1573 1570 if self.dataOut.type == "Spectra":
1574 1571 if self.dataOut.nIncohInt > 1:
1575 1572 processFlags += PROCFLAG.INCOHERENT_INTEGRATION
1576 1573
1577 1574 if self.dataOut.data_dc is not None:
1578 1575 processFlags += PROCFLAG.SAVE_CHANNELS_DC
1579 1576
1580 1577 if self.dataOut.flagShiftFFT:
1581 1578 processFlags += PROCFLAG.SHIFT_FFT_DATA
1582 1579
1583 1580 return processFlags
1584 1581
1585 1582 def setBasicHeader(self):
1586 1583
1587 1584 self.basicHeaderObj.size = self.basicHeaderSize #bytes
1588 1585 self.basicHeaderObj.version = self.versionFile
1589 1586 self.basicHeaderObj.dataBlock = self.nTotalBlocks
1590 1587
1591 1588 utc = numpy.floor(self.dataOut.utctime)
1592 1589 milisecond = (self.dataOut.utctime - utc)* 1000.0
1593 1590
1594 1591 self.basicHeaderObj.utc = utc
1595 1592 self.basicHeaderObj.miliSecond = milisecond
1596 1593 self.basicHeaderObj.timeZone = self.dataOut.timeZone
1597 1594 self.basicHeaderObj.dstFlag = self.dataOut.dstFlag
1598 1595 self.basicHeaderObj.errorCount = self.dataOut.errorCount
1599 1596
1600 1597 def setFirstHeader(self):
1601 1598 """
1602 1599 Obtiene una copia del First Header
1603 1600
1604 1601 Affected:
1605 1602
1606 1603 self.basicHeaderObj
1607 1604 self.systemHeaderObj
1608 1605 self.radarControllerHeaderObj
1609 1606 self.processingHeaderObj self.
1610 1607
1611 1608 Return:
1612 1609 None
1613 1610 """
1614 1611
1615 1612 raise NotImplementedError
1616 1613
1617 1614 def __writeFirstHeader(self):
1618 1615 """
1619 1616 Escribe el primer header del file es decir el Basic header y el Long header (SystemHeader, RadarControllerHeader, ProcessingHeader)
1620 1617
1621 1618 Affected:
1622 1619 __dataType
1623 1620
1624 1621 Return:
1625 1622 None
1626 1623 """
1627 1624
1628 1625 # CALCULAR PARAMETROS
1629 1626
1630 1627 sizeLongHeader = self.systemHeaderObj.size + self.radarControllerHeaderObj.size + self.processingHeaderObj.size
1631 1628 self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader
1632 1629
1633 1630 self.basicHeaderObj.write(self.fp)
1634 1631 self.systemHeaderObj.write(self.fp)
1635 1632 self.radarControllerHeaderObj.write(self.fp)
1636 1633 self.processingHeaderObj.write(self.fp)
1637 1634
1638 1635 def __setNewBlock(self):
1639 1636 """
1640 1637 Si es un nuevo file escribe el First Header caso contrario escribe solo el Basic Header
1641 1638
1642 1639 Return:
1643 1640 0 : si no pudo escribir nada
1644 1641 1 : Si escribio el Basic el First Header
1645 1642 """
1646 1643 if self.fp == None:
1647 1644 self.setNextFile()
1648 1645
1649 1646 if self.flagIsNewFile:
1650 1647 return 1
1651 1648
1652 1649 if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
1653 1650 self.basicHeaderObj.write(self.fp)
1654 1651 return 1
1655 1652
1656 1653 if not( self.setNextFile() ):
1657 1654 return 0
1658 1655
1659 1656 return 1
1660 1657
1661 1658
1662 1659 def writeNextBlock(self):
1663 1660 """
1664 1661 Selecciona el bloque siguiente de datos y los escribe en un file
1665 1662
1666 1663 Return:
1667 1664 0 : Si no hizo pudo escribir el bloque de datos
1668 1665 1 : Si no pudo escribir el bloque de datos
1669 1666 """
1670 1667 if not( self.__setNewBlock() ):
1671 1668 return 0
1672 1669
1673 1670 self.writeBlock()
1674 1671
1675 1672 print "[Writing] Block No. %d/%d" %(self.blockIndex,
1676 1673 self.processingHeaderObj.dataBlocksPerFile)
1677 1674
1678 1675 return 1
1679 1676
1680 1677 def setNextFile(self):
1681 1678 """
1682 1679 Determina el siguiente file que sera escrito
1683 1680
1684 1681 Affected:
1685 1682 self.filename
1686 1683 self.subfolder
1687 1684 self.fp
1688 1685 self.setFile
1689 1686 self.flagIsNewFile
1690 1687
1691 1688 Return:
1692 1689 0 : Si el archivo no puede ser escrito
1693 1690 1 : Si el archivo esta listo para ser escrito
1694 1691 """
1695 1692 ext = self.ext
1696 1693 path = self.path
1697 1694
1698 1695 if self.fp != None:
1699 1696 self.fp.close()
1700 1697
1701 1698 timeTuple = time.localtime( self.dataOut.utctime)
1702 1699 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
1703 1700
1704 1701 fullpath = os.path.join( path, subfolder )
1705 1702 setFile = self.setFile
1706 1703
1707 1704 if not( os.path.exists(fullpath) ):
1708 1705 os.mkdir(fullpath)
1709 1706 setFile = -1 #inicializo mi contador de seteo
1710 1707 else:
1711 1708 filesList = os.listdir( fullpath )
1712 1709 if len( filesList ) > 0:
1713 1710 filesList = sorted( filesList, key=str.lower )
1714 1711 filen = filesList[-1]
1715 1712 # el filename debera tener el siguiente formato
1716 1713 # 0 1234 567 89A BCDE (hex)
1717 1714 # x YYYY DDD SSS .ext
1718 1715 if isNumber( filen[8:11] ):
1719 1716 setFile = int( filen[8:11] ) #inicializo mi contador de seteo al seteo del ultimo file
1720 1717 else:
1721 1718 setFile = -1
1722 1719 else:
1723 1720 setFile = -1 #inicializo mi contador de seteo
1724 1721
1725 1722 setFile += 1
1726 1723
1727 1724 #If this is a new day it resets some values
1728 1725 if self.dataOut.datatime.date() > self.fileDate:
1729 1726 setFile = 0
1730 1727 self.nTotalBlocks = 0
1731 1728
1732 1729 filen = '%s%4.4d%3.3d%3.3d%s' % (self.optchar, timeTuple.tm_year, timeTuple.tm_yday, setFile, ext )
1733 1730
1734 1731 filename = os.path.join( path, subfolder, filen )
1735 1732
1736 1733 fp = open( filename,'wb' )
1737 1734
1738 1735 self.blockIndex = 0
1739 1736
1740 1737 #guardando atributos
1741 1738 self.filename = filename
1742 1739 self.subfolder = subfolder
1743 1740 self.fp = fp
1744 1741 self.setFile = setFile
1745 1742 self.flagIsNewFile = 1
1746 1743 self.fileDate = self.dataOut.datatime.date()
1747 1744
1748 1745 self.setFirstHeader()
1749 1746
1750 1747 print '[Writing] Opening file: %s'%self.filename
1751 1748
1752 1749 self.__writeFirstHeader()
1753 1750
1754 1751 return 1
1755 1752
1756 1753 def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=64, set=None, ext=None, datatype=4):
1757 1754 """
1758 1755 Setea el tipo de formato en la cual sera guardada la data y escribe el First Header
1759 1756
1760 1757 Inputs:
1761 1758 path : directory where data will be saved
1762 1759 profilesPerBlock : number of profiles per block
1763 1760 set : initial file set
1764 1761 datatype : An integer number that defines data type:
1765 1762 0 : int8 (1 byte)
1766 1763 1 : int16 (2 bytes)
1767 1764 2 : int32 (4 bytes)
1768 1765 3 : int64 (8 bytes)
1769 1766 4 : float32 (4 bytes)
1770 1767 5 : double64 (8 bytes)
1771 1768
1772 1769 Return:
1773 1770 0 : Si no realizo un buen seteo
1774 1771 1 : Si realizo un buen seteo
1775 1772 """
1776 1773
1777 1774 if ext == None:
1778 1775 ext = self.ext
1779 1776
1780 1777 self.ext = ext.lower()
1781 1778
1782 1779 self.path = path
1783 1780
1784 1781 if set is None:
1785 1782 self.setFile = -1
1786 1783 else:
1787 1784 self.setFile = set - 1
1788 1785
1789 1786 self.blocksPerFile = blocksPerFile
1790 1787
1791 1788 self.profilesPerBlock = profilesPerBlock
1792 1789
1793 1790 self.dataOut = dataOut
1794 1791 self.fileDate = self.dataOut.datatime.date()
1795 1792 #By default
1796 1793 self.dtype = self.dataOut.dtype
1797 1794
1798 1795 if datatype is not None:
1799 1796 self.dtype = get_numpy_dtype(datatype)
1800 1797
1801 1798 if not(self.setNextFile()):
1802 1799 print "[Writing] There isn't a next file"
1803 1800 return 0
1804 1801
1805 1802 self.setBlockDimension()
1806 1803
1807 1804 return 1
1808 1805
1809 1806 def run(self, dataOut, path, blocksPerFile, profilesPerBlock=64, set=None, ext=None, datatype=4, **kwargs):
1810 1807
1811 1808 if not(self.isConfig):
1812 1809
1813 1810 self.setup(dataOut, path, blocksPerFile, profilesPerBlock=profilesPerBlock, set=set, ext=ext, datatype=datatype, **kwargs)
1814 1811 self.isConfig = True
1815 1812
1816 1813 self.putData()
@@ -1,739 +1,737
1 1 '''
2 2 Created on Jul 2, 2014
3 3
4 4 @author: roj-idl71
5 5 '''
6 6
7 7 import numpy
8 8
9 9 from jroIO_base import LOCALTIME, JRODataReader, JRODataWriter
10 10 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
11 11 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
12 12 from schainpy.model.data.jrodata import Voltage
13 13 import zmq
14 14 import tempfile
15 15 from StringIO import StringIO
16 16 # from _sha import blocksize
17 17
class VoltageReader(JRODataReader, ProcessingUnit):
    """
    Reads voltage data from raw-data (.r) files. Data is always read one
    block at a time; each block (profiles x heights x channels) is held in
    an internal buffer and served profile by profile through getData().

    This class aggregates instances of BasicHeader, SystemHeader,
    RadarControllerHeader and Voltage: the first three store the file
    metadata, the last receives one profile of data on every getData() call.

    Example:

        dpath = "/home/myuser/data"

        startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)

        endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)

        readerObj = VoltageReader()

        readerObj.setup(dpath, startTime, endTime)

        while(True):

            #to get one profile
            profile = readerObj.getData()

            #print the profile
            print profile

            #If you want to see all datablock
            print readerObj.datablock

            if readerObj.flagNoMoreFiles:
                break

    """

    ext = ".r"

    optchar = "D"
    dataOut = None
63 63
64 64 def __init__(self, **kwargs):
65 65 """
66 66 Inicializador de la clase VoltageReader para la lectura de datos de voltage.
67 67
68 68 Input:
69 69 dataOut : Objeto de la clase Voltage. Este objeto sera utilizado para
70 70 almacenar un perfil de datos cada vez que se haga un requerimiento
71 71 (getData). El perfil sera obtenido a partir del buffer de datos,
72 72 si el buffer esta vacio se hara un nuevo proceso de lectura de un
73 73 bloque de datos.
74 74 Si este parametro no es pasado se creara uno internamente.
75 75
76 76 Variables afectadas:
77 77 self.dataOut
78 78
79 79 Return:
80 80 None
81 81 """
82 82
83 83 ProcessingUnit.__init__(self, **kwargs)
84 84
85 85 self.isConfig = False
86 86
87 87 self.datablock = None
88 88
89 89 self.utc = 0
90 90
91 91 self.ext = ".r"
92 92
93 93 self.optchar = "D"
94 94
95 95 self.basicHeaderObj = BasicHeader(LOCALTIME)
96 96
97 97 self.systemHeaderObj = SystemHeader()
98 98
99 99 self.radarControllerHeaderObj = RadarControllerHeader()
100 100
101 101 self.processingHeaderObj = ProcessingHeader()
102 102
103 103 self.online = 0
104 104
105 105 self.fp = None
106 106
107 107 self.idFile = None
108 108
109 109 self.dtype = None
110 110
111 111 self.fileSizeByHeader = None
112 112
113 113 self.filenameList = []
114 114
115 115 self.filename = None
116 116
117 117 self.fileSize = None
118 118
119 119 self.firstHeaderSize = 0
120 120
121 121 self.basicHeaderSize = 24
122 122
123 123 self.pathList = []
124 124
125 125 self.filenameList = []
126 126
127 127 self.lastUTTime = 0
128 128
129 129 self.maxTimeStep = 30
130 130
131 131 self.flagNoMoreFiles = 0
132 132
133 133 self.set = 0
134 134
135 135 self.path = None
136 136
137 137 self.profileIndex = 2**32-1
138 138
139 139 self.delay = 3 #seconds
140 140
141 141 self.nTries = 3 #quantity tries
142 142
143 143 self.nFiles = 3 #number of files for searching
144 144
145 145 self.nReadBlocks = 0
146 146
147 147 self.flagIsNewFile = 1
148 148
149 149 self.__isFirstTimeOnline = 1
150 150
151 151 # self.ippSeconds = 0
152 152
153 153 self.flagDiscontinuousBlock = 0
154 154
155 155 self.flagIsNewBlock = 0
156 156
157 157 self.nTotalBlocks = 0
158 158
159 159 self.blocksize = 0
160 160
161 161 self.dataOut = self.createObjByDefault()
162 162
163 163 self.nTxs = 1
164 164
165 165 self.txIndex = 0
166 166
167 167 def createObjByDefault(self):
168 168
169 169 dataObj = Voltage()
170 170
171 171 return dataObj
172 172
173 173 def __hasNotDataInBuffer(self):
174 174
175 175 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock*self.nTxs:
176 176 return 1
177 177
178 178 return 0
179 179
180 180
181 181 def getBlockDimension(self):
182 182 """
183 183 Obtiene la cantidad de puntos a leer por cada bloque de datos
184 184
185 185 Affected:
186 186 self.blocksize
187 187
188 188 Return:
189 189 None
190 190 """
191 191 pts2read = self.processingHeaderObj.profilesPerBlock * self.processingHeaderObj.nHeights * self.systemHeaderObj.nChannels
192 192 self.blocksize = pts2read
193 193
194 194
195 195
196 196 def readBlock(self):
197 197 """
198 198 readBlock lee el bloque de datos desde la posicion actual del puntero del archivo
199 199 (self.fp) y actualiza todos los parametros relacionados al bloque de datos
200 200 (metadata + data). La data leida es almacenada en el buffer y el contador del buffer
201 201 es seteado a 0
202 202
203 203 Inputs:
204 204 None
205 205
206 206 Return:
207 207 None
208 208
209 209 Affected:
210 210 self.profileIndex
211 211 self.datablock
212 212 self.flagIsNewFile
213 213 self.flagIsNewBlock
214 214 self.nTotalBlocks
215 215
216 216 Exceptions:
217 217 Si un bloque leido no es un bloque valido
218 218 """
219 219
220 print 'READ BLOCK'
221 220 # if self.server is not None:
222 221 # self.zBlock = self.receiver.recv()
223 222 # self.zHeader = self.zBlock[:24]
224 223 # self.zDataBlock = self.zBlock[24:]
225 224 # junk = numpy.fromstring(self.zDataBlock, numpy.dtype([('real','<i4'),('imag','<i4')]))
226 225 # self.processingHeaderObj.profilesPerBlock = 240
227 226 # self.processingHeaderObj.nHeights = 248
228 227 # self.systemHeaderObj.nChannels
229 228 # else:
230 229 current_pointer_location = self.fp.tell()
231 230 junk = numpy.fromfile( self.fp, self.dtype, self.blocksize )
232 231
233 232 try:
234 233 junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
235 print'junked'
236 234 except:
237 235 #print "The read block (%3d) has not enough data" %self.nReadBlocks
238 236
239 237 if self.waitDataBlock(pointer_location=current_pointer_location):
240 238 junk = numpy.fromfile( self.fp, self.dtype, self.blocksize )
241 239 junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
242 240 # return 0
243 241
244 242 #Dimensions : nChannels, nProfiles, nSamples
245 243
246 244 junk = numpy.transpose(junk, (2,0,1))
247 245 self.datablock = junk['real'] + junk['imag']*1j
248 246
249 247 self.profileIndex = 0
250 248
251 249 self.flagIsNewFile = 0
252 250 self.flagIsNewBlock = 1
253 251
254 252 self.nTotalBlocks += 1
255 253 self.nReadBlocks += 1
256 254
257 255 return 1
258 256
    def getFirstHeader(self):
        """Propagate the current file's headers into ``self.dataOut`` (metadata only).

        Reads the basic header, clones the system and radar-controller
        headers, scales the IPP when one recorded profile carries several
        pulses (``nTxs > 1``) and derives dtype, profile count, height list,
        channel list and the decode/deflip/FFT-shift flags from the
        processing header.
        """
        self.getBasicHeader()

        self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()

        self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()

        if self.nTxs > 1:
            # Several transmitted pulses per recorded profile: the effective
            # inter-pulse period is a fraction of the stored one.
            self.dataOut.radarControllerHeaderObj.ippSeconds = self.radarControllerHeaderObj.ippSeconds/self.nTxs

        # Time interval and code are properties of dataOut. Their values depend on radarControllerHeaderObj.

#        self.dataOut.timeInterval = self.radarControllerHeaderObj.ippSeconds * self.processingHeaderObj.nCohInt
#
#        if self.radarControllerHeaderObj.code is not None:
#
#            self.dataOut.nCode = self.radarControllerHeaderObj.nCode
#
#            self.dataOut.nBaud = self.radarControllerHeaderObj.nBaud
#
#            self.dataOut.code = self.radarControllerHeaderObj.code

        self.dataOut.dtype = self.dtype

        self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock

        # Height axis: firstHeight + n * deltaHeight, one entry per sample.
        self.dataOut.heightList = numpy.arange(self.processingHeaderObj.nHeights) *self.processingHeaderObj.deltaHeight + self.processingHeaderObj.firstHeight

        self.dataOut.channelList = range(self.systemHeaderObj.nChannels)

        self.dataOut.nCohInt = self.processingHeaderObj.nCohInt

        self.dataOut.flagDecodeData = self.processingHeaderObj.flag_decode  # assume the data is not decoded yet

        self.dataOut.flagDeflipData = self.processingHeaderObj.flag_deflip  # assume the data is not de-flipped yet

        self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft
297 295
    def reshapeData(self):
        """Re-dimension ``self.datablock`` when one recorded profile carries
        several transmitted pulses (``nTxs != 1``).

        For ``nTxs > 1`` every profile is split into nTxs shorter profiles:
        nProfiles grows by the factor nTxs while nHeights shrinks by it;
        heightList and the effective IPP on ``dataOut`` are updated to match.

        NOTE(review): the divisions below rely on Python 2 semantics
        (``nHeights/self.nTxs`` truncates for ints) -- confirm before porting.
        """
        if self.nTxs < 0:
            return

        if self.nTxs == 1:
            return

        # NOTE(review): the two error messages below look swapped with respect
        # to the conditions they report -- confirm the intended wording.
        if self.nTxs < 1 and self.processingHeaderObj.profilesPerBlock % (1./self.nTxs) != 0:
            raise ValueError, "1./nTxs (=%f), should be a multiple of nProfiles (=%d)" %(1./self.nTxs, self.processingHeaderObj.profilesPerBlock)

        if self.nTxs > 1 and self.processingHeaderObj.nHeights % self.nTxs != 0:
            raise ValueError, "nTxs (=%d), should be a multiple of nHeights (=%d)" %(self.nTxs, self.processingHeaderObj.nHeights)

        self.datablock = self.datablock.reshape((self.systemHeaderObj.nChannels, self.processingHeaderObj.profilesPerBlock*self.nTxs, self.processingHeaderObj.nHeights/self.nTxs))

        self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock*self.nTxs
        self.dataOut.heightList = numpy.arange(self.processingHeaderObj.nHeights/self.nTxs) *self.processingHeaderObj.deltaHeight + self.processingHeaderObj.firstHeight
        self.dataOut.radarControllerHeaderObj.ippSeconds = self.radarControllerHeaderObj.ippSeconds/self.nTxs

        return
319 317
320 318 def readFirstHeaderFromServer(self):
321 319
322 320 self.getFirstHeader()
323 321
324 322 self.firstHeaderSize = self.basicHeaderObj.size
325 323
326 324 datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
327 325 if datatype == 0:
328 326 datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
329 327 elif datatype == 1:
330 328 datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')])
331 329 elif datatype == 2:
332 330 datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')])
333 331 elif datatype == 3:
334 332 datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')])
335 333 elif datatype == 4:
336 334 datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')])
337 335 elif datatype == 5:
338 336 datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')])
339 337 else:
340 338 raise ValueError, 'Data type was not defined'
341 339
342 340 self.dtype = datatype_str
343 341 #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
344 342 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1)
345 343 # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
346 344 # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
347 345 self.getBlockDimension()
348 346
349 347
350 348 def getFromServer(self):
351 349 self.flagDiscontinuousBlock = 0
352 350 self.profileIndex = 0
353 351 self.flagIsNewBlock = 1
354 352 self.dataOut.flagNoData = False
355 353 self.nTotalBlocks += 1
356 354 self.nReadBlocks += 1
357 355 self.blockPointer = 0
358 356
359 357 block = self.receiver.recv()
360 358
361 359 self.basicHeaderObj.read(block[self.blockPointer:])
362 360 self.blockPointer += self.basicHeaderObj.length
363 361 self.systemHeaderObj.read(block[self.blockPointer:])
364 362 self.blockPointer += self.systemHeaderObj.length
365 363 self.radarControllerHeaderObj.read(block[self.blockPointer:])
366 364 self.blockPointer += self.radarControllerHeaderObj.length
367 365 self.processingHeaderObj.read(block[self.blockPointer:])
368 366 self.blockPointer += self.processingHeaderObj.length
369 367 self.readFirstHeaderFromServer()
370 368
371 369 timestamp = self.basicHeaderObj.get_datatime()
372 370 print '[Reading] - Block {} - {}'.format(self.nTotalBlocks, timestamp)
373 371 current_pointer_location = self.blockPointer
374 372 junk = numpy.fromstring( block[self.blockPointer:], self.dtype, self.blocksize )
375 373
376 374 try:
377 375 junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
378 376 except:
379 377 #print "The read block (%3d) has not enough data" %self.nReadBlocks
380 378 if self.waitDataBlock(pointer_location=current_pointer_location):
381 379 junk = numpy.fromstring( block[self.blockPointer:], self.dtype, self.blocksize )
382 380 junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
383 381 # return 0
384 382
385 383 #Dimensions : nChannels, nProfiles, nSamples
386 384
387 385 junk = numpy.transpose(junk, (2,0,1))
388 386 self.datablock = junk['real'] + junk['imag'] * 1j
389 387 self.profileIndex = 0
390 388 if self.selBlocksize == None: self.selBlocksize = self.dataOut.nProfiles
391 389 if self.selBlocktime != None:
392 390 if self.dataOut.nCohInt is not None:
393 391 nCohInt = self.dataOut.nCohInt
394 392 else:
395 393 nCohInt = 1
396 394 self.selBlocksize = int(self.dataOut.nProfiles*round(self.selBlocktime/(nCohInt*self.dataOut.ippSeconds*self.dataOut.nProfiles)))
397 395 self.dataOut.data = self.datablock[:,self.profileIndex:self.profileIndex+self.selBlocksize,:]
398 396 datasize = self.dataOut.data.shape[1]
399 397 if datasize < self.selBlocksize:
400 398 buffer = numpy.zeros((self.dataOut.data.shape[0], self.selBlocksize, self.dataOut.data.shape[2]), dtype = 'complex')
401 399 buffer[:,:datasize,:] = self.dataOut.data
402 400 self.dataOut.data = buffer
403 401 self.profileIndex = blockIndex
404 402
405 403 self.dataOut.flagDataAsBlock = True
406 404 self.flagIsNewBlock = 1
407 405 self.dataOut.realtime = self.online
408 406
409 407 return self.dataOut.data
410 408
    def getData(self):
        """
        Fetch one unit of data from the read buffer into ``self.dataOut``
        (type "Voltage") together with all its metadata. When the read
        buffer is exhausted a new block is read via ``readNextBlock``.

        Also increments the buffer counter ``self.profileIndex`` by 1.

        Return:

            If ``self.getByBlock`` is set, the whole block is copied to
            ``self.dataOut`` and ``self.profileIndex`` equals the total
            number of profiles read from the file.

            If self.getByBlock == False:

                self.dataOut.data = buffer[:, thisProfile, :]

                shape = [nChannels, nHeis]

            If self.getByBlock == True:

                self.dataOut.data = buffer[:, :, :]

                shape = [nChannels, nProfiles, nHeis]

        Affected:
            self.dataOut
            self.profileIndex
            self.flagDiscontinuousBlock
            self.flagIsNewBlock
        """
        if self.flagNoMoreFiles:
            self.dataOut.flagNoData = True
            print 'Process finished'
            return 0
        self.flagDiscontinuousBlock = 0
        self.flagIsNewBlock = 0
        if self.__hasNotDataInBuffer():
            # Current block fully consumed: pull the next one from disk.
            if not( self.readNextBlock() ):
                return 0

        self.getFirstHeader()

        self.reshapeData()
        if self.datablock is None:
            self.dataOut.flagNoData = True
            return 0

        if not self.getByBlock:

            """
            Return profile by profile

            If nTxs > 1 then one profile is divided by nTxs and number of total
            blocks is increased by nTxs (nProfiles *= nTxs)
            """
            self.dataOut.flagDataAsBlock = False
            self.dataOut.data = self.datablock[:,self.profileIndex,:]
            self.dataOut.profileIndex = self.profileIndex

            self.profileIndex += 1

#        elif self.selBlocksize==None or self.selBlocksize==self.dataOut.nProfiles:
#            """
#            Return all block
#            """
#            self.dataOut.flagDataAsBlock = True
#            self.dataOut.data = self.datablock
#            self.dataOut.profileIndex = self.dataOut.nProfiles - 1
#
#            self.profileIndex = self.dataOut.nProfiles

        else:
            """
            Return a block
            """
            # selBlocktime (seconds) overrides selBlocksize (profiles).
            if self.selBlocksize == None: self.selBlocksize = self.dataOut.nProfiles
            if self.selBlocktime != None:
                if self.dataOut.nCohInt is not None:
                    nCohInt = self.dataOut.nCohInt
                else:
                    nCohInt = 1
                self.selBlocksize = int(self.dataOut.nProfiles*round(self.selBlocktime/(nCohInt*self.dataOut.ippSeconds*self.dataOut.nProfiles)))

            self.dataOut.data = self.datablock[:,self.profileIndex:self.profileIndex+self.selBlocksize,:]
            self.profileIndex += self.selBlocksize
            datasize = self.dataOut.data.shape[1]

            if datasize < self.selBlocksize:
                # Not enough profiles left in this block: keep reading blocks
                # and stacking their leading profiles until the buffer is full.
                buffer = numpy.zeros((self.dataOut.data.shape[0],self.selBlocksize,self.dataOut.data.shape[2]), dtype = 'complex')
                buffer[:,:datasize,:] = self.dataOut.data

                while datasize < self.selBlocksize: #Not enough profiles to fill the block
                    if not( self.readNextBlock() ):
                        return 0
                    self.getFirstHeader()
                    self.reshapeData()
                    if self.datablock is None:
                        self.dataOut.flagNoData = True
                        return 0
                    #stack data
                    blockIndex = self.selBlocksize - datasize
                    datablock1 = self.datablock[:,:blockIndex,:]

                    buffer[:,datasize:datasize+datablock1.shape[1],:] = datablock1
                    datasize += datablock1.shape[1]

                self.dataOut.data = buffer
                # Resume the next read from where stacking stopped in the
                # freshly loaded block.
                self.profileIndex = blockIndex

            self.dataOut.flagDataAsBlock = True
            self.dataOut.nProfiles = self.dataOut.data.shape[1]

        self.dataOut.flagNoData = False

        self.getBasicHeader()

        self.dataOut.realtime = self.online

        return self.dataOut.data
536 534
class VoltageWriter(JRODataWriter, Operation):
    """
    Writes voltage data to processed (.r) files. Data is always written
    block by block.
    """

    ext = ".r"          # output file extension

    optchar = "D"       # option character embedded in the file name

    shapeBuffer = None  # (profiles, heights, channels) shape of the raw block


    def __init__(self, **kwargs):
        """
        Initialize the VoltageWriter for writing voltage data.

        Affected:
            self.dataOut

        Return: None
        """
        Operation.__init__(self, **kwargs)

        self.nTotalBlocks = 0

        self.profileIndex = 0

        self.isConfig = False

        self.fp = None

        self.flagIsNewFile = 1

        self.blockIndex = 0

        self.flagIsNewBlock = 0

        self.setFile = None

        self.dtype = None

        self.path = None

        self.filename = None

        self.basicHeaderObj = BasicHeader(LOCALTIME)

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()

    def hasAllDataInBuffer(self):
        # True once a full block's worth of profiles has been accumulated.
        if self.profileIndex >=  self.processingHeaderObj.profilesPerBlock:
            return 1
        return 0


    def setBlockDimension(self):
        """
        Compute the dimensional shapes of the sub-blocks that make up a block
        and allocate the accumulation buffer.

        Affected:
            self.shape_spc_Buffer
            self.shape_cspc_Buffer
            self.shape_dc_Buffer

        Return: None
        """
        self.shapeBuffer = (self.processingHeaderObj.profilesPerBlock,
                            self.processingHeaderObj.nHeights,
                            self.systemHeaderObj.nChannels)

        self.datablock = numpy.zeros((self.systemHeaderObj.nChannels,
                                      self.processingHeaderObj.profilesPerBlock,
                                      self.processingHeaderObj.nHeights),
                                     dtype=numpy.dtype('complex64'))

    def writeBlock(self):
        """
        Write the buffer to the target file.

        Affected:
            self.profileIndex
            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
            self.blockIndex

        Return: None
        """
        data = numpy.zeros( self.shapeBuffer, self.dtype )

        # File layout is (profiles, heights, channels); buffer is
        # (channels, profiles, heights), so transpose before splitting
        # into the structured real/imag fields.
        junk = numpy.transpose(self.datablock, (1,2,0))

        data['real'] = junk.real
        data['imag'] = junk.imag

        data = data.reshape( (-1) )

        data.tofile( self.fp )

        self.datablock.fill(0)

        self.profileIndex = 0
        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1

        self.blockIndex += 1
        self.nTotalBlocks += 1

#        print "[Writing] Block = %04d" %self.blockIndex

    def putData(self):
        """
        Stage one profile into the block buffer and flush it to file once full.

        Affected:
            self.flagIsNewBlock
            self.profileIndex

        Return:
            0 : if there is no data or no more files can be written
            1 : if a block of data was written to a file
        """
        if self.dataOut.flagNoData:
            return 0

        self.flagIsNewBlock = 0

        if self.dataOut.flagDiscontinuousBlock:
            # Time gap detected: drop the partial buffer and start a new file.
            self.datablock.fill(0)
            self.profileIndex = 0
            self.setNextFile()

        if self.profileIndex == 0:
            self.setBasicHeader()

        self.datablock[:,self.profileIndex,:] = self.dataOut.data

        self.profileIndex += 1

        if self.hasAllDataInBuffer():
            #if self.flagIsNewFile:
            self.writeNextBlock()
#            self.setFirstHeader()

        return 1

    def __getBlockSize(self):
        '''
        Compute the number of bytes of a Voltage data block
        (heights x channels x profiles x sample width x 2 for real/imag).
        '''

        dtype_width = self.getDtypeWidth()

        blocksize = int(self.dataOut.nHeights * self.dataOut.nChannels * self.profilesPerBlock * dtype_width * 2)

        return blocksize

    def setFirstHeader(self):

        """
        Build the First Header from a copy of the dataOut headers.

        Affected:
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.dtype

        Return:
            None
        """

        self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
        self.systemHeaderObj.nChannels = self.dataOut.nChannels
        self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()

        self.processingHeaderObj.dtype = 0 # Voltage
        self.processingHeaderObj.blockSize = self.__getBlockSize()
        self.processingHeaderObj.profilesPerBlock = self.profilesPerBlock
        self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
        self.processingHeaderObj.nWindows = 1 # could be 1 or self.dataOut.processingHeaderObj.nWindows
        self.processingHeaderObj.nCohInt = self.dataOut.nCohInt
        self.processingHeaderObj.nIncohInt = 1 # source data is of type Voltage
        self.processingHeaderObj.totalSpectra = 0 # source data is of type Voltage

        if self.dataOut.code is not None:
            self.processingHeaderObj.code = self.dataOut.code
            self.processingHeaderObj.nCode = self.dataOut.nCode
            self.processingHeaderObj.nBaud = self.dataOut.nBaud

        if self.processingHeaderObj.nWindows != 0:
            self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
            self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
            self.processingHeaderObj.nHeights = self.dataOut.nHeights
            self.processingHeaderObj.samplesWin = self.dataOut.nHeights

        self.processingHeaderObj.processFlags = self.getProcessFlags()

        self.setBasicHeader()
@@ -1,1285 +1,1283
1 1 import sys
2 2 import numpy
3 3 from scipy import interpolate
4 4
5 5 from jroproc_base import ProcessingUnit, Operation
6 6 from schainpy.model.data.jrodata import Voltage
7 7
class VoltageProc(ProcessingUnit):
    """Processing unit that adapts incoming Voltage/AMISR data into a
    Voltage object (``self.dataOut``) and offers channel/height selection,
    filtering, sign de-flipping and height interpolation on it."""


    def __init__(self, **kwargs):
        # `flip` keeps the alternating sign state used by deFlip() across calls.
        ProcessingUnit.__init__(self, **kwargs)

#        self.objectDict = {}
        self.dataOut = Voltage()
        self.flip = 1

    def run(self):
        # Copy/adapt the input object into self.dataOut depending on its type.
        if self.dataIn.type == 'AMISR':
            self.__updateObjFromAmisrInput()

        if self.dataIn.type == 'Voltage':
            self.dataOut.copy(self.dataIn)

#        self.dataOut.copy(self.dataIn)

    def __updateObjFromAmisrInput(self):
        # Field-by-field copy of the AMISR input into the Voltage output.
        self.dataOut.timeZone = self.dataIn.timeZone
        self.dataOut.dstFlag = self.dataIn.dstFlag
        self.dataOut.errorCount = self.dataIn.errorCount
        self.dataOut.useLocalTime = self.dataIn.useLocalTime

        self.dataOut.flagNoData = self.dataIn.flagNoData
        self.dataOut.data = self.dataIn.data
        self.dataOut.utctime = self.dataIn.utctime
        self.dataOut.channelList = self.dataIn.channelList
#        self.dataOut.timeInterval = self.dataIn.timeInterval
        self.dataOut.heightList = self.dataIn.heightList
        self.dataOut.nProfiles = self.dataIn.nProfiles

        self.dataOut.nCohInt = self.dataIn.nCohInt
        self.dataOut.ippSeconds = self.dataIn.ippSeconds
        self.dataOut.frequency = self.dataIn.frequency

        self.dataOut.azimuth = self.dataIn.azimuth
        self.dataOut.zenith = self.dataIn.zenith

        self.dataOut.beam.codeList = self.dataIn.beam.codeList
        self.dataOut.beam.azimuthList = self.dataIn.beam.azimuthList
        self.dataOut.beam.zenithList = self.dataIn.beam.zenithList
#
#        pass#
#
#    def init(self):
#
#
#        if self.dataIn.type == 'AMISR':
#            self.__updateObjFromAmisrInput()
#
#        if self.dataIn.type == 'Voltage':
#            self.dataOut.copy(self.dataIn)
#            # No need to copy the dataIn attributes on every init();
#            # the copy should be done once per new data block

    def selectChannels(self, channelList):
        # Translate channel numbers to positional indexes, then delegate.
        # Raises ValueError on a channel absent from dataOut.channelList.
        channelIndexList = []

        for channel in channelList:
            if channel not in self.dataOut.channelList:
                raise ValueError, "Channel %d is not in %s" %(channel, str(self.dataOut.channelList))

            index = self.dataOut.channelList.index(channel)
            channelIndexList.append(index)

        self.selectChannelsByIndex(channelIndexList)

    def selectChannelsByIndex(self, channelIndexList):
        """
        Select a sub-block of data by channel index.

        Input:
            channelIndexList : plain list of channel indexes to select, e.g. [2,3,7]

        Affected:
            self.dataOut.data
            self.dataOut.channelIndexList
            self.dataOut.nChannels
            self.dataOut.m_ProcessingHeader.totalSpectra
            self.dataOut.systemHeaderObj.numChannels
            self.dataOut.m_ProcessingHeader.blockSize

        Return:
            None
        """

        for channelIndex in channelIndexList:
            if channelIndex not in self.dataOut.channelIndexList:
                print channelIndexList
                raise ValueError, "The value %d in channelIndexList is not valid" %channelIndex

        if self.dataOut.flagDataAsBlock:
            """
            Block data: dimension = [nChannels, nProfiles, nHeis]
            """
            data = self.dataOut.data[channelIndexList,:,:]
        else:
            data = self.dataOut.data[channelIndexList,:]

        self.dataOut.data = data
        self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
#        self.dataOut.nChannels = nChannels

        return 1

    def selectHeights(self, minHei=None, maxHei=None):
        """
        Select a sub-block of data by height range: minHei <= height <= maxHei.

        Input:
            minHei : minimum height value to consider
            maxHei : maximum height value to consider

        Affected:
            Several values are changed indirectly through selectHeightsByIndex

        Return:
            1 if the method executed successfully, otherwise 0
        """

        if minHei == None:
            minHei = self.dataOut.heightList[0]

        if maxHei == None:
            maxHei = self.dataOut.heightList[-1]

        # Clamp the requested range to the available height list.
        if (minHei < self.dataOut.heightList[0]):
            minHei = self.dataOut.heightList[0]

        if (maxHei > self.dataOut.heightList[-1]):
            maxHei = self.dataOut.heightList[-1]

        minIndex = 0
        maxIndex = 0
        heights = self.dataOut.heightList

        inda = numpy.where(heights >= minHei)
        indb = numpy.where(heights <= maxHei)

        try:
            minIndex = inda[0][0]
        except:
            minIndex = 0

        try:
            maxIndex = indb[0][-1]
        except:
            maxIndex = len(heights)

        self.selectHeightsByIndex(minIndex, maxIndex)

        return 1


    def selectHeightsByIndex(self, minIndex, maxIndex):
        """
        Select a sub-block of data by height index range:
        minIndex <= index <= maxIndex.

        Input:
            minIndex : minimum height index to consider
            maxIndex : maximum height index to consider

        Affected:
            self.dataOut.data
            self.dataOut.heightList

        Return:
            1 if the method executed successfully, otherwise 0
        """

        if (minIndex < 0) or (minIndex > maxIndex):
            raise ValueError, "Height index range (%d,%d) is not valid" % (minIndex, maxIndex)

        if (maxIndex >= self.dataOut.nHeights):
            maxIndex = self.dataOut.nHeights

        #voltage
        if self.dataOut.flagDataAsBlock:
            """
            Block data: dimension = [nChannels, nProfiles, nHeis]
            """
            data = self.dataOut.data[:,:, minIndex:maxIndex]
        else:
            data = self.dataOut.data[:, minIndex:maxIndex]

#        firstHeight = self.dataOut.heightList[minIndex]

        self.dataOut.data = data
        self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex]

        if self.dataOut.nHeights <= 1:
            raise ValueError, "selectHeights: Too few heights. Current number of heights is %d" %(self.dataOut.nHeights)

        return 1


    def filterByHeights(self, window):
        # Decimate the height axis by summing consecutive groups of `window`
        # samples; leftover heights (nHeights % window) are dropped.

        deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]

        if window == None:
            # Default window: samples per transmitted baud.
            window = (self.dataOut.radarControllerHeaderObj.txA/self.dataOut.radarControllerHeaderObj.nBaud) / deltaHeight

        newdelta = deltaHeight * window
        r = self.dataOut.nHeights % window
        newheights = (self.dataOut.nHeights-r)/window

        if newheights <= 1:
            raise ValueError, "filterByHeights: Too few heights. Current number of heights is %d and window is %d" %(self.dataOut.nHeights, window)

        if self.dataOut.flagDataAsBlock:
            """
            Block data: dimension = [nChannels, nProfiles, nHeis]
            """
            buffer = self.dataOut.data[:, :, 0:self.dataOut.nHeights-r]
            buffer = buffer.reshape(self.dataOut.nChannels,self.dataOut.nProfiles,self.dataOut.nHeights/window,window)
            buffer = numpy.sum(buffer,3)

        else:
            buffer = self.dataOut.data[:,0:self.dataOut.nHeights-r]
            buffer = buffer.reshape(self.dataOut.nChannels,self.dataOut.nHeights/window,window)
            buffer = numpy.sum(buffer,2)

        self.dataOut.data = buffer
        self.dataOut.heightList = self.dataOut.heightList[0] + numpy.arange( newheights )*newdelta
        self.dataOut.windowOfFilter = window

    def setH0(self, h0, deltaHeight = None):
        # Rebase the height axis to start at h0, keeping the spacing.

        if not deltaHeight:
            deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]

        nHeights = self.dataOut.nHeights

        newHeiRange = h0 + numpy.arange(nHeights)*deltaHeight

        self.dataOut.heightList = newHeiRange

    def deFlip(self, channelList = []):
        # Undo the +1/-1 alternating sign applied per profile. The running
        # sign is kept in self.flip so it stays consistent across calls.

        data = self.dataOut.data.copy()

        if self.dataOut.flagDataAsBlock:
            flip = self.flip
            profileList = range(self.dataOut.nProfiles)

            if not channelList:
                for thisProfile in profileList:
                    data[:,thisProfile,:] = data[:,thisProfile,:]*flip
                    flip *= -1.0
            else:
                for thisChannel in channelList:
                    if thisChannel not in self.dataOut.channelList:
                        continue

                    for thisProfile in profileList:
                        data[thisChannel,thisProfile,:] = data[thisChannel,thisProfile,:]*flip
                        flip *= -1.0

            self.flip = flip

        else:
            if not channelList:
                data[:,:] = data[:,:]*self.flip
            else:
                for thisChannel in channelList:
                    if thisChannel not in self.dataOut.channelList:
                        continue

                    data[thisChannel,:] = data[thisChannel,:]*self.flip

            self.flip *= -1.

        self.dataOut.data = data

    def setRadarFrequency(self, frequency=None):
        # Override the radar frequency metadata when provided.

        if frequency != None:
            self.dataOut.frequency = frequency

        return 1

    def interpolateHeights(self, topLim, botLim):
        # Replace the samples between botLim and topLim (inclusive) with
        # values interpolated from their neighbours.
        #69 al 72 para julia
        #82-84 para meteoros
        if len(numpy.shape(self.dataOut.data))==2:
            # 2-D data: linear midpoint of the two bounding heights.
            sampInterp = (self.dataOut.data[:,botLim-1] + self.dataOut.data[:,topLim+1])/2
            sampInterp = numpy.transpose(numpy.tile(sampInterp,(topLim-botLim + 1,1)))
            #self.dataOut.data[:,botLim:limSup+1] = sampInterp
            self.dataOut.data[:,botLim:topLim+1] = sampInterp
        else:
            # 3-D data: 1-D interpolation along the height axis using the
            # samples outside the excluded band.
            nHeights = self.dataOut.data.shape[2]
            x = numpy.hstack((numpy.arange(botLim),numpy.arange(topLim+1,nHeights)))
            y = self.dataOut.data[:,:,range(botLim)+range(topLim+1,nHeights)]
            f = interpolate.interp1d(x, y, axis = 2)
            xnew = numpy.arange(botLim,topLim+1)
            ynew = f(xnew)

            self.dataOut.data[:,:,botLim:topLim+1] = ynew
314 314
315 315 # import collections
316 316
class CohInt(Operation):
    """Coherent integration: sum groups of complex voltage profiles, either
    by a fixed count ``n``, by a time interval, or block-wise; optionally
    with a sliding (overlapping) window."""

    isConfig = False

    __profIndex = 0            # profiles accumulated so far
    __withOverapping = False   # sliding-window mode (note: name keeps original spelling)

    __byTime = False           # integrate by time interval instead of by count
    __initime = None           # timestamp of the first profile in the current sum
    __lastdatatime = None      # timestamp of the most recent profile
    __integrationtime = None   # target interval (seconds) when __byTime

    __buffer = None            # running sum (or stacked profiles when overlapping)

    __dataReady = False        # True when an integrated result is available

    n = None                   # number of profiles per integration


    def __init__(self, **kwargs):

        Operation.__init__(self, **kwargs)

#        self.isConfig = False

    def setup(self, n=None, timeInterval=None, overlapping=False, byblock=False):
        """
        Set the parameters of the integration class.

        Inputs:

            n            :    Number of coherent integrations
            timeInterval :    Time of integration. If the parameter "n" is selected this one does not work
            overlapping  :

        """

        self.__initime = None
        self.__lastdatatime = 0
        self.__buffer = None
        self.__dataReady = False
        self.byblock = byblock

        if n == None and timeInterval == None:
            raise ValueError, "n or timeInterval should be specified ..."

        if n != None:
            self.n = n
            self.__byTime = False
        else:
            self.__integrationtime = timeInterval #* 60. #if (type(timeInterval)!=integer) -> change this line
            self.n = 9999
            self.__byTime = True

        if overlapping:
            self.__withOverapping = True
            self.__buffer = None
        else:
            self.__withOverapping = False
            self.__buffer = 0

        self.__profIndex = 0

    def putData(self, data):

        """
        Add a profile to the __buffer and increase in one the __profileIndex

        """

        if not self.__withOverapping:
            # Non-overlapping: keep only the running sum.
            self.__buffer += data.copy()
            self.__profIndex += 1
            return

        #Overlapping data
        nChannels, nHeis = data.shape
        data = numpy.reshape(data, (1, nChannels, nHeis))

        #If the buffer is empty then it takes the data value
        if self.__buffer is None:
            self.__buffer = data
            self.__profIndex += 1
            return

        #If the buffer length is lower than n then stacking the data value
        if self.__profIndex < self.n:
            self.__buffer = numpy.vstack((self.__buffer, data))
            self.__profIndex += 1
            return

        #If the buffer length is equal to n then replacing the last buffer value with the data value
        self.__buffer = numpy.roll(self.__buffer, -1, axis=0)
        self.__buffer[self.n-1] = data
        self.__profIndex = self.n
        return


    def pushData(self):
        """
        Return the sum of the last profiles and the profiles used in the sum.

        Affected:

        self.__profileIndex

        """

        if not self.__withOverapping:
            data = self.__buffer
            n = self.__profIndex

            self.__buffer = 0
            self.__profIndex = 0

            return data, n

        #Integration with Overlapping
        data = numpy.sum(self.__buffer, axis=0)
        n = self.__profIndex

        return data, n

    def byProfiles(self, data):
        # Accumulate one profile; emit the sum once n profiles are in.

        self.__dataReady = False
        avgdata = None
#        n = None

        self.putData(data)

        if self.__profIndex == self.n:

            avgdata, n = self.pushData()
            self.__dataReady = True

        return avgdata

    def byTime(self, data, datatime):
        # Accumulate one profile; emit the sum once the integration time elapses.

        self.__dataReady = False
        avgdata = None
        n = None

        self.putData(data)

        if (datatime - self.__initime) >= self.__integrationtime:
            avgdata, n = self.pushData()
            self.n = n
            self.__dataReady = True

        return avgdata

    def integrate(self, data, datatime=None):
        """Feed one profile; returns (avgdata, avgdatatime) when an
        integration completes, else (None, None)."""

        if self.__initime == None:
            self.__initime = datatime

        if self.__byTime:
            avgdata = self.byTime(data, datatime)
        else:
            avgdata = self.byProfiles(data)


        self.__lastdatatime = datatime

        if avgdata is None:
            return None, None

        avgdatatime = self.__initime

        # NOTE(review): __lastdatatime was just set to `datatime` above, so
        # deltatime is always 0 and, in overlapping mode, __initime never
        # advances -- confirm whether deltatime should be computed before
        # updating __lastdatatime.
        deltatime = datatime -self.__lastdatatime

        if not self.__withOverapping:
            self.__initime = datatime
        else:
            self.__initime += deltatime

        return avgdata, avgdatatime

    def integrateByBlock(self, dataOut):
        # Sum every n consecutive profiles of the block along the profile axis.

        times = int(dataOut.data.shape[1]/self.n)
        avgdata = numpy.zeros((dataOut.nChannels, times, dataOut.nHeights), dtype=numpy.complex)

        id_min = 0
        id_max = self.n

        for i in range(times):
            junk = dataOut.data[:,id_min:id_max,:]
            avgdata[:,i,:] = junk.sum(axis=1)
            id_min += self.n
            id_max += self.n

        timeInterval = dataOut.ippSeconds*self.n
        avgdatatime = (times - 1) * timeInterval + dataOut.utctime
        self.__dataReady = True
        return avgdata, avgdatatime


    def run(self, dataOut, n=None, timeInterval=None, overlapping=False, byblock=False, **kwargs):
        # Entry point: lazily configure, then integrate block- or profile-wise.
        if not self.isConfig:
            self.setup(n=n, timeInterval=timeInterval, overlapping=overlapping, byblock=byblock, **kwargs)
            self.isConfig = True

        if dataOut.flagDataAsBlock:
            """
            Block data: dimension = [nChannels, nProfiles, nHeis]
            """
            avgdata, avgdatatime = self.integrateByBlock(dataOut)
            dataOut.nProfiles /= self.n
        else:
            avgdata, avgdatatime = self.integrate(dataOut.data, dataOut.utctime)

#        dataOut.timeInterval *= n
        dataOut.flagNoData = True

        if self.__dataReady:
            dataOut.data = avgdata
            dataOut.nCohInt *= self.n
            dataOut.utctime = avgdatatime
#            dataOut.timeInterval = dataOut.ippSeconds * dataOut.nCohInt
            dataOut.flagNoData = False
540 540
class Decoder(Operation):
    """Decode voltage profiles by correlating them with the transmit code,
    either in the time domain (numpy.correlate) or in the frequency domain
    (FFT of the zero-padded code)."""

    isConfig = False
    __profIndex = 0

    code = None   # code matrix (nCode x nBaud), possibly oversampled
    nCode = None  # number of code sequences
    nBaud = None  # bauds per sequence (times osamp when oversampling)

    def __init__(self, **kwargs):

        Operation.__init__(self, **kwargs)

        self.times = None
        self.osamp = None
        self.isConfig = False

    def setup(self, code, osamp, dataOut):
        """Store the code, apply oversampling and precompute its FFT and
        the decoding output buffers."""
        self.__profIndex = 0

        self.code = code
        self.nCode = len(code)
        self.nBaud = len(code[0])

        if (osamp is not None) and (osamp > 1):
            self.osamp = osamp
            self.code = numpy.repeat(code, repeats=self.osamp, axis=1)
            self.nBaud = self.nBaud * self.osamp

        self.__nChannels = dataOut.nChannels
        self.__nProfiles = dataOut.nProfiles
        self.__nHeis = dataOut.nHeights

        if self.__nHeis < self.nBaud:
            raise ValueError('Number of heights (%d) should be greater than number of bauds (%d)' % (self.__nHeis, self.nBaud))

        # Frequency-domain template: conjugated FFT of the zero-padded code.
        # (builtin complex replaces the removed numpy.complex alias)
        __codeBuffer = numpy.zeros((self.nCode, self.__nHeis), dtype=complex)
        __codeBuffer[:, 0:self.nBaud] = self.code
        self.fft_code = numpy.conj(numpy.fft.fft(__codeBuffer, axis=1))

        if dataOut.flagDataAsBlock:
            self.ndatadec = self.__nHeis  # - self.nBaud + 1
            self.datadecTime = numpy.zeros((self.__nChannels, self.__nProfiles, self.ndatadec), dtype=complex)
        else:
            # Time-domain buffer for profile-by-profile decoding
            self.ndatadec = self.__nHeis  # - self.nBaud + 1
            self.datadecTime = numpy.zeros((self.__nChannels, self.ndatadec), dtype=complex)

    def __convolutionInFreq(self, data):
        # Circular convolution via FFT with the current profile's code.
        fft_code = self.fft_code[self.__profIndex].reshape(1, -1)
        fft_data = numpy.fft.fft(data, axis=1)
        conv = fft_data * fft_code
        data = numpy.fft.ifft(conv, axis=1)
        return data

    def __convolutionInFreqOpt(self, data):

        raise NotImplementedError

    def __convolutionInTime(self, data):
        # Correlate each channel with the current profile's code.
        code = self.code[self.__profIndex]

        for i in range(self.__nChannels):
            self.datadecTime[i, :] = numpy.correlate(data[i, :], code, mode='full')[self.nBaud - 1:]

        return self.datadecTime

    def __convolutionByBlockInTime(self, data):
        # Repeat the code matrix so every profile of the block has its row.
        # `//` keeps an integer count (identical to py2 int division).
        repetitions = int(self.__nProfiles // self.nCode)

        junk = numpy.lib.stride_tricks.as_strided(self.code, (repetitions, self.code.size), (0, self.code.itemsize))
        junk = junk.flatten()
        code_block = numpy.reshape(junk, (self.nCode * repetitions, self.nBaud))

        for i in range(self.__nChannels):
            for j in range(self.__nProfiles):
                self.datadecTime[i, j, :] = numpy.correlate(data[i, j, :], code_block[j, :], mode='full')[self.nBaud - 1:]

        return self.datadecTime

    def __convolutionByBlockInFreq(self, data):

        raise NotImplementedError("Decoder by frequency for Blocks not implemented")

        # Unreachable: kept from original for reference.
        fft_code = self.fft_code[self.__profIndex].reshape(1, -1)
        fft_data = numpy.fft.fft(data, axis=2)
        conv = fft_data * fft_code
        data = numpy.fft.ifft(conv, axis=2)
        return data

    def run(self, dataOut, code=None, nCode=None, nBaud=None, mode=0, osamp=None, times=None):
        """Decode dataOut.data in place.

        mode 0: time domain, 1: frequency domain, 2: optimized frequency
        (not implemented). Returns 1 on success; raises ValueError on an
        invalid mode or missing code.
        """
        if dataOut.flagDecodeData:
            print("This data is already decoded, recoding again ...")

        if not self.isConfig:

            if code is None:
                if dataOut.code is None:
                    raise ValueError("Code could not be read from %s instance. Enter a value in Code parameter" % dataOut.type)
                code = dataOut.code
            else:
                code = numpy.array(code).reshape(nCode, nBaud)

            self.setup(code, osamp, dataOut)

            self.isConfig = True

        if mode == 3:
            sys.stderr.write("Decoder Warning: mode=%d is not valid, using mode=0\n" % mode)

        if times is not None:
            sys.stderr.write("Decoder Warning: Argument 'times' in not used anymore\n")

        if self.code is None:
            print("Fail decoding: Code is not defined.")
            return

        datadec = None
        if mode == 3:
            mode = 0

        if dataOut.flagDataAsBlock:
            # Decoding when data have been read as block
            if mode == 0:
                datadec = self.__convolutionByBlockInTime(dataOut.data)
            if mode == 1:
                datadec = self.__convolutionByBlockInFreq(dataOut.data)
        else:
            # Decoding when data have been read profile by profile
            if mode == 0:
                datadec = self.__convolutionInTime(dataOut.data)
            if mode == 1:
                datadec = self.__convolutionInFreq(dataOut.data)
            if mode == 2:
                datadec = self.__convolutionInFreqOpt(dataOut.data)

        if datadec is None:
            raise ValueError("Codification mode selected is not valid: mode=%d. Try selecting 0 or 1" % mode)

        dataOut.code = self.code
        dataOut.nCode = self.nCode
        dataOut.nBaud = self.nBaud

        dataOut.data = datadec

        # Decoded data may be shorter than the input heights.
        dataOut.heightList = dataOut.heightList[0:datadec.shape[-1]]

        dataOut.flagDecodeData = True  # data is now decoded

        # Cycle the profile index over the code sequences.
        if self.__profIndex == self.nCode - 1:
            self.__profIndex = 0
            return 1

        self.__profIndex += 1

        return 1
735 733
class ProfileConcat(Operation):
    """Concatenate m consecutive profiles along the height axis into one
    longer profile (profile-by-profile reading only)."""

    isConfig = False
    buffer = None

    def __init__(self, **kwargs):

        Operation.__init__(self, **kwargs)
        self.profileIndex = 0

    def reset(self):
        # Clear the accumulation buffer and restart counters.
        self.buffer = numpy.zeros_like(self.buffer)
        self.start_index = 0
        self.times = 1

    def setup(self, data, m, n=1):
        """Allocate a buffer wide enough for m concatenated profiles."""
        self.buffer = numpy.zeros((data.shape[0], data.shape[1] * m), dtype=type(data[0, 0]))
        self.nHeights = data.shape[1]  # heights per incoming profile
        self.start_index = 0
        self.times = 1

    def concat(self, data):
        # Copy the incoming profile into its slot of the wide buffer.
        self.buffer[:, self.start_index:self.nHeights * self.times] = data.copy()
        self.start_index = self.start_index + self.nHeights

    def run(self, dataOut, m):
        """Accumulate profiles; publish the concatenated profile every m calls."""
        dataOut.flagNoData = True

        if not self.isConfig:
            self.setup(dataOut.data, m, 1)
            self.isConfig = True

        if dataOut.flagDataAsBlock:
            # py2 `raise E, msg` syntax replaced with the portable call form
            raise ValueError("ProfileConcat can only be used when voltage have been read profile by profile, getBlock = False")
        else:
            self.concat(dataOut.data)
            self.times += 1
            if self.times > m:
                dataOut.data = self.buffer
                self.reset()
                dataOut.flagNoData = False
                # Heights and IPP must reflect the m-times-longer profile.
                deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
                xf = dataOut.heightList[0] + dataOut.nHeights * deltaHeight * m
                dataOut.heightList = numpy.arange(dataOut.heightList[0], xf, deltaHeight)
                dataOut.ippSeconds *= m
785 783
class ProfileSelector(Operation):
    """Select a subset of profiles by explicit list, by index range, or by a
    list of ranges; also supports AMISR beam selection."""

    profileIndex = None
    # Total number of profiles in the current selection
    nProfiles = None

    def __init__(self, **kwargs):

        Operation.__init__(self, **kwargs)
        self.profileIndex = 0

    def incProfileIndex(self):
        # Advance the cyclic output profile counter.
        self.profileIndex += 1

        if self.profileIndex >= self.nProfiles:
            self.profileIndex = 0

    def isThisProfileInRange(self, profileIndex, minIndex, maxIndex):
        """Return True when minIndex <= profileIndex <= maxIndex."""
        return minIndex <= profileIndex <= maxIndex

    def isThisProfileInList(self, profileIndex, profileList):
        """Return True when profileIndex is contained in profileList."""
        return profileIndex in profileList

    def run(self, dataOut, profileList=None, profileRangeList=None, beam=None, byblock=False, rangeList=None, nProfiles=None):

        """
        ProfileSelector:

        Inputs:
            profileList : Index of profiles selected. Example: profileList = (0,1,2,7,8)

            profileRangeList : Minimum and maximum profile indexes. Example: profileRangeList = (4, 30)

            rangeList : List of profile ranges. Example: rangeList = ((4, 30), (32, 64), (128, 256))

        """

        if rangeList is not None:
            # Accept a single (min, max) pair as well as a list of pairs.
            if type(rangeList[0]) not in (tuple, list):
                rangeList = [rangeList]

        dataOut.flagNoData = True

        if dataOut.flagDataAsBlock:
            # data dimension = [nChannels, nProfiles, nHeis]

            if profileList is not None:
                dataOut.data = dataOut.data[:, profileList, :]

            if profileRangeList is not None:
                minIndex = profileRangeList[0]
                maxIndex = profileRangeList[1]
                # list() keeps len()/indexing uniform across Python versions
                profileList = list(range(minIndex, maxIndex + 1))

                dataOut.data = dataOut.data[:, minIndex:maxIndex + 1, :]

            if rangeList is not None:

                profileList = []

                for thisRange in rangeList:
                    minIndex = thisRange[0]
                    maxIndex = thisRange[1]

                    profileList.extend(range(minIndex, maxIndex + 1))

                dataOut.data = dataOut.data[:, profileList, :]

            dataOut.nProfiles = len(profileList)
            dataOut.profileIndex = dataOut.nProfiles - 1
            dataOut.flagNoData = False

            return True

        # data dimension = [nChannels, nHeis]

        if profileList is not None:

            if self.isThisProfileInList(dataOut.profileIndex, profileList):

                self.nProfiles = len(profileList)
                dataOut.nProfiles = self.nProfiles
                dataOut.profileIndex = self.profileIndex
                dataOut.flagNoData = False

                self.incProfileIndex()
            return True

        if profileRangeList is not None:

            minIndex = profileRangeList[0]
            maxIndex = profileRangeList[1]

            if self.isThisProfileInRange(dataOut.profileIndex, minIndex, maxIndex):

                self.nProfiles = maxIndex - minIndex + 1
                dataOut.nProfiles = self.nProfiles
                dataOut.profileIndex = self.profileIndex
                dataOut.flagNoData = False

                self.incProfileIndex()
            return True

        if rangeList is not None:

            # Total selected profiles across all ranges.
            nProfiles = 0

            for thisRange in rangeList:
                minIndex = thisRange[0]
                maxIndex = thisRange[1]

                nProfiles += maxIndex - minIndex + 1

            for thisRange in rangeList:

                minIndex = thisRange[0]
                maxIndex = thisRange[1]

                if self.isThisProfileInRange(dataOut.profileIndex, minIndex, maxIndex):

                    self.nProfiles = nProfiles
                    dataOut.nProfiles = self.nProfiles
                    dataOut.profileIndex = self.profileIndex
                    dataOut.flagNoData = False

                    self.incProfileIndex()

                    break

            return True

        if beam is not None:  # beam is only for AMISR data
            if self.isThisProfileInList(dataOut.profileIndex, dataOut.beamRangeDict[beam]):
                dataOut.flagNoData = False
                dataOut.profileIndex = self.profileIndex

                self.incProfileIndex()

            return True

        raise ValueError("ProfileSelector needs profileList, profileRangeList or rangeList parameter")

        # Unreachable: kept from original.
        return False
945 943
class Reshaper(Operation):
    """Reshape voltage data, either block-wise (explicit shape) or by
    accumulating profiles along the height axis when several transmit
    pulses share one recorded profile (nTxs < 1)."""

    def __init__(self, **kwargs):

        Operation.__init__(self, **kwargs)

        self.__buffer = None
        self.__nitems = 0

    def __appendProfile(self, dataOut, nTxs):
        # Lay consecutive profiles side by side along the height axis.
        if self.__buffer is None:
            shape = (dataOut.nChannels, int(dataOut.nHeights / nTxs))
            self.__buffer = numpy.empty(shape, dtype=dataOut.data.dtype)

        ini = dataOut.nHeights * self.__nitems
        end = ini + dataOut.nHeights

        self.__buffer[:, ini:end] = dataOut.data

        self.__nitems += 1

        return int(self.__nitems * nTxs)

    def __getBuffer(self):
        # Return a copy of the buffer once 1/nTxs profiles were accumulated.
        if self.__nitems == int(1. / self.__nTxs):

            self.__nitems = 0

            return self.__buffer.copy()

        return None

    def __checkInputs(self, dataOut, shape, nTxs):
        """Validate shape/nTxs and return the (shape_tuple, nTxs) pair."""
        if shape is None and nTxs is None:
            raise ValueError("Reshaper: shape of factor should be defined")

        if nTxs:
            if nTxs < 0:
                raise ValueError("nTxs should be greater than 0")

            if nTxs < 1 and dataOut.nProfiles % (1. / nTxs) != 0:
                raise ValueError("nProfiles= %d is not divisibled by (1./nTxs) = %f" % (dataOut.nProfiles, (1. / nTxs)))

            shape = [dataOut.nChannels, dataOut.nProfiles * nTxs, dataOut.nHeights / nTxs]

            return shape, nTxs

        if len(shape) != 2 and len(shape) != 3:
            raise ValueError("shape dimension should be equal to 2 or 3. shape = (nProfiles, nHeis) or (nChannels, nProfiles, nHeis). Actually shape = (%d, %d, %d)" % (dataOut.nChannels, dataOut.nProfiles, dataOut.nHeights))

        if len(shape) == 2:
            shape_tuple = [dataOut.nChannels]
            shape_tuple.extend(shape)
        else:
            shape_tuple = list(shape)

        # Derive nTxs from the requested profile count.
        nTxs = 1.0 * shape_tuple[1] / dataOut.nProfiles

        return shape_tuple, nTxs

    def run(self, dataOut, shape=None, nTxs=None):

        shape_tuple, self.__nTxs = self.__checkInputs(dataOut, shape, nTxs)

        dataOut.flagNoData = True
        profileIndex = None

        if dataOut.flagDataAsBlock:

            dataOut.data = numpy.reshape(dataOut.data, shape_tuple)
            dataOut.flagNoData = False

            profileIndex = int(dataOut.nProfiles * self.__nTxs) - 1

        else:

            if self.__nTxs < 1:

                self.__appendProfile(dataOut, self.__nTxs)
                new_data = self.__getBuffer()

                if new_data is not None:
                    dataOut.data = new_data
                    dataOut.flagNoData = False

                # BUG FIX: use self.__nTxs; the raw `nTxs` argument is None
                # when the operation was configured via `shape`.
                profileIndex = dataOut.profileIndex * self.__nTxs

            else:
                raise ValueError("nTxs should be greater than 0 and lower than 1, or use VoltageReader(..., getblock=True)")

        deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]

        dataOut.heightList = numpy.arange(dataOut.nHeights / self.__nTxs) * deltaHeight + dataOut.heightList[0]

        dataOut.nProfiles = int(dataOut.nProfiles * self.__nTxs)

        dataOut.profileIndex = profileIndex

        dataOut.ippSeconds /= self.__nTxs
1048 1046
class SplitProfiles(Operation):
    """Split each profile into n shorter profiles (block-mode data only)."""

    def __init__(self, **kwargs):

        Operation.__init__(self, **kwargs)

    def run(self, dataOut, n):
        """Reshape (nChannels, nProfiles, nSamples) into
        (nChannels, nProfiles*n, nSamples//n) and fix heights/IPP."""
        dataOut.flagNoData = True
        profileIndex = None

        if dataOut.flagDataAsBlock:

            # nchannels, nprofiles, nsamples
            shape = dataOut.data.shape

            if shape[2] % n != 0:
                raise ValueError("Could not split the data, n=%d has to be multiple of %d" % (n, shape[2]))

            # `//` keeps an integer sample count (identical to py2 `/` here,
            # since divisibility was just checked).
            new_shape = shape[0], shape[1] * n, shape[2] // n

            dataOut.data = numpy.reshape(dataOut.data, new_shape)
            dataOut.flagNoData = False

            profileIndex = int(dataOut.nProfiles / n) - 1

        else:

            raise ValueError("Could not split the data when is read Profile by Profile. Use VoltageReader(..., getblock=True)")

        deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]

        dataOut.heightList = numpy.arange(dataOut.nHeights / n) * deltaHeight + dataOut.heightList[0]

        dataOut.nProfiles = int(dataOut.nProfiles * n)

        dataOut.profileIndex = profileIndex

        dataOut.ippSeconds /= n
1088 1086
class CombineProfiles(Operation):
    """Merge n consecutive profiles into one longer profile, either by
    reshaping a block or by buffering profile-by-profile data."""

    def __init__(self, **kwargs):

        Operation.__init__(self, **kwargs)

        self.__remData = None      # partial profiles awaiting completion
        self.__profileIndex = 0

    def run(self, dataOut, n):

        dataOut.flagNoData = True
        profileIndex = None

        if dataOut.flagDataAsBlock:

            # nchannels, nprofiles, nsamples
            shape = dataOut.data.shape

            # Validate before computing the reshape target (the original
            # computed new_shape first, which is harmless but misleading).
            if shape[1] % n != 0:
                raise ValueError("Could not split the data, n=%d has to be multiple of %d" % (n, shape[1]))

            # `//` keeps integer dimensions (identical to py2 `/` here).
            new_shape = shape[0], shape[1] // n, shape[2] * n

            dataOut.data = numpy.reshape(dataOut.data, new_shape)
            dataOut.flagNoData = False

            profileIndex = int(dataOut.nProfiles * n) - 1

        else:

            # nchannels, nsamples: buffer until n profiles are collected.
            if self.__remData is None:
                newData = dataOut.data
            else:
                newData = numpy.concatenate((self.__remData, dataOut.data), axis=1)

            self.__profileIndex += 1

            if self.__profileIndex < n:
                self.__remData = newData
                return

            self.__profileIndex = 0
            self.__remData = None

            dataOut.data = newData
            dataOut.flagNoData = False

            # Integer index of the combined profile (py2 floor division).
            profileIndex = dataOut.profileIndex // n

        deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]

        dataOut.heightList = numpy.arange(dataOut.nHeights * n) * deltaHeight + dataOut.heightList[0]

        dataOut.nProfiles = int(dataOut.nProfiles / n)

        dataOut.profileIndex = profileIndex

        dataOut.ippSeconds *= n
1150 1148
1151 1149 # import collections
1152 1150 # from scipy.stats import mode
1153 1151 #
1154 1152 # class Synchronize(Operation):
1155 1153 #
1156 1154 # isConfig = False
1157 1155 # __profIndex = 0
1158 1156 #
1159 1157 # def __init__(self, **kwargs):
1160 1158 #
1161 1159 # Operation.__init__(self, **kwargs)
1162 1160 # # self.isConfig = False
1163 1161 # self.__powBuffer = None
1164 1162 # self.__startIndex = 0
1165 1163 # self.__pulseFound = False
1166 1164 #
1167 1165 # def __findTxPulse(self, dataOut, channel=0, pulse_with = None):
1168 1166 #
1169 1167 # #Read data
1170 1168 #
1171 1169 # powerdB = dataOut.getPower(channel = channel)
1172 1170 # noisedB = dataOut.getNoise(channel = channel)[0]
1173 1171 #
1174 1172 # self.__powBuffer.extend(powerdB.flatten())
1175 1173 #
1176 1174 # dataArray = numpy.array(self.__powBuffer)
1177 1175 #
1178 1176 # filteredPower = numpy.correlate(dataArray, dataArray[0:self.__nSamples], "same")
1179 1177 #
1180 1178 # maxValue = numpy.nanmax(filteredPower)
1181 1179 #
1182 1180 # if maxValue < noisedB + 10:
1183 1181 # #No se encuentra ningun pulso de transmision
1184 1182 # return None
1185 1183 #
1186 1184 # maxValuesIndex = numpy.where(filteredPower > maxValue - 0.1*abs(maxValue))[0]
1187 1185 #
1188 1186 # if len(maxValuesIndex) < 2:
1189 1187 # #Solo se encontro un solo pulso de transmision de un baudio, esperando por el siguiente TX
1190 1188 # return None
1191 1189 #
1192 1190 # phasedMaxValuesIndex = maxValuesIndex - self.__nSamples
1193 1191 #
1194 1192 # #Seleccionar solo valores con un espaciamiento de nSamples
1195 1193 # pulseIndex = numpy.intersect1d(maxValuesIndex, phasedMaxValuesIndex)
1196 1194 #
1197 1195 # if len(pulseIndex) < 2:
1198 1196 # #Solo se encontro un pulso de transmision con ancho mayor a 1
1199 1197 # return None
1200 1198 #
1201 1199 # spacing = pulseIndex[1:] - pulseIndex[:-1]
1202 1200 #
1203 1201 # #remover senales que se distancien menos de 10 unidades o muestras
1204 1202 # #(No deberian existir IPP menor a 10 unidades)
1205 1203 #
1206 1204 # realIndex = numpy.where(spacing > 10 )[0]
1207 1205 #
1208 1206 # if len(realIndex) < 2:
1209 1207 # #Solo se encontro un pulso de transmision con ancho mayor a 1
1210 1208 # return None
1211 1209 #
1212 1210 # #Eliminar pulsos anchos (deja solo la diferencia entre IPPs)
1213 1211 # realPulseIndex = pulseIndex[realIndex]
1214 1212 #
1215 1213 # period = mode(realPulseIndex[1:] - realPulseIndex[:-1])[0][0]
1216 1214 #
1217 1215 # print "IPP = %d samples" %period
1218 1216 #
1219 1217 # self.__newNSamples = dataOut.nHeights #int(period)
1220 1218 # self.__startIndex = int(realPulseIndex[0])
1221 1219 #
1222 1220 # return 1
1223 1221 #
1224 1222 #
1225 1223 # def setup(self, nSamples, nChannels, buffer_size = 4):
1226 1224 #
1227 1225 # self.__powBuffer = collections.deque(numpy.zeros( buffer_size*nSamples,dtype=numpy.float),
1228 1226 # maxlen = buffer_size*nSamples)
1229 1227 #
1230 1228 # bufferList = []
1231 1229 #
1232 1230 # for i in range(nChannels):
1233 1231 # bufferByChannel = collections.deque(numpy.zeros( buffer_size*nSamples, dtype=numpy.complex) + numpy.NAN,
1234 1232 # maxlen = buffer_size*nSamples)
1235 1233 #
1236 1234 # bufferList.append(bufferByChannel)
1237 1235 #
1238 1236 # self.__nSamples = nSamples
1239 1237 # self.__nChannels = nChannels
1240 1238 # self.__bufferList = bufferList
1241 1239 #
1242 1240 # def run(self, dataOut, channel = 0):
1243 1241 #
1244 1242 # if not self.isConfig:
1245 1243 # nSamples = dataOut.nHeights
1246 1244 # nChannels = dataOut.nChannels
1247 1245 # self.setup(nSamples, nChannels)
1248 1246 # self.isConfig = True
1249 1247 #
1250 1248 # #Append new data to internal buffer
1251 1249 # for thisChannel in range(self.__nChannels):
1252 1250 # bufferByChannel = self.__bufferList[thisChannel]
1253 1251 # bufferByChannel.extend(dataOut.data[thisChannel])
1254 1252 #
1255 1253 # if self.__pulseFound:
1256 1254 # self.__startIndex -= self.__nSamples
1257 1255 #
1258 1256 # #Finding Tx Pulse
1259 1257 # if not self.__pulseFound:
1260 1258 # indexFound = self.__findTxPulse(dataOut, channel)
1261 1259 #
1262 1260 # if indexFound == None:
1263 1261 # dataOut.flagNoData = True
1264 1262 # return
1265 1263 #
1266 1264 # self.__arrayBuffer = numpy.zeros((self.__nChannels, self.__newNSamples), dtype = numpy.complex)
1267 1265 # self.__pulseFound = True
1268 1266 # self.__startIndex = indexFound
1269 1267 #
1270 1268 # #If pulse was found ...
1271 1269 # for thisChannel in range(self.__nChannels):
1272 1270 # bufferByChannel = self.__bufferList[thisChannel]
1273 1271 # #print self.__startIndex
1274 1272 # x = numpy.array(bufferByChannel)
1275 1273 # self.__arrayBuffer[thisChannel] = x[self.__startIndex:self.__startIndex+self.__newNSamples]
1276 1274 #
1277 1275 # deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1278 1276 # dataOut.heightList = numpy.arange(self.__newNSamples)*deltaHeight
1279 1277 # # dataOut.ippSeconds = (self.__newNSamples / deltaHeight)/1e6
1280 1278 #
1281 1279 # dataOut.data = self.__arrayBuffer
1282 1280 #
1283 1281 # self.__startIndex += self.__newNSamples
1284 1282 #
1285 1283 # return
General Comments 0
You need to be logged in to leave comments. Login now