##// END OF EJS Templates
letras
José Chávez -
r1069:1b24a2b59c23
parent child
Show More
@@ -1,1795 +1,1795
1 1 '''
2 2 Created on Jul 2, 2014
3 3
4 4 @author: roj-idl71
5 5 '''
6 6 import os
7 7 import sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import inspect
13 13 import time, datetime
14 14 import traceback
15 15 import zmq
16 16
17 17 try:
18 18 from gevent import sleep
19 19 except:
20 20 from time import sleep
21 21
22 22 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
23 23 from schainpy.model.data.jroheaderIO import get_dtype_index, get_numpy_dtype, get_procflag_dtype, get_dtype_width
24 24
25 25 LOCALTIME = True
26 26
def isNumber(cad):
    """
    Check whether the given string can be converted to a number.

    Input:
        cad : string to test for numeric convertibility

    Return:
        True  : the string represents a number
        False : it does not
    """
    try:
        float(cad)
    except:
        return False
    return True
45 45
def isFileInEpoch(filename, startUTSeconds, endUTSeconds):
    """
    Determine whether a Jicamarca data file (.r) holds data inside the given
    epoch range.

    Inputs:
        filename : full path of the data file
        startUTSeconds : range start, in seconds since 01/01/1970
        endUTSeconds : range end, in seconds since 01/01/1970

    Return:
        1 when the first basic header timestamp falls inside
        [startUTSeconds, endUTSeconds), 0 otherwise (also when the file
        cannot be opened or its header is invalid).
    """
    basicHeaderObj = BasicHeader(LOCALTIME)

    try:
        fp = open(filename,'rb')
    except IOError:
        print "The file %s can't be opened" %(filename)
        return 0

    # Only the first basic header is needed: it carries the block time (utc).
    sts = basicHeaderObj.read(fp)
    fp.close()

    if not(sts):
        print "Skipping the file %s because it has not a valid header" %(filename)
        return 0

    # Inclusive start, exclusive end comparison against the header time.
    if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
        return 0

    return 1
86 86
def isTimeInRange(thisTime, startTime, endTime):
    """
    Return 1 when thisTime lies in the [startTime, endTime] window, else 0.

    When endTime < startTime the window is taken to wrap past midnight.
    """
    if endTime >= startTime:
        # Plain window: inside means between both bounds.
        return 0 if (thisTime < startTime or thisTime > endTime) else 1
    # Wrapping window: outside only when past startTime's gap on both sides.
    return 0 if (thisTime < startTime and thisTime > endTime) else 1
99 99
def isFileInTimeRange(filename, startDate, endDate, startTime, endTime):
    """
    Return the first-block datetime of a Jicamarca data file (.r) when the
    file overlaps the requested time window, or None otherwise.

    Inputs:
        filename : full path of the data file
        startDate : range start as datetime.date
        endDate : range end as datetime.date
        startTime : range start as datetime.time
        endTime : range end as datetime.time

    Return:
        datetime of the first block when the file overlaps the window;
        None when it does not, cannot be opened, or has invalid headers.
    """


    try:
        fp = open(filename,'rb')
    except IOError:
        print "The file %s can't be opened" %(filename)
        return None

    firstBasicHeaderObj = BasicHeader(LOCALTIME)
    systemHeaderObj = SystemHeader()
    radarControllerHeaderObj = RadarControllerHeader()
    processingHeaderObj = ProcessingHeader()

    lastBasicHeaderObj = BasicHeader(LOCALTIME)

    # Read the complete first-header set to learn the data block size.
    sts = firstBasicHeaderObj.read(fp)

    if not(sts):
        print "[Reading] Skipping the file %s because it has not a valid header" %(filename)
        return None

    if not systemHeaderObj.read(fp):
        return None

    if not radarControllerHeaderObj.read(fp):
        return None

    if not processingHeaderObj.read(fp):
        return None

    filesize = os.path.getsize(filename)

    offset = processingHeaderObj.blockSize + 24 #header size

    if filesize <= offset:
        print "[Reading] %s: This file has not enough data" %filename
        return None

    # Seek to the basic header of the LAST block to get the file's end time.
    fp.seek(-offset, 2)

    sts = lastBasicHeaderObj.read(fp)

    fp.close()

    thisDatetime = lastBasicHeaderObj.datatime
    thisTime_last_block = thisDatetime.time()

    thisDatetime = firstBasicHeaderObj.datatime
    thisDate = thisDatetime.date()
    thisTime_first_block = thisDatetime.time()

    #General case
    # o>>>>>>>>>>>>>><<<<<<<<<<<<<<o
    #-----------o----------------------------o-----------
    #            startTime                     endTime

    if endTime >= startTime:
        if (thisTime_last_block < startTime) or (thisTime_first_block > endTime):
            return None

        return thisDatetime

    #If endTime < startTime then endTime belongs to the next day


    #<<<<<<<<<<<o                            o>>>>>>>>>>>
    #-----------o----------------------------o-----------
    #            endTime                      startTime

    if (thisDate == startDate) and (thisTime_last_block < startTime):
        return None

    if (thisDate == endDate) and (thisTime_first_block > endTime):
        return None

    if (thisTime_last_block < startTime) and (thisTime_first_block > endTime):
        return None

    return thisDatetime
203 203
204 204 def isFolderInDateRange(folder, startDate=None, endDate=None):
205 205 """
206 206 Retorna 1 si el archivo de datos se encuentra dentro del rango de horas especificado.
207 207
208 208 Inputs:
209 209 folder : nombre completo del directorio.
210 210 Su formato deberia ser "/path_root/?YYYYDDD"
211 211
212 212 siendo:
213 213 YYYY : Anio (ejemplo 2015)
214 214 DDD : Dia del anio (ejemplo 305)
215 215
216 216 startDate : fecha inicial del rango seleccionado en formato datetime.date
217 217
218 218 endDate : fecha final del rango seleccionado en formato datetime.date
219 219
220 220 Return:
221 221 Boolean : Retorna True si el archivo de datos contiene datos en el rango de
222 222 fecha especificado, de lo contrario retorna False.
223 223 Excepciones:
224 224 Si el directorio no tiene el formato adecuado
225 225 """
226 226
227 227 basename = os.path.basename(folder)
228 228
229 229 if not isRadarFolder(basename):
230 230 print "The folder %s has not the rigth format" %folder
231 231 return 0
232 232
233 233 if startDate and endDate:
234 234 thisDate = getDateFromRadarFolder(basename)
235 235
236 236 if thisDate < startDate:
237 237 return 0
238 238
239 239 if thisDate > endDate:
240 240 return 0
241 241
242 242 return 1
243 243
244 244 def isFileInDateRange(filename, startDate=None, endDate=None):
245 245 """
246 246 Retorna 1 si el archivo de datos se encuentra dentro del rango de horas especificado.
247 247
248 248 Inputs:
249 249 filename : nombre completo del archivo de datos en formato Jicamarca (.r)
250 250
251 251 Su formato deberia ser "?YYYYDDDsss"
252 252
253 253 siendo:
254 254 YYYY : Anio (ejemplo 2015)
255 255 DDD : Dia del anio (ejemplo 305)
256 256 sss : set
257 257
258 258 startDate : fecha inicial del rango seleccionado en formato datetime.date
259 259
260 260 endDate : fecha final del rango seleccionado en formato datetime.date
261 261
262 262 Return:
263 263 Boolean : Retorna True si el archivo de datos contiene datos en el rango de
264 264 fecha especificado, de lo contrario retorna False.
265 265 Excepciones:
266 266 Si el archivo no tiene el formato adecuado
267 267 """
268 268
269 269 basename = os.path.basename(filename)
270 270
271 271 if not isRadarFile(basename):
272 272 print "The filename %s has not the rigth format" %filename
273 273 return 0
274 274
275 275 if startDate and endDate:
276 276 thisDate = getDateFromRadarFile(basename)
277 277
278 278 if thisDate < startDate:
279 279 return 0
280 280
281 281 if thisDate > endDate:
282 282 return 0
283 283
284 284 return 1
285 285
def getFileFromSet(path, ext, set):
    """
    Return the filename in *path* matching extension *ext* and set number
    *set*, following the Jicamarca convention xYYYYDDDSSS.ext.

    When no file matches the requested set, the last valid file (sorted
    case-insensitively) is returned instead. Returns None when the folder
    holds no file with a parseable year/doy.
    """
    validFilelist = []
    fileList = os.listdir(path)
    year = None
    doy = None

    # 0 1234 567 89A BCDE
    # H YYYY DDD SSS .ext

    for thisFile in fileList:
        try:
            year = int(thisFile[1:5])
            doy = int(thisFile[5:8])
        except:
            continue

        if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
            continue

        validFilelist.append(thisFile)

    # BUGFIX: the original referenced year/doy after the loop without
    # initializing them, raising NameError on an empty/unparseable folder.
    if year is None:
        return None

    myfile = fnmatch.filter(validFilelist, '*%4.4d%3.3d%3.3d*' % (year, doy, set))

    if len(myfile) != 0:
        return myfile[0]

    filename = '*%4.4d%3.3d%3.3d%s' % (year, doy, set, ext.lower())
    print('the filename %s does not exist' % filename)
    print('...going to the last file: ')

    if validFilelist:
        validFilelist = sorted(validFilelist, key=str.lower)
        return validFilelist[-1]

    return None
319 319
def getlastFileFromPath(path, ext):
    """
    Filter the directory listing down to files shaped like "PYYYYDDDSSS.ext"
    and return the last one (case-insensitive sort).

    Input:
        path : folder containing the candidate files
        ext : extension the files must have

    Return:
        The name (no path) of the last matching file, or None.
    """
    candidates = []

    # 0 1234 567 89A BCDE
    # H YYYY DDD SSS .ext
    for entry in os.listdir(path):

        year = entry[1:5]
        doy = entry[5:8]

        # Same numeric screen as isNumber(): float-convertible or skip.
        try:
            float(year)
            float(doy)
        except:
            continue

        year = int(year)
        doy = int(doy)

        if os.path.splitext(entry)[-1].lower() != ext.lower():
            continue

        candidates.append(entry)

    if not candidates:
        return None

    return sorted(candidates, key=str.lower)[-1]
361 361
def checkForRealPath(path, foldercounter, year, doy, set, ext):
    """
    Resolve the real (case-sensitive, Linux) path of a data file by trying
    the possible upper/lower-case combinations of folder and file prefixes.

    Example:
        real file is .../.../D2009307/P2009307367.ext; the function tries
            .../.../y2009307367.ext
            .../.../Y2009307367.ext
            .../.../x2009307/y2009307367.ext
            .../.../x2009307/Y2009307367.ext
            .../.../X2009307/y2009307367.ext
            .../.../X2009307/Y2009307367.ext
        the last combination being, in this case, the file looked for.

    Return:
        (fullfilename, filename) when some combination exists on disk;
        (None, filename) otherwise, filename holding the last name tried.
    """
    fullfilename = None
    find_flag = False
    filename = None

    prefixDirList = [None,'d','D']
    if ext.lower() == ".r": #voltage
        prefixFileList = ['d','D']
    elif ext.lower() == ".pdata": #spectra
        prefixFileList = ['p','P']
    else:
        # unknown extension: nothing to try
        return None, filename

    # sweep every folder-prefix / file-prefix combination
    for prefixDir in prefixDirList:
        thispath = path
        if prefixDir != None:
            # build the day folder name xYYYYDDD (x='d' or 'D'),
            # optionally suffixed with the folder counter
            if foldercounter == 0:
                thispath = os.path.join(path, "%s%04d%03d" % ( prefixDir, year, doy ))
            else:
                thispath = os.path.join(path, "%s%04d%03d_%02d" % ( prefixDir, year, doy , foldercounter))
        for prefixFile in prefixFileList: # both case variants of the file prefix
            filename = "%s%04d%03d%03d%s" % ( prefixFile, year, doy, set, ext ) # file name xYYYYDDDSSS.ext
            fullfilename = os.path.join( thispath, filename ) # full candidate path

            if os.path.exists( fullfilename ): # keep the first existing one
                find_flag = True
                break
        if find_flag:
            break

    if not(find_flag):
        return None, filename

    return fullfilename, filename
420 420
def isRadarFolder(folder):
    """Return 1 when *folder* is named like a radar day folder (?YYYYDDD)."""
    try:
        int(folder[1:5])
        int(folder[5:8])
    except:
        return 0
    return 1
429 429
def isRadarFile(file):
    """Return 1 when *file* is named like a radar data file (?YYYYDDDSSS)."""
    try:
        int(file[1:5])
        int(file[5:8])
        int(file[8:11])
    except:
        return 0
    return 1
439 439
def getDateFromRadarFile(file):
    """
    Decode the date of a radar filename shaped like ?YYYYDDDSSS.

    Return a datetime.date, or None when the name cannot be parsed.
    """
    try:
        year = int(file[1:5])
        doy = int(file[5:8])
        int(file[8:11])  # set number: parsed only to validate the name
    except:
        return None

    return datetime.date(year, 1, 1) + datetime.timedelta(doy - 1)
450 450
def getDateFromRadarFolder(folder):
    """
    Decode the date of a radar day folder shaped like ?YYYYDDD.

    Return a datetime.date, or None when the name cannot be parsed.
    """
    try:
        year = int(folder[1:5])
        doy = int(folder[5:8])
    except:
        return None

    return datetime.date(year, 1, 1) + datetime.timedelta(doy - 1)
460 460
class JRODataIO:
    """
    Base class shared by JRO raw-data readers and writers: holds the header
    objects, the open-file state and the block bookkeeping.
    """

    # speed of light [m/s]
    c = 3E8

    isConfig = False

    # header objects read from / written to the data files
    basicHeaderObj = None

    systemHeaderObj = None

    radarControllerHeaderObj = None

    processingHeaderObj = None

    # numpy dtype of the raw samples (real/imag pairs)
    dtype = None

    pathList = []

    filenameList = []

    filename = None

    ext = None

    # 1 right after a new file is opened, reset once blocks are read
    flagIsNewFile = 1

    # set to 1 when a time gap larger than maxTimeStep is detected
    flagDiscontinuousBlock = 0

    flagIsNewBlock = 0

    # open file object of the current data file
    fp = None

    firstHeaderSize = 0

    basicHeaderSize = 24

    versionFile = 1103

    fileSize = None

    # ippSeconds = None

    # expected file size computed from the processing header
    fileSizeByHeader = None

    fileIndex = None

    profileIndex = None

    blockIndex = None

    nTotalBlocks = None

    # maximum allowed gap [s] between consecutive blocks
    maxTimeStep = 30

    lastUTTime = None

    datablock = None

    dataOut = None

    blocksize = None

    getByBlock = False

    def __init__(self):
        # abstract: subclasses must provide their own constructor
        raise NotImplementedError

    def run(self):
        # abstract: subclasses implement the processing-unit entry point
        raise NotImplementedError

    def getDtypeWidth(self):
        """Return the width in bytes of one raw sample component."""

        dtype_index = get_dtype_index(self.dtype)
        dtype_width = get_dtype_width(dtype_index)

        return dtype_width

    def getAllowedArgs(self):
        """Return the list of argument names accepted by self.run()."""
        return inspect.getargspec(self.run).args
542 542
class JRODataReader(JRODataIO):
    """
    Base reader: locates data files (offline or online) and iterates over
    their data blocks.
    """

    # 1 when reading files as they are being acquired (online mode)
    online = 0

    realtime = 0

    # blocks read so far in the current file
    nReadBlocks = 0

    delay = 10 #number of seconds waiting a new file

    nTries = 3 #quantity tries

    nFiles = 3 #number of files for searching

    path = None

    # suffix counter for day folders named xYYYYDDD_NN
    foldercounter = 0

    flagNoMoreFiles = 0

    datetimeList = []

    __isFirstTimeOnline = 1

    __printInfo = True

    profileIndex = None

    nTxs = 1

    txIndex = None

    #Added--------------------

    selBlocksize = None

    selBlocktime = None
580 580
581 581 def __init__(self):
582 582
583 583 """
584 584 This class is used to find data files
585 585
586 586 Example:
587 587 reader = JRODataReader()
588 588 fileList = reader.findDataFiles()
589 589
590 590 """
591 591 pass
592 592
593 593
    def createObjByDefault(self):
        """
        Abstract: subclasses create and return their default output object.
        """
        raise NotImplementedError
599 599
    def getBlockDimension(self):
        """Abstract: subclasses compute the shape of one data block."""
        raise NotImplementedError
603 603
604 604 def searchFilesOffLine(self,
605 605 path,
606 606 startDate=None,
607 607 endDate=None,
608 608 startTime=datetime.time(0,0,0),
609 609 endTime=datetime.time(23,59,59),
610 610 set=None,
611 611 expLabel='',
612 612 ext='.r',
613 613 cursor=None,
614 614 skip=None,
615 615 walk=True):
616 616
617 617 self.filenameList = []
618 618 self.datetimeList = []
619 619
620 620 pathList = []
621 621
622 622 dateList, pathList = self.findDatafiles(path, startDate, endDate, expLabel, ext, walk, include_path=True)
623 623
624 624 if dateList == []:
625 625 return [], []
626 626
627 627 if len(dateList) > 1:
628 628 print "[Reading] Data found for date range [%s - %s]: total days = %d" %(startDate, endDate, len(dateList))
629 629 else:
630 630 print "[Reading] Data found for date range [%s - %s]: date = %s" %(startDate, endDate, dateList[0])
631 631
632 632 filenameList = []
633 633 datetimeList = []
634 634
635 635 for thisPath in pathList:
636 636
637 637 fileList = glob.glob1(thisPath, "*%s" %ext)
638 638 fileList.sort()
639 639
640 640 skippedFileList = []
641 641
642 if cursor is not None andk skip is not None:
642 if cursor is not None and skip is not None:
643 643
644 644 if skip == 0:
645 645 skippedFileList = []
646 646 else:
647 647 skippedFileList = fileList[cursor*skip: cursor*skip + skip]
648 648
649 649 else:
650 650 skippedFileList = fileList
651 651
652 652 for file in skippedFileList:
653 653
654 654 filename = os.path.join(thisPath,file)
655 655
656 656 if not isFileInDateRange(filename, startDate, endDate):
657 657 continue
658 658
659 659 thisDatetime = isFileInTimeRange(filename, startDate, endDate, startTime, endTime)
660 660
661 661 if not(thisDatetime):
662 662 continue
663 663
664 664 filenameList.append(filename)
665 665 datetimeList.append(thisDatetime)
666 666
667 667 if not(filenameList):
668 668 print "[Reading] Time range selected invalid [%s - %s]: No *%s files in %s)" %(startTime, endTime, ext, path)
669 669 return [], []
670 670
671 671 print "[Reading] %d file(s) was(were) found in time range: %s - %s" %(len(filenameList), startTime, endTime)
672 672 print
673 673
674 674 # for i in range(len(filenameList)):
675 675 # print "[Reading] %s -> [%s]" %(filenameList[i], datetimeList[i].ctime())
676 676
677 677 self.filenameList = filenameList
678 678 self.datetimeList = datetimeList
679 679
680 680 return pathList, filenameList
681 681
    def __searchFilesOnLine(self, path, expLabel = "", ext = None, walk=True, set=None):

        """
        Look inside the last (most recent) day folder of *path* for the last
        data file and return it together with its decoded metadata.

        Input:
            path : folder containing the data files
            expLabel : sub-experiment name (sub-folder)
            ext : file extension to look for
            walk : when True search inside the xYYYYDDD day sub-folders,
                   otherwise take *path* itself as the data folder
            set : when given, look for that specific set number instead of
                  the last file

        Return:
            (fullpath, foldercounter, filename, year, doy, set) of the file
            found, or six Nones when nothing usable exists.
        """
        if not os.path.isdir(path):
            return None, None, None, None, None, None

        dirList = []

        if not walk:
            fullpath = path
            foldercounter = 0
        else:
            # keep only sub-directories named like radar day folders
            for thisPath in os.listdir(path):
                if not os.path.isdir(os.path.join(path,thisPath)):
                    continue
                if not isRadarFolder(thisPath):
                    continue

                dirList.append(thisPath)

            if not(dirList):
                return None, None, None, None, None, None

            dirList = sorted( dirList, key=str.lower )

            # last day folder wins; folders may carry a _NN suffix counter
            doypath = dirList[-1]
            foldercounter = int(doypath.split('_')[1]) if len(doypath.split('_'))>1 else 0
            fullpath = os.path.join(path, doypath, expLabel)


        print "[Reading] %s folder was found: " %(fullpath )

        if set == None:
            filename = getlastFileFromPath(fullpath, ext)
        else:
            filename = getFileFromSet(fullpath, ext, set)

        if not(filename):
            return None, None, None, None, None, None

        print "[Reading] %s file was found" %(filename)

        # make sure the file already holds at least one complete block
        if not(self.__verifyFile(os.path.join(fullpath, filename))):
            return None, None, None, None, None, None

        year = int( filename[1:5] )
        doy = int( filename[5:8] )
        set = int( filename[8:11] )

        return fullpath, foldercounter, filename, year, doy, set
754 754
    def __setNextFileOffline(self):
        """
        Advance to the next valid file of self.filenameList and open it.

        Affected:
            self.flagIsNewFile, self.fileIndex, self.filename,
            self.fileSize, self.fp, self.flagNoMoreFiles

        Return:
            1 on success (self.fp points at the new file), 0 when the list
            is exhausted.
        """
        idFile = self.fileIndex

        while (True):
            idFile += 1
            if not(idFile < len(self.filenameList)):
                self.flagNoMoreFiles = 1
                # print "[Reading] No more Files"
                return 0

            filename = self.filenameList[idFile]

            # skip files that do not hold at least one complete block
            if not(self.__verifyFile(filename)):
                continue

            fileSize = os.path.getsize(filename)
            fp = open(filename,'rb')
            break

        self.flagIsNewFile = 1
        self.fileIndex = idFile
        self.filename = filename
        self.fileSize = fileSize
        self.fp = fp

        # print "[Reading] Setting the file: %s"%self.filename

        return 1
784 784
    def __setNextFileOnline(self):
        """
        Look for the next file with enough data to be read inside the
        current folder; when no valid file shows up, wait a fixed delay and
        retry over the next possible files (and, eventually, folders).

        Affected:
            self.flagIsNewFile
            self.filename
            self.fileSize
            self.fp
            self.set
            self.flagNoMoreFiles

        Return:
            0 : no valid next file could be found
            1 : the file was opened successfully and is ready to be read

        Exceptions:
            when a given file cannot be opened
        """
        nFiles = 0
        fileOk_flag = False
        firstTime_flag = True

        self.set += 1

        # sets run 000..999; past that a new suffixed folder is expected
        if self.set > 999:
            self.set = 0
            self.foldercounter += 1

        # try the first candidate file
        fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
        if fullfilename:
            if self.__verifyFile(fullfilename, False):
                fileOk_flag = True

        # no file yet: wait and retry over the next candidates
        if not(fileOk_flag):
            for nFiles in range(self.nFiles+1): # probe the next self.nFiles+1 possible files

                if firstTime_flag: # first pass: retry the same set self.nTries times
                    tries = self.nTries
                else:
                    tries = 1 # later passes: a single try per set

                for nTries in range( tries ):
                    if firstTime_flag:
                        print "\t[Reading] Waiting %0.2f sec for the next file: \"%s\" , try %03d ..." % ( self.delay, filename, nTries+1 )
                        sleep( self.delay )
                    else:
                        print "\t[Reading] Searching the next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext)

                    fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
                    if fullfilename:
                        if self.__verifyFile(fullfilename):
                            fileOk_flag = True
                            break

                if fileOk_flag:
                    break

                firstTime_flag = False

                print "\t[Reading] Skipping the file \"%s\" due to this file doesn't exist" % filename
                self.set += 1

                if nFiles == (self.nFiles-1): # still nothing: move to the next day folder
                    self.set = 0
                    self.doy += 1
                    self.foldercounter = 0

        if fileOk_flag:
            self.fileSize = os.path.getsize( fullfilename )
            self.filename = fullfilename
            self.flagIsNewFile = 1
            if self.fp != None: self.fp.close()
            self.fp = open(fullfilename, 'rb')
            self.flagNoMoreFiles = 0
            # print '[Reading] Setting the file: %s' % fullfilename
        else:
            self.fileSize = 0
            self.filename = None
            self.flagIsNewFile = 0
            self.fp = None
            self.flagNoMoreFiles = 1
            # print '[Reading] No more files to read'

        return fileOk_flag
874 874
875 875 def setNextFile(self):
876 876 if self.fp != None:
877 877 self.fp.close()
878 878
879 879 if self.online:
880 880 newFile = self.__setNextFileOnline()
881 881 else:
882 882 newFile = self.__setNextFileOffline()
883 883
884 884 if not(newFile):
885 885 print '[Reading] No more files to read'
886 886 return 0
887 887
888 888 if self.verbose:
889 889 print '[Reading] Setting the file: %s' % self.filename
890 890
891 891 self.__readFirstHeader()
892 892 self.nReadBlocks = 0
893 893 return 1
894 894
    def __waitNewBlock(self):
        """
        Wait (online mode only) until a whole new block is available in the
        current file.

        Return 1 when a new block was found and its basic header read,
        0 otherwise (offline mode, file complete, or timeout after nTries).
        """
        if not self.online:
            return 0

        if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
            return 0

        currentPointer = self.fp.tell()

        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        for nTries in range( self.nTries ):

            # reopen to refresh the view of a file still being written
            self.fp.close()
            self.fp = open( self.filename, 'rb' )
            self.fp.seek( currentPointer )

            self.fileSize = os.path.getsize( self.filename )
            currentSize = self.fileSize - currentPointer

            if ( currentSize >= neededSize ):
                self.basicHeaderObj.read(self.fp)
                return 1

            if self.fileSize == self.fileSizeByHeader:
                # file already has its full expected size: no more blocks
                # self.flagEoF = True
                return 0

            print "[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
            sleep( self.delay )


        return 0
933 933
    def waitDataBlock(self,pointer_location):
        """
        Wait until a whole data block is available at *pointer_location*,
        re-opening the file up to nTries times with self.delay seconds
        between attempts.

        Return 1 when enough bytes are available, 0 on timeout.
        """
        currentPointer = pointer_location

        neededSize = self.processingHeaderObj.blockSize #+ self.basicHeaderSize

        for nTries in range( self.nTries ):
            # reopen to refresh the size of a file still being written
            self.fp.close()
            self.fp = open( self.filename, 'rb' )
            self.fp.seek( currentPointer )

            self.fileSize = os.path.getsize( self.filename )
            currentSize = self.fileSize - currentPointer

            if ( currentSize >= neededSize ):
                return 1

            print "[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
            sleep( self.delay )

        return 0
955 955
    def __jumpToLastBlock(self):
        """
        On the first online read, advance the file pointer past the
        already-written blocks so reading starts at the file's last block.

        Only acts once (guarded by self.__isFirstTimeOnline).
        """
        if not(self.__isFirstTimeOnline):
            return

        csize = self.fileSize - self.fp.tell()
        blocksize = self.processingHeaderObj.blockSize

        # skip the first data block (its headers were already consumed)
        if csize > self.processingHeaderObj.blockSize:
            self.fp.seek(self.fp.tell() + blocksize)
        else:
            return

        csize = self.fileSize - self.fp.tell()
        neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
        # jump block by block; once past the end, step one block back
        while True:

            if self.fp.tell()<self.fileSize:
                self.fp.seek(self.fp.tell() + neededsize)
            else:
                self.fp.seek(self.fp.tell() - neededsize)
                break

        # csize = self.fileSize - self.fp.tell()
        # neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
        # factor = int(csize/neededsize)
        # if factor > 0:
        #     self.fp.seek(self.fp.tell() + factor*neededsize)

        self.flagIsNewFile = 0
        self.__isFirstTimeOnline = 0
988 988
    def __setNewBlock(self):
        """
        Position the reader at the next data block, moving on to the next
        file when the current one is exhausted.

        Return 1 when a block is ready to be read, 0 when no more data.
        """
        #if self.server is None:
        if self.fp == None:
            return 0

        # if self.online:
        #     self.__jumpToLastBlock()

        if self.flagIsNewFile:
            # first block of a freshly opened file: headers already read
            self.lastUTTime = self.basicHeaderObj.utc
            return 1

        if self.realtime:
            # realtime mode never waits for data: jump to the next file
            self.flagDiscontinuousBlock = 1
            if not(self.setNextFile()):
                return 0
            else:
                return 1
        #if self.server is None:
        currentSize = self.fileSize - self.fp.tell()
        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
        if (currentSize >= neededSize):
            # a whole block plus its basic header is already on disk
            self.basicHeaderObj.read(self.fp)
            self.lastUTTime = self.basicHeaderObj.utc
            return 1
        # else:
        #     self.basicHeaderObj.read(self.zHeader)
        #     self.lastUTTime = self.basicHeaderObj.utc
        #     return 1
        if self.__waitNewBlock():
            self.lastUTTime = self.basicHeaderObj.utc
            return 1
        #if self.server is None:
        if not(self.setNextFile()):
            return 0

        deltaTime = self.basicHeaderObj.utc - self.lastUTTime #
        self.lastUTTime = self.basicHeaderObj.utc

        self.flagDiscontinuousBlock = 0

        # flag a time gap when consecutive blocks are too far apart
        if deltaTime > self.maxTimeStep:
            self.flagDiscontinuousBlock = 1

        return 1
1034 1034
    def readNextBlock(self):
        """
        Read the next data block, skipping blocks whose time falls outside
        the selected [startTime, endTime] window.

        Return 1 on success, 0 when no more blocks can be read.
        """
        #Skip block out of startTime and endTime
        while True:
            if not(self.__setNewBlock()):
                return 0

            if not(self.readBlock()):
                return 0

            self.getBasicHeader()

            if not isTimeInRange(self.dataOut.datatime.time(), self.startTime, self.endTime):

                print "[Reading] Block No. %d/%d -> %s [Skipping]" %(self.nReadBlocks,
                                                                     self.processingHeaderObj.dataBlocksPerFile,
                                                                     self.dataOut.datatime.ctime())
                continue

            break

        if self.verbose:
            print "[Reading] Block No. %d/%d -> %s" %(self.nReadBlocks,
                                                      self.processingHeaderObj.dataBlocksPerFile,
                                                      self.dataOut.datatime.ctime())
        return 1
1061 1061
1062 1062 def __readFirstHeader(self):
1063 1063
1064 1064 self.basicHeaderObj.read(self.fp)
1065 1065 self.systemHeaderObj.read(self.fp)
1066 1066 self.radarControllerHeaderObj.read(self.fp)
1067 1067 self.processingHeaderObj.read(self.fp)
1068 1068
1069 1069 self.firstHeaderSize = self.basicHeaderObj.size
1070 1070
1071 1071 datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
1072 1072 if datatype == 0:
1073 1073 datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
1074 1074 elif datatype == 1:
1075 1075 datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')])
1076 1076 elif datatype == 2:
1077 1077 datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')])
1078 1078 elif datatype == 3:
1079 1079 datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')])
1080 1080 elif datatype == 4:
1081 1081 datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')])
1082 1082 elif datatype == 5:
1083 1083 datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')])
1084 1084 else:
1085 1085 raise ValueError, 'Data type was not defined'
1086 1086
1087 1087 self.dtype = datatype_str
1088 1088 #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
1089 1089 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1)
1090 1090 # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
1091 1091 # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
1092 1092 self.getBlockDimension()
1093 1093
    def __verifyFile(self, filename, msgFlag=True):
        """
        Check that a data file is complete enough to be read: it must contain
        at least the first headers plus one full data block.

        Inputs:
            filename : full path of the file to verify
            msgFlag  : if True, print a message when the file is rejected

        Return:
            True when the file holds enough data, False otherwise.
        """

        msg = None

        try:
            fp = open(filename, 'rb')
        except IOError:

            if msgFlag:
                print "[Reading] File %s can't be opened" % (filename)

            return False

        currentPosition = fp.tell()
        neededSize = self.processingHeaderObj.blockSize + self.firstHeaderSize

        if neededSize == 0:
            # Headers not parsed yet: read them from this file to learn how
            # large one block (plus headers) should be.
            basicHeaderObj = BasicHeader(LOCALTIME)
            systemHeaderObj = SystemHeader()
            radarControllerHeaderObj = RadarControllerHeader()
            processingHeaderObj = ProcessingHeader()

            if not( basicHeaderObj.read(fp) ):
                fp.close()
                return False

            if not( systemHeaderObj.read(fp) ):
                fp.close()
                return False

            if not( radarControllerHeaderObj.read(fp) ):
                fp.close()
                return False

            if not( processingHeaderObj.read(fp) ):
                fp.close()
                return False

            neededSize = processingHeaderObj.blockSize + basicHeaderObj.size
        else:
            # Known block size: only report when the file is too small.
            msg = "[Reading] Skipping the file %s due to it hasn't enough data" %filename

        fp.close()

        fileSize = os.path.getsize(filename)
        currentSize = fileSize - currentPosition

        if currentSize < neededSize:
            if msgFlag and (msg != None):
                print msg
            return False

        return True
1147 1147
    def findDatafiles(self, path, startDate=None, endDate=None, expLabel='', ext='.r', walk=True, include_path=False):
        """
        Scan one or several (comma separated) paths for radar data files and
        collect the dates found within [startDate, endDate].

        Inputs:
            path         : one directory, or several separated by commas
            startDate    : first date to accept (None = no lower bound)
            endDate      : last date to accept (None = no upper bound)
            expLabel     : experiment subfolder name (used when walk=True)
            ext          : file extension to look for (e.g. '.r')
            walk         : if True, descend into dYYYYDDD day folders;
                           if False, look for files directly in each path
            include_path : if True, also return the folder of each date

        Return:
            dateList, or (dateList, pathList) when include_path is True.
        """

        path_empty = True

        dateList = []
        pathList = []

        multi_path = path.split(',')

        if not walk:

            # Flat search: data files live directly under each path
            for single_path in multi_path:

                if not os.path.isdir(single_path):
                    continue

                fileList = glob.glob1(single_path, "*"+ext)

                if not fileList:
                    continue

                path_empty = False

                fileList.sort()

                for thisFile in fileList:

                    if not os.path.isfile(os.path.join(single_path, thisFile)):
                        continue

                    if not isRadarFile(thisFile):
                        continue

                    if not isFileInDateRange(thisFile, startDate, endDate):
                        continue

                    thisDate = getDateFromRadarFile(thisFile)

                    # Each date is reported only once
                    if thisDate in dateList:
                        continue

                    dateList.append(thisDate)
                    pathList.append(single_path)

        else:
            # Walk mode: data files live in path/dYYYYDDD/expLabel
            for single_path in multi_path:

                if not os.path.isdir(single_path):
                    continue

                dirList = []

                for thisPath in os.listdir(single_path):

                    if not os.path.isdir(os.path.join(single_path,thisPath)):
                        continue

                    if not isRadarFolder(thisPath):
                        continue

                    if not isFolderInDateRange(thisPath, startDate, endDate):
                        continue

                    dirList.append(thisPath)

                if not dirList:
                    continue

                dirList.sort()

                for thisDir in dirList:

                    datapath = os.path.join(single_path, thisDir, expLabel)
                    fileList = glob.glob1(datapath, "*"+ext)

                    if not fileList:
                        continue

                    path_empty = False

                    thisDate = getDateFromRadarFolder(thisDir)

                    pathList.append(datapath)
                    dateList.append(thisDate)

        dateList.sort()

        if walk:
            pattern_path = os.path.join(multi_path[0], "[dYYYYDDD]", expLabel)
        else:
            pattern_path = multi_path[0]

        if path_empty:
            print "[Reading] No *%s files in %s for %s to %s" %(ext, pattern_path, startDate, endDate)
        else:
            if not dateList:
                print "[Reading] Date range selected invalid [%s - %s]: No *%s files in %s)" %(startDate, endDate, ext, path)

        if include_path:
            return dateList, pathList

        return dateList
1250 1250
    def setup(self,
              path=None,
              startDate=None,
              endDate=None,
              startTime=datetime.time(0,0,0),
              endTime=datetime.time(23,59,59),
              set=None,
              expLabel = "",
              ext = None,
              online = False,
              delay = 60,
              walk = True,
              getblock = False,
              nTxs = 1,
              realtime=False,
              blocksize=None,
              blocktime=None,
              skip=None,
              cursor=None,
              warnings=True,
              verbose=True,
              server=None,
              **kwargs):
        """
        Configure the reader: either connect to a ZMQ server, wait for files
        to appear (online) or build the file list up front (offline), then
        open the first file.

        On failure the method returns with self.fileIndex == -1 and empty
        pathList/filenameList.
        """
        if server is not None:
            # Server mode: data comes over ZMQ instead of local files
            if 'tcp://' in server:
                address = server
            else:
                address = 'ipc:///tmp/%s' % server
            self.server = address
            self.context = zmq.Context()
            self.receiver = self.context.socket(zmq.PULL)
            self.receiver.connect(self.server)
            time.sleep(0.5)
            print '[Starting] ReceiverData from {}'.format(self.server)
        else:
            self.server = None
            if path == None:
                raise ValueError, "[Reading] The path is not valid"

            if ext == None:
                ext = self.ext

            if online:
                print "[Reading] Searching files in online mode..."

                # Retry up to self.nTries times, waiting self.delay seconds
                # between attempts, until a valid file shows up
                for nTries in range( self.nTries ):
                    fullpath, foldercounter, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext, walk=walk, set=set)

                    if fullpath:
                        break

                    print '[Reading] Waiting %0.2f sec for an valid file in %s: try %02d ...' % (self.delay, path, nTries+1)
                    sleep( self.delay )

                if not(fullpath):
                    print "[Reading] There 'isn't any valid file in %s" % path
                    return

                self.year = year
                self.doy = doy
                self.set = set - 1
                self.path = path
                self.foldercounter = foldercounter
                last_set = None
            else:
                print "[Reading] Searching files in offline mode ..."
                pathList, filenameList = self.searchFilesOffLine(path, startDate=startDate, endDate=endDate,
                                                                 startTime=startTime, endTime=endTime,
                                                                 set=set, expLabel=expLabel, ext=ext,
                                                                 walk=walk, cursor=cursor,
                                                                 skip=skip)

                if not(pathList):
                    self.fileIndex = -1
                    self.pathList = []
                    self.filenameList = []
                    return

                self.fileIndex = -1
                self.pathList = pathList
                self.filenameList = filenameList
                # Remember the set number of the last file (SSS in xYYYYDDDSSS.ext)
                file_name = os.path.basename(filenameList[-1])
                basename, ext = os.path.splitext(file_name)
                last_set = int(basename[-3:])

            self.online = online
            self.realtime = realtime
            self.delay = delay
            ext = ext.lower()
            self.ext = ext
            self.getByBlock = getblock
            self.nTxs = nTxs
            self.startTime = startTime
            self.endTime = endTime

            #Added-----------------
            self.selBlocksize = blocksize
            self.selBlocktime = blocktime

            # Verbose-----------
            self.verbose = verbose
            self.warnings = warnings

            if not(self.setNextFile()):
                if (startDate!=None) and (endDate!=None):
                    print "[Reading] No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
                elif startDate != None:
                    print "[Reading] No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
                else:
                    print "[Reading] No files"

                self.fileIndex = -1
                self.pathList = []
                self.filenameList = []
                return

            #       self.getBasicHeader()

            if last_set != None:
                self.dataOut.last_block = last_set * self.processingHeaderObj.dataBlocksPerFile + self.basicHeaderObj.dataBlock
            return
1372 1372
    def getBasicHeader(self):
        """Copy the per-block basic-header fields into the dataOut object."""

        # Block timestamp plus the time offset of the current profile
        self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000. + self.profileIndex * self.radarControllerHeaderObj.ippSeconds

        self.dataOut.flagDiscontinuousBlock = self.flagDiscontinuousBlock

        self.dataOut.timeZone = self.basicHeaderObj.timeZone

        self.dataOut.dstFlag = self.basicHeaderObj.dstFlag

        self.dataOut.errorCount = self.basicHeaderObj.errorCount

        self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime

        # Effective IPP when nTxs transmissions share one recorded profile
        self.dataOut.ippSeconds = self.radarControllerHeaderObj.ippSeconds/self.nTxs

        #       self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock*self.nTxs
1391 1391
    def getFirstHeader(self):
        """Abstract: copy first-header information into dataOut (subclass responsibility)."""

        raise NotImplementedError
1395 1395
    def getData(self):
        """Abstract: deliver the next unit of data through dataOut (subclass responsibility)."""

        raise NotImplementedError
1399 1399
    def hasNotDataInBuffer(self):
        """Abstract: return whether the internal buffer is exhausted (subclass responsibility)."""

        raise NotImplementedError
1403 1403
    def readBlock(self):
        """Abstract: read one data block from the current file (subclass responsibility)."""

        raise NotImplementedError
1407 1407
    def isEndProcess(self):
        """Return the no-more-files flag: truthy once every file has been consumed."""

        return self.flagNoMoreFiles
1411 1411
1412 1412 def printReadBlocks(self):
1413 1413
1414 1414 print "[Reading] Number of read blocks per file %04d" %self.nReadBlocks
1415 1415
1416 1416 def printTotalBlocks(self):
1417 1417
1418 1418 print "[Reading] Number of read blocks %04d" %self.nTotalBlocks
1419 1419
    def printNumberOfBlock(self):
        """Print the current block number, but only right after a new block was read."""

        if self.flagIsNewBlock:
            print "[Reading] Block No. %d/%d -> %s" %(self.nReadBlocks,
                                                      self.processingHeaderObj.dataBlocksPerFile,
                                                      self.dataOut.datatime.ctime())
1426 1426
    def printInfo(self):
        """Print every file header once; later calls are no-ops."""

        if self.__printInfo == False:
            return

        self.basicHeaderObj.printInfo()
        self.systemHeaderObj.printInfo()
        self.radarControllerHeaderObj.printInfo()
        self.processingHeaderObj.printInfo()

        # Disable further printing until reset elsewhere
        self.__printInfo = False
1438 1438
    def run(self,
            path=None,
            startDate=None,
            endDate=None,
            startTime=datetime.time(0,0,0),
            endTime=datetime.time(23,59,59),
            set=None,
            expLabel = "",
            ext = None,
            online = False,
            delay = 60,
            walk = True,
            getblock = False,
            nTxs = 1,
            realtime=False,
            blocksize=None,
            blocktime=None,
            queue=None,
            skip=None,
            cursor=None,
            warnings=True,
            server=None,
            verbose=True, **kwargs):
        """
        Operation entry point: on the first call configure the reader via
        setup(), then fetch data either from local files (getData) or from a
        ZMQ server (getFromServer).
        """
        if not(self.isConfig):
            self.setup(path=path,
                       startDate=startDate,
                       endDate=endDate,
                       startTime=startTime,
                       endTime=endTime,
                       set=set,
                       expLabel=expLabel,
                       ext=ext,
                       online=online,
                       delay=delay,
                       walk=walk,
                       getblock=getblock,
                       nTxs=nTxs,
                       realtime=realtime,
                       blocksize=blocksize,
                       blocktime=blocktime,
                       skip=skip,
                       cursor=cursor,
                       warnings=warnings,
                       server=server,
                       verbose=verbose)
            self.isConfig = True
        if server is None:
            self.getData()
        else:
            self.getFromServer()
1489 1489
1490 1490 class JRODataWriter(JRODataIO):
1491 1491
1492 1492 """
1493 1493 Esta clase permite escribir datos a archivos procesados (.r o ,pdata). La escritura
1494 1494 de los datos siempre se realiza por bloques.
1495 1495 """
1496 1496
1497 1497 blockIndex = 0
1498 1498
1499 1499 path = None
1500 1500
1501 1501 setFile = None
1502 1502
1503 1503 profilesPerBlock = None
1504 1504
1505 1505 blocksPerFile = None
1506 1506
1507 1507 nWriteBlocks = 0
1508 1508
1509 1509 fileDate = None
1510 1510
1511 1511 def __init__(self, dataOut=None):
1512 1512 raise NotImplementedError
1513 1513
1514 1514
1515 1515 def hasAllDataInBuffer(self):
1516 1516 raise NotImplementedError
1517 1517
1518 1518
1519 1519 def setBlockDimension(self):
1520 1520 raise NotImplementedError
1521 1521
1522 1522
1523 1523 def writeBlock(self):
1524 1524 raise NotImplementedError
1525 1525
1526 1526
1527 1527 def putData(self):
1528 1528 raise NotImplementedError
1529 1529
1530 1530
1531 1531 def getProcessFlags(self):
1532 1532
1533 1533 processFlags = 0
1534 1534
1535 1535 dtype_index = get_dtype_index(self.dtype)
1536 1536 procflag_dtype = get_procflag_dtype(dtype_index)
1537 1537
1538 1538 processFlags += procflag_dtype
1539 1539
1540 1540 if self.dataOut.flagDecodeData:
1541 1541 processFlags += PROCFLAG.DECODE_DATA
1542 1542
1543 1543 if self.dataOut.flagDeflipData:
1544 1544 processFlags += PROCFLAG.DEFLIP_DATA
1545 1545
1546 1546 if self.dataOut.code is not None:
1547 1547 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
1548 1548
1549 1549 if self.dataOut.nCohInt > 1:
1550 1550 processFlags += PROCFLAG.COHERENT_INTEGRATION
1551 1551
1552 1552 if self.dataOut.type == "Spectra":
1553 1553 if self.dataOut.nIncohInt > 1:
1554 1554 processFlags += PROCFLAG.INCOHERENT_INTEGRATION
1555 1555
1556 1556 if self.dataOut.data_dc is not None:
1557 1557 processFlags += PROCFLAG.SAVE_CHANNELS_DC
1558 1558
1559 1559 if self.dataOut.flagShiftFFT:
1560 1560 processFlags += PROCFLAG.SHIFT_FFT_DATA
1561 1561
1562 1562 return processFlags
1563 1563
1564 1564 def setBasicHeader(self):
1565 1565
1566 1566 self.basicHeaderObj.size = self.basicHeaderSize #bytes
1567 1567 self.basicHeaderObj.version = self.versionFile
1568 1568 self.basicHeaderObj.dataBlock = self.nTotalBlocks
1569 1569
1570 1570 utc = numpy.floor(self.dataOut.utctime)
1571 1571 milisecond = (self.dataOut.utctime - utc)* 1000.0
1572 1572
1573 1573 self.basicHeaderObj.utc = utc
1574 1574 self.basicHeaderObj.miliSecond = milisecond
1575 1575 self.basicHeaderObj.timeZone = self.dataOut.timeZone
1576 1576 self.basicHeaderObj.dstFlag = self.dataOut.dstFlag
1577 1577 self.basicHeaderObj.errorCount = self.dataOut.errorCount
1578 1578
1579 1579 def setFirstHeader(self):
1580 1580 """
1581 1581 Obtiene una copia del First Header
1582 1582
1583 1583 Affected:
1584 1584
1585 1585 self.basicHeaderObj
1586 1586 self.systemHeaderObj
1587 1587 self.radarControllerHeaderObj
1588 1588 self.processingHeaderObj self.
1589 1589
1590 1590 Return:
1591 1591 None
1592 1592 """
1593 1593
1594 1594 raise NotImplementedError
1595 1595
1596 1596 def __writeFirstHeader(self):
1597 1597 """
1598 1598 Escribe el primer header del file es decir el Basic header y el Long header (SystemHeader, RadarControllerHeader, ProcessingHeader)
1599 1599
1600 1600 Affected:
1601 1601 __dataType
1602 1602
1603 1603 Return:
1604 1604 None
1605 1605 """
1606 1606
1607 1607 # CALCULAR PARAMETROS
1608 1608
1609 1609 sizeLongHeader = self.systemHeaderObj.size + self.radarControllerHeaderObj.size + self.processingHeaderObj.size
1610 1610 self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader
1611 1611
1612 1612 self.basicHeaderObj.write(self.fp)
1613 1613 self.systemHeaderObj.write(self.fp)
1614 1614 self.radarControllerHeaderObj.write(self.fp)
1615 1615 self.processingHeaderObj.write(self.fp)
1616 1616
1617 1617 def __setNewBlock(self):
1618 1618 """
1619 1619 Si es un nuevo file escribe el First Header caso contrario escribe solo el Basic Header
1620 1620
1621 1621 Return:
1622 1622 0 : si no pudo escribir nada
1623 1623 1 : Si escribio el Basic el First Header
1624 1624 """
1625 1625 if self.fp == None:
1626 1626 self.setNextFile()
1627 1627
1628 1628 if self.flagIsNewFile:
1629 1629 return 1
1630 1630
1631 1631 if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
1632 1632 self.basicHeaderObj.write(self.fp)
1633 1633 return 1
1634 1634
1635 1635 if not( self.setNextFile() ):
1636 1636 return 0
1637 1637
1638 1638 return 1
1639 1639
1640 1640
1641 1641 def writeNextBlock(self):
1642 1642 """
1643 1643 Selecciona el bloque siguiente de datos y los escribe en un file
1644 1644
1645 1645 Return:
1646 1646 0 : Si no hizo pudo escribir el bloque de datos
1647 1647 1 : Si no pudo escribir el bloque de datos
1648 1648 """
1649 1649 if not( self.__setNewBlock() ):
1650 1650 return 0
1651 1651
1652 1652 self.writeBlock()
1653 1653
1654 1654 print "[Writing] Block No. %d/%d" %(self.blockIndex,
1655 1655 self.processingHeaderObj.dataBlocksPerFile)
1656 1656
1657 1657 return 1
1658 1658
1659 1659 def setNextFile(self):
1660 1660 """
1661 1661 Determina el siguiente file que sera escrito
1662 1662
1663 1663 Affected:
1664 1664 self.filename
1665 1665 self.subfolder
1666 1666 self.fp
1667 1667 self.setFile
1668 1668 self.flagIsNewFile
1669 1669
1670 1670 Return:
1671 1671 0 : Si el archivo no puede ser escrito
1672 1672 1 : Si el archivo esta listo para ser escrito
1673 1673 """
1674 1674 ext = self.ext
1675 1675 path = self.path
1676 1676
1677 1677 if self.fp != None:
1678 1678 self.fp.close()
1679 1679
1680 1680 timeTuple = time.localtime( self.dataOut.utctime)
1681 1681 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
1682 1682
1683 1683 fullpath = os.path.join( path, subfolder )
1684 1684 setFile = self.setFile
1685 1685
1686 1686 if not( os.path.exists(fullpath) ):
1687 1687 os.mkdir(fullpath)
1688 1688 setFile = -1 #inicializo mi contador de seteo
1689 1689 else:
1690 1690 filesList = os.listdir( fullpath )
1691 1691 if len( filesList ) > 0:
1692 1692 filesList = sorted( filesList, key=str.lower )
1693 1693 filen = filesList[-1]
1694 1694 # el filename debera tener el siguiente formato
1695 1695 # 0 1234 567 89A BCDE (hex)
1696 1696 # x YYYY DDD SSS .ext
1697 1697 if isNumber( filen[8:11] ):
1698 1698 setFile = int( filen[8:11] ) #inicializo mi contador de seteo al seteo del ultimo file
1699 1699 else:
1700 1700 setFile = -1
1701 1701 else:
1702 1702 setFile = -1 #inicializo mi contador de seteo
1703 1703
1704 1704 setFile += 1
1705 1705
1706 1706 #If this is a new day it resets some values
1707 1707 if self.dataOut.datatime.date() > self.fileDate:
1708 1708 setFile = 0
1709 1709 self.nTotalBlocks = 0
1710 1710
1711 1711 filen = '%s%4.4d%3.3d%3.3d%s' % (self.optchar, timeTuple.tm_year, timeTuple.tm_yday, setFile, ext )
1712 1712
1713 1713 filename = os.path.join( path, subfolder, filen )
1714 1714
1715 1715 fp = open( filename,'wb' )
1716 1716
1717 1717 self.blockIndex = 0
1718 1718
1719 1719 #guardando atributos
1720 1720 self.filename = filename
1721 1721 self.subfolder = subfolder
1722 1722 self.fp = fp
1723 1723 self.setFile = setFile
1724 1724 self.flagIsNewFile = 1
1725 1725 self.fileDate = self.dataOut.datatime.date()
1726 1726
1727 1727 self.setFirstHeader()
1728 1728
1729 1729 print '[Writing] Opening file: %s'%self.filename
1730 1730
1731 1731 self.__writeFirstHeader()
1732 1732
1733 1733 return 1
1734 1734
1735 1735 def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=64, set=None, ext=None, datatype=4):
1736 1736 """
1737 1737 Setea el tipo de formato en la cual sera guardada la data y escribe el First Header
1738 1738
1739 1739 Inputs:
1740 1740 path : directory where data will be saved
1741 1741 profilesPerBlock : number of profiles per block
1742 1742 set : initial file set
1743 1743 datatype : An integer number that defines data type:
1744 1744 0 : int8 (1 byte)
1745 1745 1 : int16 (2 bytes)
1746 1746 2 : int32 (4 bytes)
1747 1747 3 : int64 (8 bytes)
1748 1748 4 : float32 (4 bytes)
1749 1749 5 : double64 (8 bytes)
1750 1750
1751 1751 Return:
1752 1752 0 : Si no realizo un buen seteo
1753 1753 1 : Si realizo un buen seteo
1754 1754 """
1755 1755
1756 1756 if ext == None:
1757 1757 ext = self.ext
1758 1758
1759 1759 self.ext = ext.lower()
1760 1760
1761 1761 self.path = path
1762 1762
1763 1763 if set is None:
1764 1764 self.setFile = -1
1765 1765 else:
1766 1766 self.setFile = set - 1
1767 1767
1768 1768 self.blocksPerFile = blocksPerFile
1769 1769
1770 1770 self.profilesPerBlock = profilesPerBlock
1771 1771
1772 1772 self.dataOut = dataOut
1773 1773 self.fileDate = self.dataOut.datatime.date()
1774 1774 #By default
1775 1775 self.dtype = self.dataOut.dtype
1776 1776
1777 1777 if datatype is not None:
1778 1778 self.dtype = get_numpy_dtype(datatype)
1779 1779
1780 1780 if not(self.setNextFile()):
1781 1781 print "[Writing] There isn't a next file"
1782 1782 return 0
1783 1783
1784 1784 self.setBlockDimension()
1785 1785
1786 1786 return 1
1787 1787
1788 1788 def run(self, dataOut, path, blocksPerFile, profilesPerBlock=64, set=None, ext=None, datatype=4, **kwargs):
1789 1789
1790 1790 if not(self.isConfig):
1791 1791
1792 1792 self.setup(dataOut, path, blocksPerFile, profilesPerBlock=profilesPerBlock, set=set, ext=ext, datatype=datatype, **kwargs)
1793 1793 self.isConfig = True
1794 1794
1795 1795 self.putData()
@@ -1,751 +1,751
1 1
2 2 '''
3 3 Created on Jul 3, 2014
4 4
5 5 @author: roj-idl71
6 6 '''
7 7 # SUBCHANNELS EN VEZ DE CHANNELS
8 8 # BENCHMARKS -> PROBLEMAS CON ARCHIVOS GRANDES -> INCONSTANTE EN EL TIEMPO
9 9 # ACTUALIZACION DE VERSION
10 10 # HEADERS
11 11 # MODULO DE ESCRITURA
12 12 # METADATA
13 13
14 14 import os
15 15 import datetime
16 16 import numpy
17 17 import timeit
18 18 from profilehooks import coverage, profile
19 19 from fractions import Fraction
20 20
21 21 try:
22 22 from gevent import sleep
23 23 except:
24 24 from time import sleep
25 25
26 26 from schainpy.model.data.jroheaderIO import RadarControllerHeader, SystemHeader
27 27 from schainpy.model.data.jrodata import Voltage
28 28 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
29 29 from time import time
30 30
31 31 import cPickle
32 32 try:
33 33 import digital_rf
34 34 except:
35 35 print 'You should install "digital_rf" module if you want to read Digital RF data'
36 36
37 37 class DigitalRFReader(ProcessingUnit):
38 38 '''
39 39 classdocs
40 40 '''
41 41
    def __init__(self, **kwargs):
        '''
        Constructor: create the output Voltage object and reset the reader
        state (no metadata loaded yet).
        '''

        ProcessingUnit.__init__(self, **kwargs)

        self.dataOut = Voltage()
        self.__printInfo = True                # print headers only once
        self.__flagDiscontinuousBlock = False
        self.__bufferIndex = 9999999           # large sentinel; presumably forces a buffer reload on first read -- TODO confirm
        self.__ippKm = None
        self.__codeType = 0
        self.__nCode = None
        self.__nBaud = None
        self.__code = None
        self.dtype = None
59 59
    def close(self):
        # NOTE(review): self.oldAverage is never assigned in this class's
        # visible code; if it is not set elsewhere (e.g. in setup/getData),
        # this print raises AttributeError -- confirm before relying on close()
        print 'Average of writing to digital rf format is ', self.oldAverage * 1000
        return
63 63
    def __getCurrentSecond(self):
        """Current read position in seconds (current unix sample / sample rate)."""

        return self.__thisUnixSample/self.__sample_rate

    # Read-only accessor exposing the current second
    thisSecond = property(__getCurrentSecond, "I'm the 'thisSecond' property.")
69 69
    def __setFileHeader(self):
        '''
        In this method will be initialized every parameter of dataOut object (header, no data)
        '''
        ippSeconds = 1.0*self.__nSamples/self.__sample_rate

        nProfiles = 1.0/ippSeconds # Number of profiles in one second

        # Prefer the headers stored in the metadata; fall back to building
        # them from the values read in setup()
        try:
            self.dataOut.radarControllerHeaderObj = RadarControllerHeader(self.__radarControllerHeader)
        except:
            self.dataOut.radarControllerHeaderObj = RadarControllerHeader(
                txA=0,
                txB=0,
                nWindows=1,
                nHeights=self.__nSamples,
                firstHeight=self.__firstHeigth,
                deltaHeight=self.__deltaHeigth,
                codeType=self.__codeType,
                nCode=self.__nCode, nBaud=self.__nBaud,
                code = self.__code)

        try:
            self.dataOut.systemHeaderObj = SystemHeader(self.__systemHeader)
        except:
            self.dataOut.systemHeaderObj = SystemHeader(nSamples=self.__nSamples,
                                                        nProfiles=nProfiles,
                                                        nChannels=len(self.__channelList),
                                                        adcResolution=14)
        self.dataOut.type = "Voltage"

        self.dataOut.data = None

        self.dataOut.dtype = self.dtype

        #        self.dataOut.nChannels = 0

        #        self.dataOut.nHeights = 0

        self.dataOut.nProfiles = int(nProfiles)

        self.dataOut.heightList = self.__firstHeigth + numpy.arange(self.__nSamples, dtype = numpy.float)*self.__deltaHeigth

        self.dataOut.channelList = range(self.__num_subchannels)

        self.dataOut.blocksize = self.dataOut.getNChannels() * self.dataOut.getNHeights()

        #        self.dataOut.channelIndexList = None

        self.dataOut.flagNoData = True

        self.dataOut.flagDataAsBlock = False
        # Set to TRUE if the data is discontinuous
        self.dataOut.flagDiscontinuousBlock = False

        self.dataOut.utctime = None

        self.dataOut.timeZone = self.__timezone/60 # timezone like jroheader, difference in minutes between UTC and localtime

        self.dataOut.dstFlag = 0

        self.dataOut.errorCount = 0

        try:
            self.dataOut.nCohInt = self.fixed_metadata_dict.get('nCohInt', 1)

            self.dataOut.flagDecodeData = self.fixed_metadata_dict['flagDecodeData'] # assume the data is already decoded

            self.dataOut.flagDeflipData = self.fixed_metadata_dict['flagDeflipData'] # assume the data is not flipped

            self.dataOut.flagShiftFFT = self.fixed_metadata_dict['flagShiftFFT']

            self.dataOut.useLocalTime = self.fixed_metadata_dict['useLocalTime']
        except:
            pass


        self.dataOut.ippSeconds = ippSeconds

        # Time interval between profiles
        #       self.dataOut.timeInterval = self.dataOut.ippSeconds * self.dataOut.nCohInt

        self.dataOut.frequency = self.__frequency

        self.dataOut.realtime = self.__online
155 155
156 156 def findDatafiles(self, path, startDate=None, endDate=None):
157 157
158 158 if not os.path.isdir(path):
159 159 return []
160 160
161 161 try:
162 162 digitalReadObj = digital_rf.DigitalRFReader(path, load_all_metadata=True)
163 163 except:
164 164 digitalReadObj = digital_rf.DigitalRFReader(path)
165 165
166 166 channelNameList = digitalReadObj.get_channels()
167 167
168 168 if not channelNameList:
169 169 return []
170 170
171 171 metadata_dict = digitalReadObj.get_rf_file_metadata(channelNameList[0])
172 172
173 173 sample_rate = metadata_dict['sample_rate'][0]
174 174
175 175 this_metadata_file = digitalReadObj.get_metadata(channelNameList[0])
176 176
177 177 try:
178 178 timezone = this_metadata_file['timezone'].value
179 179 except:
180 180 timezone = 0
181 181
182 182 startUTCSecond, endUTCSecond = digitalReadObj.get_bounds(channelNameList[0])/sample_rate - timezone
183 183
184 184 startDatetime = datetime.datetime.utcfromtimestamp(startUTCSecond)
185 185 endDatatime = datetime.datetime.utcfromtimestamp(endUTCSecond)
186 186
187 187 if not startDate:
188 188 startDate = startDatetime.date()
189 189
190 190 if not endDate:
191 191 endDate = endDatatime.date()
192 192
193 193 dateList = []
194 194
195 195 thisDatetime = startDatetime
196 196
197 197 while(thisDatetime<=endDatatime):
198 198
199 199 thisDate = thisDatetime.date()
200 200
201 201 if thisDate < startDate:
202 202 continue
203 203
204 204 if thisDate > endDate:
205 205 break
206 206
207 207 dateList.append(thisDate)
208 208 thisDatetime += datetime.timedelta(1)
209 209
210 210 return dateList
211 211
212 212 def setup(self, path = None,
213 213 startDate = None,
214 214 endDate = None,
215 215 startTime = datetime.time(0,0,0),
216 216 endTime = datetime.time(23,59,59),
217 217 channelList = None,
218 218 nSamples = None,
219 219 online = False,
220 220 delay = 60,
221 221 buffer_size = 1024,
222 222 ippKm=None,
223 223 **kwargs):
224 224 '''
225 225 In this method we should set all initial parameters.
226 226
227 227 Inputs:
228 228 path
229 229 startDate
230 230 endDate
231 231 startTime
232 232 endTime
233 233 set
234 234 expLabel
235 235 ext
236 236 online
237 237 delay
238 238 '''
239 239 self.i = 0
240 240 if not os.path.isdir(path):
241 241 raise ValueError, "[Reading] Directory %s does not exist" %path
242 242
243 243 try:
244 244 self.digitalReadObj = digital_rf.DigitalRFReader(path, load_all_metadata=True)
245 245 except:
246 246 self.digitalReadObj = digital_rf.DigitalRFReader(path)
247 247
248 248 channelNameList = self.digitalReadObj.get_channels()
249 249
250 250 if not channelNameList:
251 251 raise ValueError, "[Reading] Directory %s does not have any files" %path
252 252
253 253 if not channelList:
254 254 channelList = range(len(channelNameList))
255 255
256 256
257 257 ########## Reading metadata ######################
258 258
259 259 top_properties = self.digitalReadObj.get_properties(channelNameList[channelList[0]])
260 260
261 261
262 262 self.__num_subchannels = top_properties['num_subchannels']
263 263 self.__sample_rate = 1.0 * top_properties['sample_rate_numerator'] / top_properties['sample_rate_denominator']
264 264 # self.__samples_per_file = top_properties['samples_per_file'][0]
265 265 self.__deltaHeigth = 1e6*0.15/self.__sample_rate ## why 0.15?
266 266
267 267 this_metadata_file = self.digitalReadObj.get_digital_metadata(channelNameList[channelList[0]])
268 268 metadata_bounds = this_metadata_file.get_bounds()
269 269 self.fixed_metadata_dict = this_metadata_file.read(metadata_bounds[0])[metadata_bounds[0]] ## GET FIRST HEADER
270 270
271 271 try:
272 272 self.__processingHeader = self.fixed_metadata_dict['processingHeader']
273 273 self.__radarControllerHeader = self.fixed_metadata_dict['radarControllerHeader']
274 274 self.__systemHeader = self.fixed_metadata_dict['systemHeader']
275 275 self.dtype = cPickle.loads(self.fixed_metadata_dict['dtype'])
276 276 except:
277 277 pass
278 278
279 279
280 280 self.__frequency = None
281 281
282 282 self.__frequency = self.fixed_metadata_dict.get('frequency', 1)
283 283
284 284 self.__timezone = self.fixed_metadata_dict.get('timezone', 300)
285 285
286 286
287 287 try:
288 288 nSamples = self.fixed_metadata_dict['nSamples']
289 289 except:
290 290 nSamples = None
291 291
292 292 self.__firstHeigth = 0
293 293
294 294 try:
295 295 codeType = self.__radarControllerHeader['codeType']
296 296 except:
297 297 codeType = 0
298 298
299 299 nCode = 1
300 300 nBaud = 1
301 301 code = numpy.ones((nCode, nBaud), dtype=numpy.int)
302 302
303 303 try:
304 304 if codeType:
305 305 nCode = self.__radarControllerHeader['nCode']
306 306 nBaud = self.__radarControllerHeader['nBaud']
307 307 code = self.__radarControllerHeader['code']
308 308 except:
309 309 pass
310 310
311 311
312 312 if not ippKm:
313 313 try:
314 314 # seconds to km
315 315 ippKm = self.__radarControllerHeader['ipp']
316 316 except:
317 317 ippKm = None
318 318 ####################################################
319 319 self.__ippKm = ippKm
320 320 startUTCSecond = None
321 321 endUTCSecond = None
322 322
323 323 if startDate:
324 324 startDatetime = datetime.datetime.combine(startDate, startTime)
325 325 startUTCSecond = (startDatetime-datetime.datetime(1970,1,1)).total_seconds() + self.__timezone
326 326
327 327 if endDate:
328 328 endDatetime = datetime.datetime.combine(endDate, endTime)
329 329 endUTCSecond = (endDatetime-datetime.datetime(1970,1,1)).total_seconds() + self.__timezone
330 330
331 331 start_index, end_index = self.digitalReadObj.get_bounds(channelNameList[channelList[0]])
332 332
333 333 if not startUTCSecond:
334 334 startUTCSecond = start_index/self.__sample_rate
335 335
336 336 if start_index > startUTCSecond*self.__sample_rate:
337 337 startUTCSecond = start_index/self.__sample_rate
338 338
339 339 if not endUTCSecond:
340 340 endUTCSecond = end_index/self.__sample_rate
341 341
342 342 if end_index < endUTCSecond*self.__sample_rate:
343 343 endUTCSecond = end_index/self.__sample_rate
344 344 if not nSamples:
345 345 if not ippKm:
346 346 raise ValueError, "[Reading] nSamples or ippKm should be defined"
347 347 nSamples = int(ippKm / (1e6*0.15/self.__sample_rate))
348 348 channelBoundList = []
349 349 channelNameListFiltered = []
350 350
351 351 for thisIndexChannel in channelList:
352 352 thisChannelName = channelNameList[thisIndexChannel]
353 353 start_index, end_index = self.digitalReadObj.get_bounds(thisChannelName)
354 354 channelBoundList.append((start_index, end_index))
355 355 channelNameListFiltered.append(thisChannelName)
356 356
357 357 self.profileIndex = 0
358 358 self.i= 0
359 359 self.__delay = delay
360 360
361 361 self.__codeType = codeType
362 362 self.__nCode = nCode
363 363 self.__nBaud = nBaud
364 364 self.__code = code
365 365
366 366 self.__datapath = path
367 367 self.__online = online
368 368 self.__channelList = channelList
369 369 self.__channelNameList = channelNameListFiltered
370 370 self.__channelBoundList = channelBoundList
371 371 self.__nSamples = nSamples
372 372 self.__samples_to_read = long(nSamples) # FIJO: AHORA 40
373 373 self.__nChannels = len(self.__channelList)
374 374
375 375 self.__startUTCSecond = startUTCSecond
376 376 self.__endUTCSecond = endUTCSecond
377 377
378 378 self.__timeInterval = 1.0 * self.__samples_to_read/self.__sample_rate # Time interval
379 379
380 380 if online:
381 381 # self.__thisUnixSample = int(endUTCSecond*self.__sample_rate - 4*self.__samples_to_read)
382 382 startUTCSecond = numpy.floor(endUTCSecond)
383 383
384 384 self.__thisUnixSample = long(startUTCSecond*self.__sample_rate) - self.__samples_to_read ## por que en el otro metodo lo primero q se hace es sumar samplestoread
385 385
386 386 self.__data_buffer = numpy.zeros((self.__num_subchannels, self.__samples_to_read), dtype = numpy.complex)
387 387
388 388 self.__setFileHeader()
389 389 self.isConfig = True
390 390
391 391 print "[Reading] Digital RF Data was found from %s to %s " %(
392 392 datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone),
393 393 datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone)
394 394 )
395 395
396 396 print "[Reading] Starting process from %s to %s" %(datetime.datetime.utcfromtimestamp(startUTCSecond - self.__timezone),
397 397 datetime.datetime.utcfromtimestamp(endUTCSecond - self.__timezone)
398 398 )
399 399 self.oldAverage = None
400 400 self.count = 0
401 401 self.executionTime = 0
    def __reload(self):
        '''
        Refresh the Digital RF index in online mode.

        Re-reads the channel bounds from disk; if the end bound moved forward
        (new samples were written), extends __startUTCSecond/__endUTCSecond and
        returns True.  Returns False when no new data appeared.
        '''
        # print
        # print "%s not in range [%s, %s]" %(
        #     datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
        #     datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone),
        #     datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone)
        #     )
        print "[Reading] reloading metadata ..."

        try:
            # Newer digital_rf API: full re-scan of the channel directory.
            self.digitalReadObj.reload(complete_update=True)
        except:
            # Fallback for older digital_rf versions without the keyword.
            self.digitalReadObj.reload()

        start_index, end_index = self.digitalReadObj.get_bounds(self.__channelNameList[self.__channelList[0]])

        # Old samples may have been purged from the archive: advance the start.
        if start_index > self.__startUTCSecond*self.__sample_rate:
            self.__startUTCSecond = 1.0*start_index/self.__sample_rate

        # New samples appeared: extend the readable end and report success.
        if end_index > self.__endUTCSecond*self.__sample_rate:
            self.__endUTCSecond = 1.0*end_index/self.__sample_rate
            print
            print "[Reading] New timerange found [%s, %s] " %(
                datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone),
                datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone)
                )

            return True

        return False
432 432
433 433 def timeit(self, toExecute):
434 434 t0 = time()
435 435 toExecute()
436 436 self.executionTime = time() - t0
437 437 if self.oldAverage is None: self.oldAverage = self.executionTime
438 438 self.oldAverage = (self.executionTime + self.count*self.oldAverage) / (self.count + 1.0)
439 439 self.count = self.count + 1.0
440 440 return
441 441
    def __readNextBlock(self, seconds=30, volt_scale = 1):
        '''
        Read the next block of __samples_to_read samples per subchannel into
        __data_buffer, advancing __thisUnixSample by one block.

        Returns True when a block was read, False when the end of the selected
        time range was reached or no channel could be read.  Sets
        __flagDiscontinuousBlock on gaps or short reads.
        '''

        # Advance the read pointer to the next block.
        self.__flagDiscontinuousBlock = False
        self.__thisUnixSample += self.__samples_to_read

        if self.__thisUnixSample + 2*self.__samples_to_read > self.__endUTCSecond*self.__sample_rate:
            print "[Reading] There are no more data into selected time-range"
            if self.__online:
                # Online: the archive may have grown; refresh the bounds.
                self.__reload()
            else:
                return False

            # Still beyond the end after reloading: give up.
            if self.__thisUnixSample + 2*self.__samples_to_read > self.__endUTCSecond*self.__sample_rate:
                return False
            # Step back one block so this position is retried on the next call.
            self.__thisUnixSample -= self.__samples_to_read

        indexChannel = 0

        dataOk = False
        for thisChannelName in self.__channelNameList: ##TODO VARIOS CHANNELS?
            for indexSubchannel in range(self.__num_subchannels):
                try:
                    t0 = time()
                    result = self.digitalReadObj.read_vector_c81d(self.__thisUnixSample,
                                                                  self.__samples_to_read,
                                                                  thisChannelName, sub_channel=indexSubchannel)
                    # Keep a running average of the read latency.
                    self.executionTime = time() - t0
                    if self.oldAverage is None: self.oldAverage = self.executionTime
                    self.oldAverage = (self.executionTime + self.count*self.oldAverage) / (self.count + 1.0)
                    self.count = self.count + 1.0

                except IOError, e:
                    # Gap in the recording: flag it and stop reading this channel.
                    # NOTE(review): self.thisSecond is not assigned anywhere in the
                    # visible code -- confirm it is set before this print can fire.
                    self.__flagDiscontinuousBlock = True
                    print "[Reading] %s" %datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone), e
                    break

                if result.shape[0] != self.__samples_to_read:
                    # Short read: treat as a discontinuity.
                    self.__flagDiscontinuousBlock = True
                    print "[Reading] %s: Too few samples were found, just %d/%d samples" %(datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
                                                                                          result.shape[0],
                                                                                          self.__samples_to_read)
                    break

                # Scale and store this subchannel's samples.
                self.__data_buffer[indexSubchannel,:] = result*volt_scale

            indexChannel += 1

            dataOk = True

        # Timestamp (seconds since epoch) of the first sample of this block.
        self.__utctime = self.__thisUnixSample/self.__sample_rate

        if not dataOk:
            return False

        print "[Reading] %s: %d samples <> %f sec" %(datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
                                                     self.__samples_to_read,
                                                     self.__timeInterval)

        # Reset the per-block consume pointer used by getData().
        self.__bufferIndex = 0

        return True
507 507
508 508 def __isBufferEmpty(self):
509 509 return self.__bufferIndex > self.__samples_to_read - self.__nSamples #40960 - 40
510 510
    def getData(self, seconds=30, nTries=5):

        '''
        Copy the next profile from the internal sample buffer into
        self.dataOut, refilling the buffer from disk when it is exhausted.

        In addition, increases the buffer counter by one.

        Inputs:
            seconds : seconds to wait between retries when no new data is
                      available yet (online mode only).
            nTries  : maximum number of retries before giving up.

        Return:
            True when a profile of voltages (heights x channels) was copied
            from the buffer into self.dataOut; False when there are no more
            samples to read.

        Affected:
            self.dataOut
            self.profileIndex
            self.flagDiscontinuousBlock
            self.flagIsNewBlock
        '''

        err_counter = 0
        self.dataOut.flagNoData = True

        if self.__isBufferEmpty():
            self.__flagDiscontinuousBlock = False

            while True:
                # Try to load the next block of samples from disk.
                if self.__readNextBlock():
                    break
                # Read pointer went past the selected time range: end of data.
                if self.__thisUnixSample > self.__endUTCSecond*self.__sample_rate:
                    return False

                if self.__flagDiscontinuousBlock:
                    print '[Reading] discontinuous block found ... continue with the next block'
                    continue

                # Offline mode: a failed read means there is nothing more.
                if not self.__online:
                    return False

                err_counter += 1
                if err_counter > nTries:
                    return False

                print '[Reading] waiting %d seconds to read a new block' %seconds
                sleep(seconds)

        # Hand the next nSamples-wide slice of the buffer to dataOut.
        self.dataOut.data = self.__data_buffer[:,self.__bufferIndex:self.__bufferIndex+self.__nSamples]
        self.dataOut.utctime = (self.__thisUnixSample + self.__bufferIndex)/self.__sample_rate
        self.dataOut.flagNoData = False
        self.dataOut.flagDiscontinuousBlock = self.__flagDiscontinuousBlock
        self.dataOut.profileIndex = self.profileIndex

        self.__bufferIndex += self.__nSamples
        self.profileIndex += 1

        # Wrap the profile counter after each full set of profiles.
        if self.profileIndex == self.dataOut.nProfiles:
            self.profileIndex = 0

        return True
568 568
569 569 def printInfo(self):
570 570 '''
571 571 '''
572 572 if self.__printInfo == False:
573 573 return
574 574
575 575 # self.systemHeaderObj.printInfo()
576 576 # self.radarControllerHeaderObj.printInfo()
577 577
578 578 self.__printInfo = False
579 579
580 580 def printNumberOfBlock(self):
581 581 '''
582 582 '''
583 583 return
584 584 # print self.profileIndex
585 585
586 586
587 587 def run(self, **kwargs):
588 588 '''
589 589 This method will be called many times so here you should put all your code
590 590 '''
591 591
592 592 if not self.isConfig:
593 593 self.setup(**kwargs)
594 594 #self.i = self.i+1
595 595 self.getData(seconds=self.__delay)
596 596
597 597 return
598 598
599 599 class DigitalRFWriter(Operation):
600 600 '''
601 601 classdocs
602 602 '''
603 603
604 604 def __init__(self, **kwargs):
605 605 '''
606 606 Constructor
607 607 '''
608 608 Operation.__init__(self, **kwargs)
609 609 self.metadata_dict = {}
610 610 self.dataOut = None
611 611 self.dtype = None
612 612
613 613 def setHeader(self):
614 614
615 615 self.metadata_dict['frequency'] = self.dataOut.frequency
616 616 self.metadata_dict['timezone'] = self.dataOut.timeZone
617 617 self.metadata_dict['dtype'] = cPickle.dumps(self.dataOut.dtype)
618 618 self.metadata_dict['nProfiles'] = self.dataOut.nProfiles
619 619 self.metadata_dict['heightList'] = self.dataOut.heightList
620 620 self.metadata_dict['channelList'] = self.dataOut.channelList
621 621 self.metadata_dict['flagDecodeData'] = self.dataOut.flagDecodeData
622 622 self.metadata_dict['flagDeflipData'] = self.dataOut.flagDeflipData
623 623 self.metadata_dict['flagShiftFFT'] = self.dataOut.flagShiftFFT
624 624 self.metadata_dict['flagDataAsBlock'] = self.dataOut.flagDataAsBlock
625 625 self.metadata_dict['useLocalTime'] = self.dataOut.useLocalTime
626 626 self.metadata_dict['nCohInt'] = self.dataOut.nCohInt
627 627
628 628 return
629 629
630 630 def setup(self, dataOut, path, frequency, fileCadence, dirCadence, metadataCadence, set=0, metadataFile='metadata', ext='.h5'):
631 631 '''
632 632 In this method we should set all initial parameters.
633 633 Input:
634 634 dataOut: Input data will also be outputa data
635 635 '''
636 636 self.setHeader()
637 637 self.__ippSeconds = dataOut.ippSeconds
638 638 self.__deltaH = dataOut.getDeltaH()
639 639 self.__sample_rate = 1e6*0.15/self.__deltaH
640 640 self.__dtype = dataOut.dtype
641 641 if len(dataOut.dtype) == 2:
642 642 self.__dtype = dataOut.dtype[0]
643 643 self.__nSamples = dataOut.systemHeaderObj.nSamples
644 644 self.__nProfiles = dataOut.nProfiles
645 645 self.__blocks_per_file = dataOut.processingHeaderObj.dataBlocksPerFile
646 646
647 647 self.arr_data = arr_data = numpy.ones((self.__nSamples, len(self.dataOut.channelList)), dtype=[('r', self.__dtype), ('i', self.__dtype)])
648 648
649 649 file_cadence_millisecs = long(1.0 * self.__blocks_per_file * self.__nProfiles * self.__nSamples / self.__sample_rate) * 1000
650 650 sub_cadence_secs = file_cadence_millisecs / 500
651 651
652 652 sample_rate_fraction = Fraction(self.__sample_rate).limit_denominator()
653 653 sample_rate_numerator = long(sample_rate_fraction.numerator)
654 654 sample_rate_denominator = long(sample_rate_fraction.denominator)
655 655 start_global_index = dataOut.utctime * self.__sample_rate
656 656
657 657 uuid = 'prueba'
658 658 compression_level = 1
659 659 checksum = False
660 660 is_complex = True
661 661 num_subchannels = len(dataOut.channelList)
662 662 is_continuous = True
663 663 marching_periods = False
664 664
665 665 self.digitalWriteObj = digital_rf.DigitalRFWriter(path, self.__dtype, dirCadence,
666 666 fileCadence, start_global_index,
667 667 sample_rate_numerator, sample_rate_denominator, uuid, compression_level, checksum,
668 668 is_complex, num_subchannels, is_continuous, marching_periods)
669 669
670 670 metadata_dir = os.path.join(path, 'metadata')
671 671 os.system('mkdir %s' % (metadata_dir))
672 672
673 673 self.digitalMetadataWriteObj = digital_rf.DigitalMetadataWriter(metadata_dir, dirCadence, 1, ##236, file_cadence_millisecs / 1000
674 674 sample_rate_numerator, sample_rate_denominator,
675 675 metadataFile)
676 676
677 677
678 678 self.isConfig = True
679 679 self.currentSample = 0
680 680 self.oldAverage = 0
681 681 self.count = 0
682 682 return
683 683
684 684 def writeMetadata(self):
685 685 print '[Writing] - Writing metadata'
686 686 start_idx = self.__sample_rate * self.dataOut.utctime
687 687
688 688 self.metadata_dict['processingHeader'] = self.dataOut.processingHeaderObj.getAsDict()
689 689 self.metadata_dict['radarControllerHeader'] = self.dataOut.radarControllerHeaderObj.getAsDict()
690 690 self.metadata_dict['systemHeader'] = self.dataOut.systemHeaderObj.getAsDict()
691 691 self.digitalMetadataWriteObj.write(start_idx, self.metadata_dict)
692 692 return
693 693
694 694
695 695 def timeit(self, toExecute):
696 696 t0 = time()
697 697 toExecute()
698 698 self.executionTime = time() - t0
699 699 if self.oldAverage is None: self.oldAverage = self.executionTime
700 700 self.oldAverage = (self.executionTime + self.count*self.oldAverage) / (self.count + 1.0)
701 701 self.count = self.count + 1.0
702 702 return
703 703
704 704
705 705 def writeData(self):
706 706 for i in range(self.dataOut.systemHeaderObj.nSamples):
707 707 for channel in self.dataOut.channelList:
708 708 self.arr_data[i][channel]['r'] = self.dataOut.data[channel][i].real
709 709 self.arr_data[i][channel]['i'] = self.dataOut.data[channel][i].imag
710 710
711 711 def f(): return self.digitalWriteObj.rf_write(self.arr_data)
712 712 self.timeit(f)
713 713
714 714 return
715 715
716 716 def run(self, dataOut, frequency=49.92e6, path=None, fileCadence=100, dirCadence=25, metadataCadence=1, **kwargs):
717 717 '''
718 718 This method will be called many times so here you should put all your code
719 719 Inputs:
720 720 dataOut: object with the data
721 721 '''
722 722 # print dataOut.__dict__
723 723 self.dataOut = dataOut
724 724 if not self.isConfig:
725 725 self.setup(dataOut, path, frequency, fileCadence, dirCadence, metadataCadence, **kwargs)
726 726 self.writeMetadata()
727 727
728 728 self.writeData()
729 729
730 730 ## self.currentSample += 1
731 731 ## if self.dataOut.flagDataAsBlock or self.currentSample == 1:
732 732 ## self.writeMetadata()
733 733 ## if self.currentSample == self.__nProfiles: self.currentSample = 0
734 734
735 735 def close(self):
736 736 print '[Writing] - Closing files '
737 737 print 'Average of writing to digital rf format is ', self.oldAverage * 1000
738 738 try:
739 739 self.digitalWriteObj.close()
740 740 except:
741 741 pass
742 742
743 743 # raise
if __name__ == '__main__':
    # Simple smoke test: read the mocked archive in an endless loop.
    drf_reader = DigitalRFReader()

    while True:
        drf_reader.run(path='/home/jchavez/jicamarca/mocked_data/')
General Comments 0
You need to be logged in to leave comments. Login now