##// END OF EJS Templates
writing rf data, falta metadata
Jose Chavez -
r979:f421d9b2ac03
parent child
Show More
@@ -1,1816 +1,1812
1 1 '''
2 2 Created on Jul 2, 2014
3 3
4 4 @author: roj-idl71
5 5 '''
6 6 import os
7 7 import sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import inspect
13 13 import time, datetime
14 14 import traceback
15 15 import zmq
16 16
17 17 try:
18 18 from gevent import sleep
19 19 except:
20 20 from time import sleep
21 21
22 22 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
23 23 from schainpy.model.data.jroheaderIO import get_dtype_index, get_numpy_dtype, get_procflag_dtype, get_dtype_width
24 24
25 25 LOCALTIME = True
26 26
def isNumber(cad):
    """
    Check whether a string can be converted to a number.

    Input:
        cad : string to analyse

    Return:
        True if the string is numeric, False otherwise.
    """
    try:
        float(cad)
        return True
    except (TypeError, ValueError):
        # Bug fix: the original bare `except:` also swallowed
        # KeyboardInterrupt/SystemExit; float() only raises these two.
        return False
45 45
def isFileInEpoch(filename, startUTSeconds, endUTSeconds):
    """
    Determine whether a data file contains data inside the given epoch
    range, by reading the timestamp of its first basic header.

    Inputs:
        filename       : full name of the data file in Jicamarca format (.r)
        startUTSeconds : start of the selected range, in seconds counted
                         from 01/01/1970.
        endUTSeconds   : end of the selected range, in seconds counted
                         from 01/01/1970.

    Return:
        1 if the header timestamp falls inside [startUTSeconds,
        endUTSeconds), 0 otherwise.

    Exceptions:
        If the file does not exist or cannot be opened
        If the header cannot be read.
    """
    basicHeaderObj = BasicHeader(LOCALTIME)

    try:
        fp = open(filename,'rb')
    except IOError:
        print "The file %s can't be opened" %(filename)
        return 0

    sts = basicHeaderObj.read(fp)
    fp.close()

    # A false read status means the header is corrupt or incomplete.
    if not(sts):
        print "Skipping the file %s because it has not a valid header" %(filename)
        return 0

    if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
        return 0

    return 1
86 86
def isTimeInRange(thisTime, startTime, endTime):
    """
    Return 1 when thisTime lies inside [startTime, endTime], else 0.

    When endTime < startTime the window wraps past midnight, i.e. it
    covers [startTime, 24h) plus [0h, endTime].
    """
    if endTime >= startTime:
        inside = startTime <= thisTime <= endTime
    else:
        inside = (thisTime >= startTime) or (thisTime <= endTime)

    return 1 if inside else 0
99 99
def isFileInTimeRange(filename, startDate, endDate, startTime, endTime):
    """
    Check whether a Jicamarca data file overlaps the given date/time
    window, by comparing the timestamps of its first and last blocks.

    Inputs:
        filename  : full name of the data file in Jicamarca format (.r)
        startDate : start date of the selected range (datetime.date)
        endDate   : end date of the selected range (datetime.date)
        startTime : start time of the selected range (datetime.time)
        endTime   : end time of the selected range (datetime.time)

    Return:
        The datetime of the file's first block when the file overlaps
        the range, None otherwise.

    Exceptions:
        If the file does not exist or cannot be opened
        If the headers cannot be read.
    """


    try:
        fp = open(filename,'rb')
    except IOError:
        print "The file %s can't be opened" %(filename)
        return None

    firstBasicHeaderObj = BasicHeader(LOCALTIME)
    systemHeaderObj = SystemHeader()
    radarControllerHeaderObj = RadarControllerHeader()
    processingHeaderObj = ProcessingHeader()

    lastBasicHeaderObj = BasicHeader(LOCALTIME)

    sts = firstBasicHeaderObj.read(fp)

    if not(sts):
        print "[Reading] Skipping the file %s because it has not a valid header" %(filename)
        return None

    if not systemHeaderObj.read(fp):
        return None

    if not radarControllerHeaderObj.read(fp):
        return None

    if not processingHeaderObj.read(fp):
        return None

    filesize = os.path.getsize(filename)

    offset = processingHeaderObj.blockSize + 24 #header size

    if filesize <= offset:
        print "[Reading] %s: This file has not enough data" %filename
        return None

    # Seek back one block from the end to read the LAST basic header,
    # which tells when the data in the file ends.
    fp.seek(-offset, 2)

    sts = lastBasicHeaderObj.read(fp)

    fp.close()

    thisDatetime = lastBasicHeaderObj.datatime
    thisTime_last_block = thisDatetime.time()

    thisDatetime = firstBasicHeaderObj.datatime
    thisDate = thisDatetime.date()
    thisTime_first_block = thisDatetime.time()

    #General case
    # o>>>>>>>>>>>>>><<<<<<<<<<<<<<o
    #-----------o----------------------------o-----------
    #            startTime                     endTime

    if endTime >= startTime:
        if (thisTime_last_block < startTime) or (thisTime_first_block > endTime):
            return None

        return thisDatetime

    #If endTime < startTime then endTime belongs to the next day


    #<<<<<<<<<<<o                          o>>>>>>>>>>>
    #-----------o----------------------------o-----------
    #          endTime                      startTime

    if (thisDate == startDate) and (thisTime_last_block < startTime):
        return None

    if (thisDate == endDate) and (thisTime_first_block > endTime):
        return None

    if (thisTime_last_block < startTime) and (thisTime_first_block > endTime):
        return None

    return thisDatetime
203 203
204 204 def isFolderInDateRange(folder, startDate=None, endDate=None):
205 205 """
206 206 Retorna 1 si el archivo de datos se encuentra dentro del rango de horas especificado.
207 207
208 208 Inputs:
209 209 folder : nombre completo del directorio.
210 210 Su formato deberia ser "/path_root/?YYYYDDD"
211 211
212 212 siendo:
213 213 YYYY : Anio (ejemplo 2015)
214 214 DDD : Dia del anio (ejemplo 305)
215 215
216 216 startDate : fecha inicial del rango seleccionado en formato datetime.date
217 217
218 218 endDate : fecha final del rango seleccionado en formato datetime.date
219 219
220 220 Return:
221 221 Boolean : Retorna True si el archivo de datos contiene datos en el rango de
222 222 fecha especificado, de lo contrario retorna False.
223 223 Excepciones:
224 224 Si el directorio no tiene el formato adecuado
225 225 """
226 226
227 227 basename = os.path.basename(folder)
228 228
229 229 if not isRadarFolder(basename):
230 230 print "The folder %s has not the rigth format" %folder
231 231 return 0
232 232
233 233 if startDate and endDate:
234 234 thisDate = getDateFromRadarFolder(basename)
235 235
236 236 if thisDate < startDate:
237 237 return 0
238 238
239 239 if thisDate > endDate:
240 240 return 0
241 241
242 242 return 1
243 243
244 244 def isFileInDateRange(filename, startDate=None, endDate=None):
245 245 """
246 246 Retorna 1 si el archivo de datos se encuentra dentro del rango de horas especificado.
247 247
248 248 Inputs:
249 249 filename : nombre completo del archivo de datos en formato Jicamarca (.r)
250 250
251 251 Su formato deberia ser "?YYYYDDDsss"
252 252
253 253 siendo:
254 254 YYYY : Anio (ejemplo 2015)
255 255 DDD : Dia del anio (ejemplo 305)
256 256 sss : set
257 257
258 258 startDate : fecha inicial del rango seleccionado en formato datetime.date
259 259
260 260 endDate : fecha final del rango seleccionado en formato datetime.date
261 261
262 262 Return:
263 263 Boolean : Retorna True si el archivo de datos contiene datos en el rango de
264 264 fecha especificado, de lo contrario retorna False.
265 265 Excepciones:
266 266 Si el archivo no tiene el formato adecuado
267 267 """
268 268
269 269 basename = os.path.basename(filename)
270 270
271 271 if not isRadarFile(basename):
272 272 print "The filename %s has not the rigth format" %filename
273 273 return 0
274 274
275 275 if startDate and endDate:
276 276 thisDate = getDateFromRadarFile(basename)
277 277
278 278 if thisDate < startDate:
279 279 return 0
280 280
281 281 if thisDate > endDate:
282 282 return 0
283 283
284 284 return 1
285 285
286 286 def getFileFromSet(path, ext, set):
287 287 validFilelist = []
288 288 fileList = os.listdir(path)
289 289
290 290 # 0 1234 567 89A BCDE
291 291 # H YYYY DDD SSS .ext
292 292
293 293 for thisFile in fileList:
294 294 try:
295 295 year = int(thisFile[1:5])
296 296 doy = int(thisFile[5:8])
297 297 except:
298 298 continue
299 299
300 300 if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
301 301 continue
302 302
303 303 validFilelist.append(thisFile)
304 304
305 305 myfile = fnmatch.filter(validFilelist,'*%4.4d%3.3d%3.3d*'%(year,doy,set))
306 306
307 307 if len(myfile)!= 0:
308 308 return myfile[0]
309 309 else:
310 310 filename = '*%4.4d%3.3d%3.3d%s'%(year,doy,set,ext.lower())
311 311 print 'the filename %s does not exist'%filename
312 312 print '...going to the last file: '
313 313
314 314 if validFilelist:
315 315 validFilelist = sorted( validFilelist, key=str.lower )
316 316 return validFilelist[-1]
317 317
318 318 return None
319 319
def getlastFileFromPath(path, ext):
    """
    Keep only the files in `path` that follow the "PYYYYDDDSSS.ext"
    naming convention and return the last one of the filtered list.

    Input:
        path : folder containing the data files
        ext  : extension of the files in the folder

    Return:
        The last file of the folder (without path), or None.
    """
    # 0 1234 567 89A BCDE
    # H YYYY DDD SSS .ext
    candidates = []

    for thisFile in os.listdir(path):

        year = thisFile[1:5]
        doy = thisFile[5:8]

        if not (isNumber(year) and isNumber(doy)):
            continue

        year = int(year)
        doy = int(doy)

        if os.path.splitext(thisFile)[-1].lower() != ext.lower():
            continue

        candidates.append(thisFile)

    if not candidates:
        return None

    # Case-insensitive sort; the lexicographically greatest name wins.
    return sorted(candidates, key=str.lower)[-1]
361 361
def checkForRealPath(path, foldercounter, year, doy, set, ext):
    """
    Linux is case sensitive, so probe the upper/lower-case combinations
    of the folder ("dYYYYDDD"/"DYYYYDDD", or no folder at all) and of
    the file prefix to locate the real data file on disk.

    Example:
        for .../D2009307/P2009307367.ext the function tries
        y2009307367.ext, Y2009307367.ext, x2009307/y..., X2009307/Y...
        until one combination exists.

    Return:
        (fullpath, filename) when a combination exists; otherwise
        (None, last_tried_filename). Unknown extensions yield
        (None, None).
    """
    if ext.lower() == ".r":         # voltage
        filePrefixes = ['d', 'D']
    elif ext.lower() == ".pdata":   # spectra
        filePrefixes = ['p', 'P']
    else:
        return None, None

    filename = None

    for dirPrefix in [None, 'd', 'D']:
        # Build the candidate directory xYYYYDDD (optionally _NN).
        if dirPrefix is None:
            thispath = path
        elif foldercounter == 0:
            thispath = os.path.join(path, "%s%04d%03d" % (dirPrefix, year, doy))
        else:
            thispath = os.path.join(path, "%s%04d%03d_%02d" % (dirPrefix, year, doy, foldercounter))

        for filePrefix in filePrefixes:
            # Candidate file name xYYYYDDDSSS.ext.
            filename = "%s%04d%03d%03d%s" % (filePrefix, year, doy, set, ext)
            fullfilename = os.path.join(thispath, filename)

            if os.path.exists(fullfilename):
                return fullfilename, filename

    return None, filename
420 420
def isRadarFolder(folder):
    # A radar folder is named "?YYYYDDD": chars 1-4 are the year and
    # chars 5-7 the day of the year; both must parse as integers.
    try:
        int(folder[1:5])
        int(folder[5:8])
    except:
        return 0
    return 1
429 429
def isRadarFile(file):
    # A radar file is named "?YYYYDDDSSS.ext": year, day of year and
    # set number must all parse as integers.
    try:
        int(file[1:5])   # year
        int(file[5:8])   # day of year
        int(file[8:11])  # set
    except:
        return 0
    return 1
439 439
def getDateFromRadarFile(file):
    # Decode a "?YYYYDDDSSS.ext" file name into a datetime.date,
    # or None when the name is malformed.
    try:
        year = int(file[1:5])
        doy = int(file[5:8])
        int(file[8:11])  # set number must also be numeric
    except:
        return None

    return datetime.date(year, 1, 1) + datetime.timedelta(doy - 1)
450 450
def getDateFromRadarFolder(folder):
    # Decode a "?YYYYDDD" folder name into a datetime.date,
    # or None when the name is malformed.
    try:
        year = int(folder[1:5])
        doy = int(folder[5:8])
    except:
        return None

    return datetime.date(year, 1, 1) + datetime.timedelta(doy - 1)
460 460
class JRODataIO:
    """
    Base class shared by the JRO (Jicamarca) data readers and writers.

    Holds the header objects, the open file handle and the bookkeeping
    state (indexes, sizes, flags) common to reading and writing.
    Subclasses must implement __init__ and run.
    """

    c = 3E8  # speed of light [m/s]

    isConfig = False

    basicHeaderObj = None  # BasicHeader of the current block

    systemHeaderObj = None

    radarControllerHeaderObj = None

    processingHeaderObj = None

    dtype = None  # numpy dtype of the raw samples

    pathList = []

    filenameList = []

    filename = None

    ext = None  # file extension, e.g. '.r' or '.pdata'

    flagIsNewFile = 1

    flagDiscontinuousBlock = 0

    flagIsNewBlock = 0

    fp = None  # open file object of the current data file

    firstHeaderSize = 0

    basicHeaderSize = 24  # fixed size in bytes of a basic header

    versionFile = 1103

    fileSize = None

    # ippSeconds = None

    fileSizeByHeader = None  # file size expected from the headers

    fileIndex = None

    profileIndex = None

    blockIndex = None

    nTotalBlocks = None

    maxTimeStep = 30  # max seconds between blocks before a discontinuity is flagged

    lastUTTime = None

    datablock = None

    dataOut = None

    blocksize = None

    getByBlock = False

    def __init__(self):
        # Abstract: subclasses must provide their own constructor.
        raise NotImplementedError

    def run(self):
        # Abstract: subclasses implement the read/write entry point.
        raise NotImplementedError

    def getDtypeWidth(self):
        # Width in bytes of one sample component for the current dtype.
        dtype_index = get_dtype_index(self.dtype)
        dtype_width = get_dtype_width(dtype_index)

        return dtype_width

    def getAllowedArgs(self):
        # Names of the arguments accepted by run().
        return inspect.getargspec(self.run).args
542 542
class JRODataReader(JRODataIO):
    # Reader-side state shared by the concrete JRO readers.

    online = 0  # nonzero: read files as they are being written

    realtime = 0

    nReadBlocks = 0  # blocks read so far from the current file

    delay = 10  # number of seconds waiting a new file

    nTries = 3  # quantity tries

    nFiles = 3  # number of files for searching

    path = None

    foldercounter = 0  # "_NN" suffix of the current doy folder

    flagNoMoreFiles = 0

    datetimeList = []

    __isFirstTimeOnline = 1

    __printInfo = True

    profileIndex = None

    nTxs = 1

    txIndex = None

    #Added--------------------

    selBlocksize = None

    selBlocktime = None
581 581
582 582
583 583 def __init__(self):
584 584
585 585 """
586 586 This class is used to find data files
587 587
588 588 Example:
589 589 reader = JRODataReader()
590 590 fileList = reader.findDataFiles()
591 591
592 592 """
593 593 pass
594 594
595 595
    def createObjByDefault(self):
        """
        Abstract method: must be implemented by subclasses.
        """
        raise NotImplementedError
601 601
    def getBlockDimension(self):
        # Abstract method: must be implemented by subclasses.
        raise NotImplementedError
605 605
    def __searchFilesOffLine(self,
                            path,
                            startDate=None,
                            endDate=None,
                            startTime=datetime.time(0,0,0),
                            endTime=datetime.time(23,59,59),
                            set=None,
                            expLabel='',
                            ext='.r',
                            queue=None,
                            cursor=None,
                            skip=None,
                            walk=True):
        """
        Find on disk the data files that fall inside the given date and
        time range and store them in self.filenameList together with
        their datetimes in self.datetimeList.

        The cursor/skip pair selects a slice of each folder's file list
        (fileList[cursor*skip : cursor*skip + skip]); when skip == 0
        only the total file count is sent through `queue`.

        Return:
            (pathList, filenameList) on success, (None, None) when no
            file matches the range.
        """
        self.filenameList = []
        self.datetimeList = []

        pathList = []

        dateList, pathList = self.findDatafiles(path, startDate, endDate, expLabel, ext, walk, include_path=True)

        if dateList == []:
            # print "[Reading] Date range selected invalid [%s - %s]: No *%s files in %s)" %(startDate, endDate, ext, path)
            return None, None

        if len(dateList) > 1:
            print "[Reading] Data found for date range [%s - %s]: total days = %d" %(startDate, endDate, len(dateList))
        else:
            print "[Reading] Data found for date range [%s - %s]: date = %s" %(startDate, endDate, dateList[0])

        filenameList = []
        datetimeList = []

        for thisPath in pathList:
            # thisPath = pathList[pathDict[file]]

            fileList = glob.glob1(thisPath, "*%s" %ext)
            fileList.sort()

            skippedFileList = []

            if cursor is not None and skip is not None:
                # if cursor*skip > len(fileList):
                if skip == 0:
                    if queue is not None:
                        # Only report the total so the caller can plan the split.
                        queue.put(len(fileList))
                    skippedFileList = []
                else:
                    skippedFileList = fileList[cursor*skip: cursor*skip + skip]

            else:
                skippedFileList = fileList

            for file in skippedFileList:

                filename = os.path.join(thisPath,file)

                if not isFileInDateRange(filename, startDate, endDate):
                    continue

                # Returns the file's first-block datetime, or None.
                thisDatetime = isFileInTimeRange(filename, startDate, endDate, startTime, endTime)

                if not(thisDatetime):
                    continue

                filenameList.append(filename)
                datetimeList.append(thisDatetime)

        if not(filenameList):
            print "[Reading] Time range selected invalid [%s - %s]: No *%s files in %s)" %(startTime, endTime, ext, path)
            return None, None

        print "[Reading] %d file(s) was(were) found in time range: %s - %s" %(len(filenameList), startTime, endTime)
        print

        for i in range(len(filenameList)):
            print "[Reading] %s -> [%s]" %(filenameList[i], datetimeList[i].ctime())

        self.filenameList = filenameList
        self.datetimeList = datetimeList

        return pathList, filenameList
688 688
    def __searchFilesOnLine(self, path, expLabel = "", ext = None, walk=True, set=None):

        """
        Look for the last file of the last folder and return it together
        with additional information about it.

        Input:
            path     : folder containing the data files

            expLabel : name of the sub-experiment (subfolder)

            ext      : extension of the files

            walk     : when True, search inside the "?YYYYDDD" doy
                       subfolders of `path`; when False, use `path` itself

        Return:
            directory : the directory where the file was found
            filename  : the last file of that folder
            year      : the year
            doy       : the day of the year
            set       : the set number of the file


        """
        if not os.path.isdir(path):
            return None, None, None, None, None, None

        dirList = []

        if not walk:
            fullpath = path
            foldercounter = 0
        else:
            # Keep only the sub-directories named like radar folders.
            for thisPath in os.listdir(path):
                if not os.path.isdir(os.path.join(path,thisPath)):
                    continue
                if not isRadarFolder(thisPath):
                    continue

                dirList.append(thisPath)

            if not(dirList):
                return None, None, None, None, None, None

            dirList = sorted( dirList, key=str.lower )

            doypath = dirList[-1]
            # Folder names may carry a "_NN" suffix; use it as the counter.
            foldercounter = int(doypath.split('_')[1]) if len(doypath.split('_'))>1 else 0
            fullpath = os.path.join(path, doypath, expLabel)


        print "[Reading] %s folder was found: " %(fullpath )

        if set == None:
            filename = getlastFileFromPath(fullpath, ext)
        else:
            filename = getFileFromSet(fullpath, ext, set)

        if not(filename):
            return None, None, None, None, None, None

        print "[Reading] %s file was found" %(filename)

        if not(self.__verifyFile(os.path.join(fullpath, filename))):
            return None, None, None, None, None, None

        # Decode "?YYYYDDDSSS" from the file name.
        year = int( filename[1:5] )
        doy = int( filename[5:8] )
        set = int( filename[8:11] )

        return fullpath, foldercounter, filename, year, doy, set
761 761
    def __setNextFileOffline(self):
        """
        Open the next readable file from self.filenameList, skipping the
        ones that do not pass __verifyFile.

        Sets flagNoMoreFiles when the list is exhausted.

        Return:
            1 when a file was opened, 0 when there are no more files.
        """
        idFile = self.fileIndex

        while (True):
            idFile += 1
            if not(idFile < len(self.filenameList)):
                self.flagNoMoreFiles = 1
                # print "[Reading] No more Files"
                return 0

            filename = self.filenameList[idFile]

            if not(self.__verifyFile(filename)):
                continue

            fileSize = os.path.getsize(filename)
            fp = open(filename,'rb')
            break

        self.flagIsNewFile = 1
        self.fileIndex = idFile
        self.filename = filename
        self.fileSize = fileSize
        self.fp = fp

        # print "[Reading] Setting the file: %s"%self.filename

        return 1
791 791
    def __setNextFileOnline(self):
        """
        Look for the next file with enough data to be read inside a
        specific folder; when no valid file is found, wait a given delay
        and retry over the next possible files.

        Affected:
            self.flagIsNewFile
            self.filename
            self.fileSize
            self.fp
            self.set
            self.flagNoMoreFiles

        Return:
            0 : no valid next file could be found after the search
            1 : the file was opened successfully and is ready to be read

        Exceptions:
            If a given file cannot be opened
        """
        nFiles = 0
        fileOk_flag = False
        firstTime_flag = True

        self.set += 1

        if self.set > 999:
            # Set numbers wrap at 999; move on to the next folder suffix.
            self.set = 0
            self.foldercounter += 1

        # Look for the first available file.
        fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
        if fullfilename:
            if self.__verifyFile(fullfilename, False):
                fileOk_flag = True

        # If no file was found, wait and search again.
        if not(fileOk_flag):
            # Search over the next self.nFiles+1 candidate files.
            for nFiles in range(self.nFiles+1):

                if firstTime_flag:  # on the first pass retry self.nTries times
                    tries = self.nTries
                else:
                    tries = 1       # afterwards, only once per candidate

                for nTries in range( tries ):
                    if firstTime_flag:
                        print "\t[Reading] Waiting %0.2f sec for the next file: \"%s\" , try %03d ..." % ( self.delay, filename, nTries+1 )
                        sleep( self.delay )
                    else:
                        print "\t[Reading] Searching the next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext)

                    fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
                    if fullfilename:
                        if self.__verifyFile(fullfilename):
                            fileOk_flag = True
                            break

                if fileOk_flag:
                    break

                firstTime_flag = False

                print "\t[Reading] Skipping the file \"%s\" due to this file doesn't exist" % filename
                self.set += 1

                # If the wanted file is still missing, switch to the next folder.
                if nFiles == (self.nFiles-1):
                    self.set = 0
                    self.doy += 1
                    self.foldercounter = 0

        if fileOk_flag:
            self.fileSize = os.path.getsize( fullfilename )
            self.filename = fullfilename
            self.flagIsNewFile = 1
            if self.fp != None: self.fp.close()
            self.fp = open(fullfilename, 'rb')
            self.flagNoMoreFiles = 0
            # print '[Reading] Setting the file: %s' % fullfilename
        else:
            self.fileSize = 0
            self.filename = None
            self.flagIsNewFile = 0
            self.fp = None
            self.flagNoMoreFiles = 1
            # print '[Reading] No more files to read'

        return fileOk_flag
881 881
882 882 def setNextFile(self):
883 883 if self.fp != None:
884 884 self.fp.close()
885 885
886 886 if self.online:
887 887 newFile = self.__setNextFileOnline()
888 888 else:
889 889 newFile = self.__setNextFileOffline()
890 890
891 891 if not(newFile):
892 892 print '[Reading] No more files to read'
893 893 return 0
894 894
895 895 if self.verbose:
896 896 print '[Reading] Setting the file: %s' % self.filename
897 897
898 898 self.__readFirstHeader()
899 899 self.nReadBlocks = 0
900 900 return 1
901 901
    def __waitNewBlock(self):
        """
        Return 1 when a new data block was found, 0 otherwise.

        In offline mode it always returns 0. Online, it re-opens the
        current file up to self.nTries times (sleeping self.delay
        seconds in between) waiting for it to grow by one block.
        """
        if not self.online:
            return 0

        if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
            # The current file already delivered all its blocks.
            return 0

        currentPointer = self.fp.tell()

        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        for nTries in range( self.nTries ):

            # Re-open so the OS-reported size is refreshed.
            self.fp.close()
            self.fp = open( self.filename, 'rb' )
            self.fp.seek( currentPointer )

            self.fileSize = os.path.getsize( self.filename )
            currentSize = self.fileSize - currentPointer

            if ( currentSize >= neededSize ):
                self.basicHeaderObj.read(self.fp)
                return 1

            if self.fileSize == self.fileSizeByHeader:
                # The file reached its announced size: it will not grow.
                # self.flagEoF = True
                return 0

            print "[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
            sleep( self.delay )


        return 0
940 940
    def waitDataBlock(self,pointer_location):
        """
        Wait until the file holds a complete data block starting at
        pointer_location, retrying up to self.nTries times with
        self.delay seconds between attempts.

        Return:
            1 when enough bytes are available, 0 when it gave up.
        """
        currentPointer = pointer_location

        neededSize = self.processingHeaderObj.blockSize #+ self.basicHeaderSize

        for nTries in range( self.nTries ):
            # Re-open so the OS-reported size is refreshed.
            self.fp.close()
            self.fp = open( self.filename, 'rb' )
            self.fp.seek( currentPointer )

            self.fileSize = os.path.getsize( self.filename )
            currentSize = self.fileSize - currentPointer

            if ( currentSize >= neededSize ):
                return 1

            print "[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
            sleep( self.delay )

        return 0
962 962
    def __jumpToLastBlock(self):
        """
        On the first online read, move the file pointer forward to the
        last complete block of the file.

        Does nothing on subsequent calls (guarded by
        __isFirstTimeOnline).
        """
        if not(self.__isFirstTimeOnline):
            return

        csize = self.fileSize - self.fp.tell()
        blocksize = self.processingHeaderObj.blockSize

        # Skip the first data block (its headers were already consumed).
        if csize > self.processingHeaderObj.blockSize:
            self.fp.seek(self.fp.tell() + blocksize)
        else:
            return

        csize = self.fileSize - self.fp.tell()
        neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
        # Step forward block by block; once at/past the end, step back one
        # block so the pointer rests at the start of the last block.
        while True:

            if self.fp.tell()<self.fileSize:
                self.fp.seek(self.fp.tell() + neededsize)
            else:
                self.fp.seek(self.fp.tell() - neededsize)
                break

        # csize = self.fileSize - self.fp.tell()
        # neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
        # factor = int(csize/neededsize)
        # if factor > 0:
        #     self.fp.seek(self.fp.tell() + factor*neededsize)

        self.flagIsNewFile = 0
        self.__isFirstTimeOnline = 0
995 995
996 996 def __setNewBlock(self):
997 997 #if self.server is None:
998 998 if self.fp == None:
999 999 return 0
1000 1000
1001 1001 # if self.online:
1002 1002 # self.__jumpToLastBlock()
1003 print 'xxxx'
1004 1003
1005 1004 if self.flagIsNewFile:
1006 1005 self.lastUTTime = self.basicHeaderObj.utc
1007 1006 return 1
1008 1007
1009 1008 if self.realtime:
1010 1009 self.flagDiscontinuousBlock = 1
1011 1010 if not(self.setNextFile()):
1012 1011 return 0
1013 1012 else:
1014 1013 return 1
1015 print 'xxxx'
1016 1014 #if self.server is None:
1017 1015 currentSize = self.fileSize - self.fp.tell()
1018 1016 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
1019 1017 if (currentSize >= neededSize):
1020 1018 self.basicHeaderObj.read(self.fp)
1021 1019 self.lastUTTime = self.basicHeaderObj.utc
1022 1020 return 1
1023 1021 # else:
1024 1022 # self.basicHeaderObj.read(self.zHeader)
1025 1023 # self.lastUTTime = self.basicHeaderObj.utc
1026 1024 # return 1
1027 1025 if self.__waitNewBlock():
1028 1026 self.lastUTTime = self.basicHeaderObj.utc
1029 1027 return 1
1030 1028 #if self.server is None:
1031 1029 if not(self.setNextFile()):
1032 1030 return 0
1033 1031
1034 1032 deltaTime = self.basicHeaderObj.utc - self.lastUTTime #
1035 1033 self.lastUTTime = self.basicHeaderObj.utc
1036 1034
1037 1035 self.flagDiscontinuousBlock = 0
1038 1036
1039 1037 if deltaTime > self.maxTimeStep:
1040 1038 self.flagDiscontinuousBlock = 1
1041 1039
1042 1040 return 1
1043 1041
1044 1042 def readNextBlock(self):
1045 1043
1046 1044 #Skip block out of startTime and endTime
1047 1045 while True:
1048 print 'cxxxx'
1049 1046 if not(self.__setNewBlock()):
1050 1047 print 'returning'
1051 1048 return 0
1052 print 'dxxx'
1053 1049 if not(self.readBlock()):
1054 1050 return 0
1055 1051
1056 1052 self.getBasicHeader()
1057 1053
1058 1054 if not isTimeInRange(self.dataOut.datatime.time(), self.startTime, self.endTime):
1059 1055
1060 1056 print "[Reading] Block No. %d/%d -> %s [Skipping]" %(self.nReadBlocks,
1061 1057 self.processingHeaderObj.dataBlocksPerFile,
1062 1058 self.dataOut.datatime.ctime())
1063 1059 continue
1064 1060
1065 1061 break
1066 1062
1067 1063 if self.verbose:
1068 1064 print "[Reading] Block No. %d/%d -> %s" %(self.nReadBlocks,
1069 1065 self.processingHeaderObj.dataBlocksPerFile,
1070 1066 self.dataOut.datatime.ctime())
1071 1067 return 1
1072 1068
    def __readFirstHeader(self):
        """
        Read the four headers at the start of the file (basic, system,
        radar controller, processing) and derive the sample dtype, the
        expected file size and the block dimensions.

        Raises:
            ValueError when the datatype flag of the processing header
            is not one of the known codes (0-5).
        """
        self.basicHeaderObj.read(self.fp)
        self.systemHeaderObj.read(self.fp)
        self.radarControllerHeaderObj.read(self.fp)
        self.processingHeaderObj.read(self.fp)

        self.firstHeaderSize = self.basicHeaderObj.size

        # Decode the sample data-type code from the processing flags.
        datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
        if datatype == 0:
            datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
        elif datatype == 1:
            datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')])
        elif datatype == 2:
            datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')])
        elif datatype == 3:
            datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')])
        elif datatype == 4:
            datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')])
        elif datatype == 5:
            datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')])
        else:
            raise ValueError, 'Data type was not defined'

        self.dtype = datatype_str
        #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
        # Expected total size: every block carries a basic header except
        # the first one, which is accounted for inside firstHeaderSize.
        self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1)
        # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
        # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
        self.getBlockDimension()
1104 1100
1105 1101 def __verifyFile(self, filename, msgFlag=True):
1106 1102
1107 1103 msg = None
1108 1104
1109 1105 try:
1110 1106 fp = open(filename, 'rb')
1111 1107 except IOError:
1112 1108
1113 1109 if msgFlag:
1114 1110 print "[Reading] File %s can't be opened" % (filename)
1115 1111
1116 1112 return False
1117 1113
1118 1114 currentPosition = fp.tell()
1119 1115 neededSize = self.processingHeaderObj.blockSize + self.firstHeaderSize
1120 1116
1121 1117 if neededSize == 0:
1122 1118 basicHeaderObj = BasicHeader(LOCALTIME)
1123 1119 systemHeaderObj = SystemHeader()
1124 1120 radarControllerHeaderObj = RadarControllerHeader()
1125 1121 processingHeaderObj = ProcessingHeader()
1126 1122
1127 1123 if not( basicHeaderObj.read(fp) ):
1128 1124 fp.close()
1129 1125 return False
1130 1126
1131 1127 if not( systemHeaderObj.read(fp) ):
1132 1128 fp.close()
1133 1129 return False
1134 1130
1135 1131 if not( radarControllerHeaderObj.read(fp) ):
1136 1132 fp.close()
1137 1133 return False
1138 1134
1139 1135 if not( processingHeaderObj.read(fp) ):
1140 1136 fp.close()
1141 1137 return False
1142 1138
1143 1139 neededSize = processingHeaderObj.blockSize + basicHeaderObj.size
1144 1140 else:
1145 1141 msg = "[Reading] Skipping the file %s due to it hasn't enough data" %filename
1146 1142
1147 1143 fp.close()
1148 1144
1149 1145 fileSize = os.path.getsize(filename)
1150 1146 currentSize = fileSize - currentPosition
1151 1147
1152 1148 if currentSize < neededSize:
1153 1149 if msgFlag and (msg != None):
1154 1150 print msg
1155 1151 return False
1156 1152
1157 1153 return True
1158 1154
1159 1155 def findDatafiles(self, path, startDate=None, endDate=None, expLabel='', ext='.r', walk=True, include_path=False):
1160 1156
1161 1157 path_empty = True
1162 1158
1163 1159 dateList = []
1164 1160 pathList = []
1165 1161
1166 1162 multi_path = path.split(',')
1167 1163
1168 1164 if not walk:
1169 1165
1170 1166 for single_path in multi_path:
1171 1167
1172 1168 if not os.path.isdir(single_path):
1173 1169 continue
1174 1170
1175 1171 fileList = glob.glob1(single_path, "*"+ext)
1176 1172
1177 1173 if not fileList:
1178 1174 continue
1179 1175
1180 1176 path_empty = False
1181 1177
1182 1178 fileList.sort()
1183 1179
1184 1180 for thisFile in fileList:
1185 1181
1186 1182 if not os.path.isfile(os.path.join(single_path, thisFile)):
1187 1183 continue
1188 1184
1189 1185 if not isRadarFile(thisFile):
1190 1186 continue
1191 1187
1192 1188 if not isFileInDateRange(thisFile, startDate, endDate):
1193 1189 continue
1194 1190
1195 1191 thisDate = getDateFromRadarFile(thisFile)
1196 1192
1197 1193 if thisDate in dateList:
1198 1194 continue
1199 1195
1200 1196 dateList.append(thisDate)
1201 1197 pathList.append(single_path)
1202 1198
1203 1199 else:
1204 1200 for single_path in multi_path:
1205 1201
1206 1202 if not os.path.isdir(single_path):
1207 1203 continue
1208 1204
1209 1205 dirList = []
1210 1206
1211 1207 for thisPath in os.listdir(single_path):
1212 1208
1213 1209 if not os.path.isdir(os.path.join(single_path,thisPath)):
1214 1210 continue
1215 1211
1216 1212 if not isRadarFolder(thisPath):
1217 1213 continue
1218 1214
1219 1215 if not isFolderInDateRange(thisPath, startDate, endDate):
1220 1216 continue
1221 1217
1222 1218 dirList.append(thisPath)
1223 1219
1224 1220 if not dirList:
1225 1221 continue
1226 1222
1227 1223 dirList.sort()
1228 1224
1229 1225 for thisDir in dirList:
1230 1226
1231 1227 datapath = os.path.join(single_path, thisDir, expLabel)
1232 1228 fileList = glob.glob1(datapath, "*"+ext)
1233 1229
1234 1230 if not fileList:
1235 1231 continue
1236 1232
1237 1233 path_empty = False
1238 1234
1239 1235 thisDate = getDateFromRadarFolder(thisDir)
1240 1236
1241 1237 pathList.append(datapath)
1242 1238 dateList.append(thisDate)
1243 1239
1244 1240 dateList.sort()
1245 1241
1246 1242 if walk:
1247 1243 pattern_path = os.path.join(multi_path[0], "[dYYYYDDD]", expLabel)
1248 1244 else:
1249 1245 pattern_path = multi_path[0]
1250 1246
1251 1247 if path_empty:
1252 1248 print "[Reading] No *%s files in %s for %s to %s" %(ext, pattern_path, startDate, endDate)
1253 1249 else:
1254 1250 if not dateList:
1255 1251 print "[Reading] Date range selected invalid [%s - %s]: No *%s files in %s)" %(startDate, endDate, ext, path)
1256 1252
1257 1253 if include_path:
1258 1254 return dateList, pathList
1259 1255
1260 1256 return dateList
1261 1257
1262 1258 def setup(self,
1263 1259 path=None,
1264 1260 startDate=None,
1265 1261 endDate=None,
1266 1262 startTime=datetime.time(0,0,0),
1267 1263 endTime=datetime.time(23,59,59),
1268 1264 set=None,
1269 1265 expLabel = "",
1270 1266 ext = None,
1271 1267 online = False,
1272 1268 delay = 60,
1273 1269 walk = True,
1274 1270 getblock = False,
1275 1271 nTxs = 1,
1276 1272 realtime=False,
1277 1273 blocksize=None,
1278 1274 blocktime=None,
1279 1275 queue=None,
1280 1276 skip=None,
1281 1277 cursor=None,
1282 1278 warnings=True,
1283 1279 verbose=True,
1284 1280 server=None):
1285 1281 if server is not None:
1286 1282 if 'tcp://' in server:
1287 1283 address = server
1288 1284 else:
1289 1285 address = 'ipc:///tmp/%s' % server
1290 1286 self.server = address
1291 1287 self.context = zmq.Context()
1292 1288 self.receiver = self.context.socket(zmq.PULL)
1293 1289 self.receiver.connect(self.server)
1294 1290 time.sleep(0.5)
1295 1291 print '[Starting] ReceiverData from {}'.format(self.server)
1296 1292 else:
1297 1293 self.server = None
1298 1294 if path == None:
1299 1295 raise ValueError, "[Reading] The path is not valid"
1300 1296
1301 1297 if ext == None:
1302 1298 ext = self.ext
1303 1299
1304 1300 if online:
1305 1301 print "[Reading] Searching files in online mode..."
1306 1302
1307 1303 for nTries in range( self.nTries ):
1308 1304 fullpath, foldercounter, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext, walk=walk, set=set)
1309 1305
1310 1306 if fullpath:
1311 1307 break
1312 1308
1313 1309 print '[Reading] Waiting %0.2f sec for an valid file in %s: try %02d ...' % (self.delay, path, nTries+1)
1314 1310 sleep( self.delay )
1315 1311
1316 1312 if not(fullpath):
1317 1313 print "[Reading] There 'isn't any valid file in %s" % path
1318 1314 return
1319 1315
1320 1316 self.year = year
1321 1317 self.doy = doy
1322 1318 self.set = set - 1
1323 1319 self.path = path
1324 1320 self.foldercounter = foldercounter
1325 1321 last_set = None
1326 1322 else:
1327 1323 print "[Reading] Searching files in offline mode ..."
1328 1324 pathList, filenameList = self.__searchFilesOffLine(path, startDate=startDate, endDate=endDate,
1329 1325 startTime=startTime, endTime=endTime,
1330 1326 set=set, expLabel=expLabel, ext=ext,
1331 1327 walk=walk, cursor=cursor,
1332 1328 skip=skip, queue=queue)
1333 1329
1334 1330 if not(pathList):
1335 1331 # print "[Reading] No *%s files in %s (%s - %s)"%(ext, path,
1336 1332 # datetime.datetime.combine(startDate,startTime).ctime(),
1337 1333 # datetime.datetime.combine(endDate,endTime).ctime())
1338 1334
1339 1335 # sys.exit(-1)
1340 1336
1341 1337 self.fileIndex = -1
1342 1338 self.pathList = []
1343 1339 self.filenameList = []
1344 1340 return
1345 1341
1346 1342 self.fileIndex = -1
1347 1343 self.pathList = pathList
1348 1344 self.filenameList = filenameList
1349 1345 file_name = os.path.basename(filenameList[-1])
1350 1346 basename, ext = os.path.splitext(file_name)
1351 1347 last_set = int(basename[-3:])
1352 1348
1353 1349 self.online = online
1354 1350 self.realtime = realtime
1355 1351 self.delay = delay
1356 1352 ext = ext.lower()
1357 1353 self.ext = ext
1358 1354 self.getByBlock = getblock
1359 1355 self.nTxs = nTxs
1360 1356 self.startTime = startTime
1361 1357 self.endTime = endTime
1362 1358
1363 1359 #Added-----------------
1364 1360 self.selBlocksize = blocksize
1365 1361 self.selBlocktime = blocktime
1366 1362
1367 1363 # Verbose-----------
1368 1364 self.verbose = verbose
1369 1365 self.warnings = warnings
1370 1366
1371 1367 if not(self.setNextFile()):
1372 1368 if (startDate!=None) and (endDate!=None):
1373 1369 print "[Reading] No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
1374 1370 elif startDate != None:
1375 1371 print "[Reading] No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
1376 1372 else:
1377 1373 print "[Reading] No files"
1378 1374
1379 1375 self.fileIndex = -1
1380 1376 self.pathList = []
1381 1377 self.filenameList = []
1382 1378 return
1383 1379
1384 1380 # self.getBasicHeader()
1385 1381
1386 1382 if last_set != None:
1387 1383 self.dataOut.last_block = last_set * self.processingHeaderObj.dataBlocksPerFile + self.basicHeaderObj.dataBlock
1388 1384 return
1389 1385
1390 1386 def getBasicHeader(self):
1391 1387
1392 1388 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000. + self.profileIndex * self.radarControllerHeaderObj.ippSeconds
1393 1389
1394 1390 self.dataOut.flagDiscontinuousBlock = self.flagDiscontinuousBlock
1395 1391
1396 1392 self.dataOut.timeZone = self.basicHeaderObj.timeZone
1397 1393
1398 1394 self.dataOut.dstFlag = self.basicHeaderObj.dstFlag
1399 1395
1400 1396 self.dataOut.errorCount = self.basicHeaderObj.errorCount
1401 1397
1402 1398 self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime
1403 1399
1404 1400 self.dataOut.ippSeconds = self.radarControllerHeaderObj.ippSeconds/self.nTxs
1405 1401
1406 1402 # self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock*self.nTxs
1407 1403
1408 1404
    def getFirstHeader(self):
        # Abstract hook: subclasses must populate dataOut from the first header.
        raise NotImplementedError
1412 1408
    def getData(self):
        # Abstract hook: subclasses must deliver the next unit of data.
        raise NotImplementedError
1416 1412
    def hasNotDataInBuffer(self):
        # Abstract hook: subclasses report whether the read buffer is exhausted.
        raise NotImplementedError
1420 1416
    def readBlock(self):
        # Abstract hook: subclasses read one data block from the current file.
        raise NotImplementedError
1424 1420
    def isEndProcess(self):
        # True once every selected file has been read (no more data available).
        return self.flagNoMoreFiles
1428 1424
1429 1425 def printReadBlocks(self):
1430 1426
1431 1427 print "[Reading] Number of read blocks per file %04d" %self.nReadBlocks
1432 1428
1433 1429 def printTotalBlocks(self):
1434 1430
1435 1431 print "[Reading] Number of read blocks %04d" %self.nTotalBlocks
1436 1432
1437 1433 def printNumberOfBlock(self):
1438 1434
1439 1435 if self.flagIsNewBlock:
1440 1436 print "[Reading] Block No. %d/%d -> %s" %(self.nReadBlocks,
1441 1437 self.processingHeaderObj.dataBlocksPerFile,
1442 1438 self.dataOut.datatime.ctime())
1443 1439
1444 1440 def printInfo(self):
1445 1441
1446 1442 if self.__printInfo == False:
1447 1443 return
1448 1444
1449 1445 self.basicHeaderObj.printInfo()
1450 1446 self.systemHeaderObj.printInfo()
1451 1447 self.radarControllerHeaderObj.printInfo()
1452 1448 self.processingHeaderObj.printInfo()
1453 1449
1454 1450 self.__printInfo = False
1455 1451
1456 1452
    def run(self,
            path=None,
            startDate=None,
            endDate=None,
            startTime=datetime.time(0,0,0),
            endTime=datetime.time(23,59,59),
            set=None,
            expLabel = "",
            ext = None,
            online = False,
            delay = 60,
            walk = True,
            getblock = False,
            nTxs = 1,
            realtime=False,
            blocksize=None,
            blocktime=None,
            queue=None,
            skip=None,
            cursor=None,
            warnings=True,
            server=None,
            verbose=True, **kwargs):
        """Entry point of the reading unit.

        On the first call every argument is forwarded to setup(); afterwards
        each call fetches one item, either from local files (getData) or from
        a ZMQ server (getFromServer). See setup() for parameter meanings.
        """

        if not(self.isConfig):
            self.setup( path=path,
                        startDate=startDate,
                        endDate=endDate,
                        startTime=startTime,
                        endTime=endTime,
                        set=set,
                        expLabel=expLabel,
                        ext=ext,
                        online=online,
                        delay=delay,
                        walk=walk,
                        getblock=getblock,
                        nTxs=nTxs,
                        realtime=realtime,
                        blocksize=blocksize,
                        blocktime=blocktime,
                        queue=queue,
                        skip=skip,
                        cursor=cursor,
                        warnings=warnings,
                        server=server,
                        verbose=verbose)
            self.isConfig = True
        if server is None:
            # Read from files on disk
            self.getData()
        else:
            # Pull from the configured ZMQ server
            self.getFromServer()
1510 1506
class JRODataWriter(JRODataIO):

    """
    Writes processed data to '.r' or '.pdata' files. Data is always written
    block by block: each file starts with a first header (basic + long
    headers) and every subsequent block is preceded by a basic header.
    """

    blockIndex = 0            # index of the next block inside the current file

    path = None               # output directory

    setFile = None            # current file set number (SSS in xYYYYDDDSSS.ext)

    profilesPerBlock = None   # profiles written per block

    blocksPerFile = None      # blocks written per file

    nWriteBlocks = 0          # total number of blocks written

    fileDate = None           # date of the currently open file

    def __init__(self, dataOut=None):
        # Abstract: concrete writers must define their own constructor.
        raise NotImplementedError

    def hasAllDataInBuffer(self):
        # Abstract: True when a full block is buffered and ready to write.
        raise NotImplementedError

    def setBlockDimension(self):
        # Abstract: define the shape of the output block buffers.
        raise NotImplementedError

    def writeBlock(self):
        # Abstract: serialize one block of data to self.fp.
        raise NotImplementedError

    def putData(self):
        # Abstract: accumulate incoming data and flush complete blocks.
        raise NotImplementedError

    def getProcessFlags(self):
        """Build the processFlags word stored in the processing header from
        the dtype and the processing state of self.dataOut."""

        processFlags = 0

        dtype_index = get_dtype_index(self.dtype)
        procflag_dtype = get_procflag_dtype(dtype_index)

        processFlags += procflag_dtype

        if self.dataOut.flagDecodeData:
            processFlags += PROCFLAG.DECODE_DATA

        if self.dataOut.flagDeflipData:
            processFlags += PROCFLAG.DEFLIP_DATA

        if self.dataOut.code is not None:
            processFlags += PROCFLAG.DEFINE_PROCESS_CODE

        if self.dataOut.nCohInt > 1:
            processFlags += PROCFLAG.COHERENT_INTEGRATION

        # Spectra-only flags
        if self.dataOut.type == "Spectra":
            if self.dataOut.nIncohInt > 1:
                processFlags += PROCFLAG.INCOHERENT_INTEGRATION

            if self.dataOut.data_dc is not None:
                processFlags += PROCFLAG.SAVE_CHANNELS_DC

            if self.dataOut.flagShiftFFT:
                processFlags += PROCFLAG.SHIFT_FFT_DATA

        return processFlags

    def setBasicHeader(self):
        """Fill the basic header from the current dataOut state."""

        self.basicHeaderObj.size = self.basicHeaderSize  # bytes
        self.basicHeaderObj.version = self.versionFile
        self.basicHeaderObj.dataBlock = self.nTotalBlocks

        # Split the float timestamp into integer seconds + milliseconds
        utc = numpy.floor(self.dataOut.utctime)
        milisecond = (self.dataOut.utctime - utc) * 1000.0

        self.basicHeaderObj.utc = utc
        self.basicHeaderObj.miliSecond = milisecond
        self.basicHeaderObj.timeZone = self.dataOut.timeZone
        self.basicHeaderObj.dstFlag = self.dataOut.dstFlag
        self.basicHeaderObj.errorCount = self.dataOut.errorCount

    def setFirstHeader(self):
        """
        Build a copy of the first header.

        Affected:
            self.basicHeaderObj
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.processingHeaderObj

        Return:
            None
        """
        # Abstract: concrete writers build their own header set.
        raise NotImplementedError

    def __writeFirstHeader(self):
        """
        Write the first header of the file: the basic header plus the long
        header (SystemHeader, RadarControllerHeader, ProcessingHeader).

        Return:
            None
        """

        # The basic-header size field covers the long header as well
        sizeLongHeader = self.systemHeaderObj.size + self.radarControllerHeaderObj.size + self.processingHeaderObj.size
        self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader

        self.basicHeaderObj.write(self.fp)
        self.systemHeaderObj.write(self.fp)
        self.radarControllerHeaderObj.write(self.fp)
        self.processingHeaderObj.write(self.fp)

    def __setNewBlock(self):
        """
        On a brand-new file the first header has already been written;
        otherwise write a basic header before the next block, rolling over
        to a new file when the current one is full.

        Return:
            0 : nothing could be written
            1 : a header was written (or the new file already has one)
        """
        if self.fp is None:
            self.setNextFile()

        if self.flagIsNewFile:
            return 1

        if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
            self.basicHeaderObj.write(self.fp)
            return 1

        if not self.setNextFile():
            return 0

        return 1

    def writeNextBlock(self):
        """
        Write the next block of data to the current file.

        Return:
            0 : the block could not be written
            1 : the block was written successfully
        """
        # NOTE: the original docstring described both outcomes as failure;
        # 1 is the success path.
        if not self.__setNewBlock():
            return 0

        self.writeBlock()

        print("[Writing] Block No. %d/%d" % (self.blockIndex,
                                             self.processingHeaderObj.dataBlocksPerFile))

        return 1

    def setNextFile(self):
        """
        Open the next output file (name layout: x YYYY DDD SSS .ext) and
        write its first header.

        Affected:
            self.filename
            self.subfolder
            self.fp
            self.setFile
            self.flagIsNewFile
            self.fileDate

        Return:
            0 : the file could not be opened for writing
            1 : the file is ready to be written
        """
        ext = self.ext
        path = self.path

        if self.fp is not None:
            self.fp.close()

        timeTuple = time.localtime(self.dataOut.utctime)
        subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year, timeTuple.tm_yday)

        fullpath = os.path.join(path, subfolder)
        setFile = self.setFile

        if not os.path.exists(fullpath):
            os.mkdir(fullpath)
            setFile = -1  # new day folder: restart the set counter
        else:
            filesList = os.listdir(fullpath)
            if len(filesList) > 0:
                filesList = sorted(filesList, key=str.lower)
                filen = filesList[-1]
                # Expected file name layout (hex offsets):
                #   0 1234 567 89A BCDE
                #   x YYYY DDD SSS .ext
                if isNumber(filen[8:11]):
                    # resume the set counter from the last file on disk
                    setFile = int(filen[8:11])
                else:
                    setFile = -1
            else:
                setFile = -1  # empty folder: restart the set counter

        setFile += 1

        # A new day resets the set number and the total block counter
        if self.dataOut.datatime.date() > self.fileDate:
            setFile = 0
            self.nTotalBlocks = 0

        filen = '%s%4.4d%3.3d%3.3d%s' % (self.optchar, timeTuple.tm_year, timeTuple.tm_yday, setFile, ext)

        filename = os.path.join(path, subfolder, filen)

        fp = open(filename, 'wb')

        self.blockIndex = 0

        # Keep the new-file state
        self.filename = filename
        self.subfolder = subfolder
        self.fp = fp
        self.setFile = setFile
        self.flagIsNewFile = 1
        self.fileDate = self.dataOut.datatime.date()

        self.setFirstHeader()

        print('[Writing] Opening file: %s' % self.filename)

        self.__writeFirstHeader()

        return 1

    def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=64, set=None, ext=None, datatype=4):
        """
        Configure the output format and open the first output file.

        Inputs:
            path             : directory where data will be saved
            blocksPerFile    : number of blocks per file
            profilesPerBlock : number of profiles per block
            set              : initial file set (None = continue from disk)
            ext              : output extension (None = keep current)
            datatype         : integer data-type code:
                               0 : int8    (1 byte)
                               1 : int16   (2 bytes)
                               2 : int32   (4 bytes)
                               3 : int64   (8 bytes)
                               4 : float32 (4 bytes)
                               5 : double64 (8 bytes)

        Return:
            0 : setup failed
            1 : setup succeeded
        """

        if ext is None:
            ext = self.ext

        self.ext = ext.lower()

        self.path = path

        if set is None:
            self.setFile = -1
        else:
            self.setFile = set - 1

        self.blocksPerFile = blocksPerFile

        self.profilesPerBlock = profilesPerBlock

        self.dataOut = dataOut
        self.fileDate = self.dataOut.datatime.date()
        # By default the output dtype mirrors the incoming data
        self.dtype = self.dataOut.dtype

        if datatype is not None:
            self.dtype = get_numpy_dtype(datatype)

        if not self.setNextFile():
            print("[Writing] There isn't a next file")
            return 0

        self.setBlockDimension()

        return 1

    def run(self, dataOut, path, blocksPerFile, profilesPerBlock=64, set=None, ext=None, datatype=4, **kwargs):
        """Configure the writer on the first call, then write incoming data."""

        if not self.isConfig:

            self.setup(dataOut, path, blocksPerFile, profilesPerBlock=profilesPerBlock, set=set, ext=ext, datatype=datatype, **kwargs)
            self.isConfig = True

        self.putData()
@@ -1,605 +1,631
1
1 2 '''
2 3 Created on Jul 3, 2014
3 4
4 5 @author: roj-idl71
5 6 '''
6 7 import os
7 8 import datetime
8 9 import numpy
10 from fractions import Fraction
9 11
10 12 try:
11 13 from gevent import sleep
12 14 except:
13 15 from time import sleep
14 16
15 17 from schainpy.model.data.jroheaderIO import RadarControllerHeader, SystemHeader
16 18 from schainpy.model.data.jrodata import Voltage
17 19 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
18 20
19 21 try:
20 22 import digital_rf
21 23 except:
22 24 print 'You should install "digital_rf" module if you want to read Digital RF data'
23 25
24 26 class DigitalRFReader(ProcessingUnit):
25 27 '''
26 28 classdocs
27 29 '''
28 30
    def __init__(self, **kwargs):
        '''
        Constructor: create the output Voltage object and reset the reader
        state (print flag, discontinuity flag, buffer index and radar-code
        metadata).
        '''

        ProcessingUnit.__init__(self, **kwargs)

        self.dataOut = Voltage()
        self.__printInfo = True
        self.__flagDiscontinuousBlock = False
        # Oversized index forces a buffer refill on the first read
        self.__bufferIndex = 9999999

        # Radar-code metadata, filled in by setup()
        self.__ippKm = None
        self.__codeType = 0
        self.__nCode = None
        self.__nBaud = None
        self.__code = None
46 48
    def __getCurrentSecond(self):
        # Current Unix time (in seconds) of the read pointer, derived from
        # the absolute sample index and the channel sample rate.
        return self.__thisUnixSample/self.__sample_rate

    thisSecond = property(__getCurrentSecond, "I'm the 'thisSecond' property.")
52 54
    def __setFileHeader(self):
        '''
        Initialize every header/metadata field of the dataOut object (no
        sample data yet): radar controller and system headers, height list,
        channel list and timing information.
        '''
        # One profile spans nSamples at the channel sample rate
        ippSeconds = 1.0*self.__nSamples/self.__sample_rate

        nProfiles = 1.0/ippSeconds #Number of profiles in one second

        self.dataOut.radarControllerHeaderObj = RadarControllerHeader(ippKm=self.__ippKm,
                                                                      txA=0,
                                                                      txB=0,
                                                                      nWindows=1,
                                                                      nHeights=self.__nSamples,
                                                                      firstHeight=self.__firstHeigth,
                                                                      deltaHeight=self.__deltaHeigth,
                                                                      codeType=self.__codeType,
                                                                      nCode=self.__nCode, nBaud=self.__nBaud,
                                                                      code = self.__code)

        self.dataOut.systemHeaderObj = SystemHeader(nSamples=self.__nSamples,
                                                    nProfiles=nProfiles,
                                                    nChannels=len(self.__channelList),
                                                    adcResolution=14)

        self.dataOut.type = "Voltage"

        self.dataOut.data = None

        self.dataOut.dtype = numpy.dtype([('real','<i8'),('imag','<i8')])

        self.dataOut.nProfiles = nProfiles

        self.dataOut.heightList = self.__firstHeigth + numpy.arange(self.__nSamples, dtype = numpy.float)*self.__deltaHeigth

        self.dataOut.channelList = self.__channelList

        self.dataOut.blocksize = self.dataOut.getNChannels() * self.dataOut.getNHeights()

        self.dataOut.flagNoData = True

        # Set to TRUE if the data is discontinuous
        self.dataOut.flagDiscontinuousBlock = False

        self.dataOut.utctime = None

        # Timezone like jroheader: difference in minutes between UTC and localtime
        self.dataOut.timeZone = self.__timezone/60

        self.dataOut.dstFlag = 0

        self.dataOut.errorCount = 0

        self.dataOut.nCohInt = 1

        # Assume the data is already decoded
        self.dataOut.flagDecodeData = False

        # Assume the data has not been flipped
        self.dataOut.flagDeflipData = False

        self.dataOut.flagShiftFFT = False

        self.dataOut.ippSeconds = ippSeconds

        self.dataOut.frequency = self.__frequency

        self.dataOut.realtime = self.__online
126 128
127 129 def findDatafiles(self, path, startDate=None, endDate=None):
128 130
129 131 if not os.path.isdir(path):
130 132 return []
131 133
132 134 try:
133 135 digitalReadObj = digital_rf.DigitalRFReader(path, load_all_metadata=True)
134 136 except:
135 137 digitalReadObj = digital_rf.DigitalRFReader(path)
136 138
137 139 channelNameList = digitalReadObj.get_channels()
138 140
139 141 if not channelNameList:
140 142 return []
141 143
142 144 metadata_dict = digitalReadObj.get_rf_file_metadata(channelNameList[0])
143 145
144 146 sample_rate = metadata_dict['sample_rate'][0]
145 147
146 148 this_metadata_file = digitalReadObj.get_metadata(channelNameList[0])
147 149
148 150 try:
149 151 timezone = this_metadata_file['timezone'].value
150 152 except:
151 153 timezone = 0
152 154
153 155 startUTCSecond, endUTCSecond = digitalReadObj.get_bounds(channelNameList[0])/sample_rate - timezone
154 156
155 157 startDatetime = datetime.datetime.utcfromtimestamp(startUTCSecond)
156 158 endDatatime = datetime.datetime.utcfromtimestamp(endUTCSecond)
157 159
158 160 if not startDate:
159 161 startDate = startDatetime.date()
160 162
161 163 if not endDate:
162 164 endDate = endDatatime.date()
163 165
164 166 dateList = []
165 167
166 168 thisDatetime = startDatetime
167 169
168 170 while(thisDatetime<=endDatatime):
169 171
170 172 thisDate = thisDatetime.date()
171 173
172 174 if thisDate < startDate:
173 175 continue
174 176
175 177 if thisDate > endDate:
176 178 break
177 179
178 180 dateList.append(thisDate)
179 181 thisDatetime += datetime.timedelta(1)
180 182
181 183 return dateList
182 184
183 185 def setup(self, path = None,
184 186 startDate = None,
185 187 endDate = None,
186 188 startTime = datetime.time(0,0,0),
187 189 endTime = datetime.time(23,59,59),
188 190 channelList = None,
189 191 nSamples = None,
190 192 ippKm = 60,
191 193 online = False,
192 194 delay = 60,
193 195 buffer_size = 1024,
194 196 **kwargs):
195 197 '''
196 198 In this method we should set all initial parameters.
197 199
198 200 Inputs:
199 201 path
200 202 startDate
201 203 endDate
202 204 startTime
203 205 endTime
204 206 set
205 207 expLabel
206 208 ext
207 209 online
208 210 delay
209 211 '''
210 212
211 213 if not os.path.isdir(path):
212 214 raise ValueError, "[Reading] Directory %s does not exist" %path
213 215
214 216 try:
215 217 self.digitalReadObj = digital_rf.DigitalRFReader(path, load_all_metadata=True)
216 218 except:
217 219 self.digitalReadObj = digital_rf.DigitalRFReader(path)
218 220
219 221 channelNameList = self.digitalReadObj.get_channels()
220 222
221 223 if not channelNameList:
222 224 raise ValueError, "[Reading] Directory %s does not have any files" %path
223 225
224 226 if not channelList:
225 227 channelList = range(len(channelNameList))
226 228
227 229 ########## Reading metadata ######################
228 230
229 231 metadata_dict = self.digitalReadObj.get_properties(channelNameList[channelList[0]])
230 232
231 233 self.__sample_rate = metadata_dict['sample_rate_numerator'] / metadata_dict['sample_rate_denominator']
232 234 # self.__samples_per_file = metadata_dict['samples_per_file'][0]
233 235 self.__deltaHeigth = 1e6*0.15/self.__sample_rate ## why 0.15?
234 236
235 237 this_metadata_file = self.digitalReadObj.get_digital_metadata(channelNameList[channelList[0]])
236 238 metadata_bounds = this_metadata_file.get_bounds()
237 239 self.fixed_metadata_dict = this_metadata_file.read(metadata_bounds[0])[metadata_bounds[0]] ##GET FIRST HEADER
238 240
239 241 self.__frequency = None
240 242 try:
241 243 self.__frequency = self.fixed_metadata_dict['frequency']
242 244 except:
243 245 self.__frequency = None
244 246
245 247 try:
246 248 self.__timezone = self.fixed_metadata_dict['timezone']
247 249 except:
248 250 self.__timezone = 0
249 251
250 252 self.__firstHeigth = 0
251 253
252 254
253 255 try:
254 256 codeType = self.fixed_metadata_dict['codeType']
255 257 except:
256 258 codeType = 0
257 259
258 260 nCode = 1
259 261 nBaud = 1
260 262 code = numpy.ones((nCode, nBaud), dtype=numpy.int)
261 263
262 264 if codeType:
263 265 nCode = self.fixed_metadata_dict['nCode']
264 266 nBaud = self.fixed_metadata_dict['nBaud']
265 267 code = self.fixed_metadata_dict['code']
266 268
267 269 if not ippKm:
268 270 try:
269 271 #seconds to km
270 272 ippKm = 1e6*0.15*self.fixed_metadata_dict['ipp']
271 273 except:
272 274 ippKm = None
273 275 ####################################################
274 276 startUTCSecond = None
275 277 endUTCSecond = None
276 278
277 279 if startDate:
278 280 startDatetime = datetime.datetime.combine(startDate, startTime)
279 281 startUTCSecond = (startDatetime-datetime.datetime(1970,1,1)).total_seconds() + self.__timezone
280 282
281 283 if endDate:
282 284 endDatetime = datetime.datetime.combine(endDate, endTime)
283 285 endUTCSecond = (endDatetime-datetime.datetime(1970,1,1)).total_seconds() + self.__timezone
284 286
285 287 start_index, end_index = self.digitalReadObj.get_bounds(channelNameList[channelList[0]])
286 288
287 289 if not startUTCSecond:
288 290 startUTCSecond = start_index/self.__sample_rate
289 291
290 292 if start_index > startUTCSecond*self.__sample_rate:
291 293 startUTCSecond = start_index/self.__sample_rate
292 294
293 295 if not endUTCSecond:
294 296 endUTCSecond = end_index/self.__sample_rate
295 297
296 298 if end_index < endUTCSecond*self.__sample_rate:
297 299 endUTCSecond = end_index/self.__sample_rate
298 300
299 301 if not nSamples:
300 302 if not ippKm:
301 303 raise ValueError, "[Reading] nSamples or ippKm should be defined"
302 304 nSamples = int(ippKm / (1e6*0.15/self.__sample_rate))
303 305
304 306 channelBoundList = []
305 307 channelNameListFiltered = []
306 308
307 309 for thisIndexChannel in channelList:
308 310 thisChannelName = channelNameList[thisIndexChannel]
309 311 start_index, end_index = self.digitalReadObj.get_bounds(thisChannelName)
310 312 channelBoundList.append((start_index, end_index))
311 313 channelNameListFiltered.append(thisChannelName)
312 314
313 315 self.profileIndex = 0
314 316
315 317 self.__delay = delay
316 318 self.__ippKm = ippKm
317 319 self.__codeType = codeType
318 320 self.__nCode = nCode
319 321 self.__nBaud = nBaud
320 322 self.__code = code
321 323
322 324 self.__datapath = path
323 325 self.__online = online
324 326 self.__channelList = channelList
325 327 self.__channelNameList = channelNameListFiltered
326 328 self.__channelBoundList = channelBoundList
327 329 self.__nSamples = nSamples
328 330 self.__samples_to_read = long(buffer_size*nSamples) #FIJO: AHORA 40
329 331 self.__nChannels = len(self.__channelList)
330 332
331 333 self.__startUTCSecond = startUTCSecond
332 334 self.__endUTCSecond = endUTCSecond
333 335
334 336 self.__timeInterval = 1.0 * self.__samples_to_read/self.__sample_rate #Time interval
335 337
336 338 if online:
337 339 # self.__thisUnixSample = int(endUTCSecond*self.__sample_rate - 4*self.__samples_to_read)
338 340 startUTCSecond = numpy.floor(endUTCSecond)
339 341
340 342 self.__thisUnixSample = long(startUTCSecond*self.__sample_rate) - self.__samples_to_read ##por que en el otro metodo lo primero q se hace es sumar samplestoread
341 343
342 344 self.__data_buffer = numpy.zeros((self.__nChannels, self.__samples_to_read), dtype = numpy.complex)
343 345
344 346 self.__setFileHeader()
345 347 self.isConfig = True
346 348
347 349 print "[Reading] Digital RF Data was found from %s to %s " %(
348 350 datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone),
349 351 datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone)
350 352 )
351 353
352 354 print "[Reading] Starting process from %s to %s" %(datetime.datetime.utcfromtimestamp(startUTCSecond - self.__timezone),
353 355 datetime.datetime.utcfromtimestamp(endUTCSecond - self.__timezone)
354 356 )
355 357
356 358 def __reload(self):
357 359 # print
358 360 # print "%s not in range [%s, %s]" %(
359 361 # datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
360 362 # datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone),
361 363 # datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone)
362 364 # )
363 365 print "[Reading] reloading metadata ..."
364 366
365 367 try:
366 368 self.digitalReadObj.reload(complete_update=True)
367 369 except:
368 370 self.digitalReadObj.reload()
369 371
370 372 start_index, end_index = self.digitalReadObj.get_bounds(self.__channelNameList[self.__channelList[0]])
371 373
372 374 if start_index > self.__startUTCSecond*self.__sample_rate:
373 375 self.__startUTCSecond = 1.0*start_index/self.__sample_rate
374 376
375 377 if end_index > self.__endUTCSecond*self.__sample_rate:
376 378 self.__endUTCSecond = 1.0*end_index/self.__sample_rate
377 379 print
378 380 print "[Reading] New timerange found [%s, %s] " %(
379 381 datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone),
380 382 datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone)
381 383 )
382 384
383 385 return True
384 386
385 387 return False
386 388
    def __readNextBlock(self, seconds=30, volt_scale = 218776):
        '''
        Advance one block and read __samples_to_read samples per channel into
        self.__data_buffer, scaling the raw complex samples by volt_scale.

        Returns True when at least the first channel was read successfully,
        False when the selected time range is exhausted or nothing could be
        read.  Sets self.__flagDiscontinuousBlock on IOError or short reads.
        '''

        #Set the next data
        self.__flagDiscontinuousBlock = False
        self.__thisUnixSample += self.__samples_to_read

        # Not enough samples left before the known end bound (keeps two
        # blocks of slack): give up offline, or reload metadata when online.
        if self.__thisUnixSample + 2*self.__samples_to_read > self.__endUTCSecond*self.__sample_rate:
            print "[Reading] There are no more data into selected time-range"
            if self.__online:
                self.__reload()
            else:
                return False

            if self.__thisUnixSample + 2*self.__samples_to_read > self.__endUTCSecond*self.__sample_rate:
                return False
            # rewind one block so this position is retried after the reload
            self.__thisUnixSample -= self.__samples_to_read

        indexChannel = 0

        dataOk = False

        for thisChannelName in self.__channelNameList: ##TODO VARIOS CHANNELS?

            try:
                result = self.digitalReadObj.read_vector_c81d(self.__thisUnixSample,
                                                              self.__samples_to_read,
                                                              thisChannelName)
            except IOError, e:
                #read next profile
                self.__flagDiscontinuousBlock = True
                # NOTE(review): self.thisSecond is set outside this method -- confirm
                print "[Reading] %s" %datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone), e
                break

            if result.shape[0] != self.__samples_to_read:
                # short read: treat as a discontinuity and abort this block
                self.__flagDiscontinuousBlock = True
                print "[Reading] %s: Too few samples were found, just %d/%d samples" %(datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
                                                                                      result.shape[0],
                                                                                      self.__samples_to_read)
                break

            self.__data_buffer[indexChannel,:] = result*volt_scale

            indexChannel += 1

            dataOk = True

        # timestamp (in UTC seconds) of the first sample of this block
        self.__utctime = self.__thisUnixSample/self.__sample_rate

        if not dataOk:
            return False

        print "[Reading] %s: %d samples <> %f sec" %(datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
                                                     self.__samples_to_read,
                                                     self.__timeInterval)

        # reset the consumer cursor so getData() starts at the new block
        self.__bufferIndex = 0

        return True
447 449
448 450 def __isBufferEmpty(self):
449 451 return self.__bufferIndex > self.__samples_to_read - self.__nSamples #40960 - 40
450 452
    def getData(self, seconds=30, nTries=5):

        '''
        This method gets the data from files and puts it into the dataOut
        object.

        In addition, it increases the buffer counter by one.

        Return:
            data : returns one voltage profile (heights * channels) copied
                from the buffer.  Returns False when there are no more data
                to read in the selected range.

        Affected:
            self.dataOut
            self.profileIndex
            self.flagDiscontinuousBlock
            self.flagIsNewBlock
        '''

        err_counter = 0
        self.dataOut.flagNoData = True

        if self.__isBufferEmpty():

            self.__flagDiscontinuousBlock = False

            # keep trying to read a block; in online mode wait-and-retry up
            # to nTries times, otherwise give up on the first hard failure
            while True:
                if self.__readNextBlock():
                    break
                if self.__thisUnixSample > self.__endUTCSecond*self.__sample_rate:
                    return False

                if self.__flagDiscontinuousBlock:
                    print '[Reading] discontinuous block found ... continue with the next block'
                    continue

                if not self.__online:
                    return False

                err_counter += 1
                if err_counter > nTries:
                    return False

                print '[Reading] waiting %d seconds to read a new block' %seconds
                sleep(seconds)

        # hand out the next profile-sized slice of the buffer
        self.dataOut.data = self.__data_buffer[:,self.__bufferIndex:self.__bufferIndex+self.__nSamples]
        self.dataOut.utctime = (self.__thisUnixSample + self.__bufferIndex)/self.__sample_rate
        self.dataOut.flagNoData = False
        self.dataOut.flagDiscontinuousBlock = self.__flagDiscontinuousBlock
        self.dataOut.profileIndex = self.profileIndex

        self.__bufferIndex += self.__nSamples
        self.profileIndex += 1

        # wrap the profile counter at the end of each full set of profiles
        if self.profileIndex == self.dataOut.nProfiles:
            self.profileIndex = 0

        return True
509 511
510 512 def printInfo(self):
511 513 '''
512 514 '''
513 515 if self.__printInfo == False:
514 516 return
515 517
516 518 # self.systemHeaderObj.printInfo()
517 519 # self.radarControllerHeaderObj.printInfo()
518 520
519 521 self.__printInfo = False
520 522
521 523 def printNumberOfBlock(self):
522 524 '''
523 525 '''
524 526 return
525 527 #print self.profileIndex
526 528
527 529 def run(self, **kwargs):
528 530 '''
529 531 This method will be called many times so here you should put all your code
530 532 '''
531 533
532 534 if not self.isConfig:
533 535 self.setup(**kwargs)
534 536
535 537 self.getData(seconds=self.__delay)
536 538
537 539 return
538 540
class DigitalRFWriter(Operation):
    '''
    Writes voltage data to Digital RF format using the digital_rf library.
    '''

    def __init__(self, **kwargs):
        '''
        Constructor
        '''
        Operation.__init__(self, **kwargs)
        self.dataOut = None

    def setup(self, dataIn, path, set=0, ext='.h5'):
        '''
        Set all initial parameters and create the underlying
        digital_rf.DigitalRFWriter.

        Input:
            dataIn : input data; also used as output data
            path   : destination directory for the Digital RF channel
        '''
        if path is None:
            # keep the old hard-coded location as a fallback; previously the
            # 'path' argument was ignored entirely (bug)
            path = "/home/jchavez/jicamarca/mocked_data/voltage"

        self.__ippSeconds = dataIn.ippSeconds
        self.__deltaH = dataIn.getDeltaH()
        self.__sample_rate = 1e6*0.15/self.__deltaH
        self.__dtype = dataIn.dtype
        if len(dataIn.dtype) == 2:
            # complex dtypes come as (real, imag) pairs; keep the scalar part
            self.__dtype = dataIn.dtype[0]
        self.__nSamples = dataIn.systemHeaderObj.nSamples
        self.__nProfiles = dataIn.nProfiles
        self.__blocks_per_file = dataIn.processingHeaderObj.dataBlocksPerFile

        file_cadence_millisecs = 1.0 * self.__blocks_per_file * self.__nProfiles * self.__nSamples / self.__sample_rate * 1000
        # NOTE(review): named "secs" but computed from a millisecond value --
        # confirm the units expected by digital_rf (subdir_cadence_secs)
        sub_cadence_secs = 10 * file_cadence_millisecs
        sample_rate_fraction = Fraction(self.__sample_rate).limit_denominator()
        sample_rate_numerator = long(sample_rate_fraction.numerator)
        sample_rate_denominator = long(sample_rate_fraction.denominator)
        start_global_index = dataIn.utctime * self.__sample_rate
        # one profile worth of structured (r, i) samples, reused on every write
        self.arr_data = numpy.ones((self.__nSamples, 1), dtype=[('r', self.__dtype), ('i', self.__dtype)])
        uuid = 'prueba'
        compression_level = 1
        checksum = False
        is_complex = True
        num_subchannels = 1
        is_continuous = True
        marching_periods = False

        self.digitalWriteObj = digital_rf.DigitalRFWriter(path, self.__dtype, sub_cadence_secs,
                                                          file_cadence_millisecs, start_global_index,
                                                          sample_rate_numerator, sample_rate_denominator, uuid, compression_level, checksum,
                                                          is_complex, num_subchannels, is_continuous, marching_periods)

        self.isConfig = True

        return

    def run(self, dataIn, path=None, **kwargs):
        '''
        Called once per pipeline iteration: convert the first channel of
        dataIn into the structured (r, i) buffer and append it to the
        Digital RF output.

        Inputs:

            dataIn : object with the data

        '''
        self.dataOut = dataIn

        if not self.isConfig:
            self.setup(dataIn, path, **kwargs)

        samples = len(self.dataOut.data[0])

        for i in range(samples):
            self.arr_data[i]['r'] = dataIn.data[0][i].real
            self.arr_data[i]['i'] = dataIn.data[0][i].imag

        if dataIn.flagNoData:
            # nothing left to write: close the writer and stop instead of
            # writing to an already-closed file (previous code fell through
            # to rf_write after close, bug)
            self.digitalWriteObj.close()
            return

        self.digitalWriteObj.rf_write(self.arr_data)
if __name__ == '__main__':
    # simple smoke-test loop: read forever from a fixed mocked-data path
    reader = DigitalRFReader()

    while True:
        reader.run(path='/home/jchavez/jicamarca/mocked_data/')
        # reader.printInfo()
        reader.printNumberOfBlock()
@@ -1,739 +1,739
1 1 '''
2 2 Created on Jul 2, 2014
3 3
4 4 @author: roj-idl71
5 5 '''
6 6
7 7 import numpy
8 8
9 9 from jroIO_base import LOCALTIME, JRODataReader, JRODataWriter
10 10 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
11 11 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
12 12 from schainpy.model.data.jrodata import Voltage
13 13 import zmq
14 14 import tempfile
15 15 from StringIO import StringIO
16 16 # from _sha import blocksize
17 17
18 18 class VoltageReader(JRODataReader, ProcessingUnit):
19 19 """
20 20 Esta clase permite leer datos de voltage desde archivos en formato rawdata (.r). La lectura
21 21 de los datos siempre se realiza por bloques. Los datos leidos (array de 3 dimensiones:
22 22 perfiles*alturas*canales) son almacenados en la variable "buffer".
23 23
24 24 perfiles * alturas * canales
25 25
26 26 Esta clase contiene instancias (objetos) de las clases BasicHeader, SystemHeader,
27 27 RadarControllerHeader y Voltage. Los tres primeros se usan para almacenar informacion de la
28 28 cabecera de datos (metadata), y el cuarto (Voltage) para obtener y almacenar un perfil de
29 29 datos desde el "buffer" cada vez que se ejecute el metodo "getData".
30 30
31 31 Example:
32 32
33 33 dpath = "/home/myuser/data"
34 34
35 35 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
36 36
37 37 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
38 38
39 39 readerObj = VoltageReader()
40 40
41 41 readerObj.setup(dpath, startTime, endTime)
42 42
43 43 while(True):
44 44
45 45 #to get one profile
46 46 profile = readerObj.getData()
47 47
48 48 #print the profile
49 49 print profile
50 50
51 51 #If you want to see all datablock
52 52 print readerObj.datablock
53 53
54 54 if readerObj.flagNoMoreFiles:
55 55 break
56 56
57 57 """
58 58
59 59 ext = ".r"
60 60
61 61 optchar = "D"
62 62 dataOut = None
63 63
    def __init__(self, **kwargs):
        """
        Initializer of the VoltageReader class for reading voltage data.

        Input:
            dataOut : object of the Voltage class.  This object is used to
                store one data profile each time a request is made
                (getData).  The profile is taken from the data buffer; when
                the buffer is empty a new data block is read from file.
                If this parameter is not given, one is created internally.

        Affected:
            self.dataOut

        Return:
            None
        """

        ProcessingUnit.__init__(self, **kwargs)

        self.isConfig = False

        self.datablock = None

        self.utc = 0

        self.ext = ".r"

        self.optchar = "D"

        self.basicHeaderObj = BasicHeader(LOCALTIME)

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()

        self.online = 0

        self.fp = None

        self.idFile = None

        self.dtype = None

        self.fileSizeByHeader = None

        self.filenameList = []

        self.filename = None

        self.fileSize = None

        self.firstHeaderSize = 0

        self.basicHeaderSize = 24

        self.pathList = []

        self.filenameList = []

        self.lastUTTime = 0

        self.maxTimeStep = 30

        self.flagNoMoreFiles = 0

        self.set = 0

        self.path = None

        # sentinel: forces a block read on the first getData() call
        self.profileIndex = 2**32-1

        self.delay = 3 #seconds

        self.nTries = 3 #quantity tries

        self.nFiles = 3 #number of files for searching

        self.nReadBlocks = 0

        self.flagIsNewFile = 1

        self.__isFirstTimeOnline = 1

        # self.ippSeconds = 0

        self.flagDiscontinuousBlock = 0

        self.flagIsNewBlock = 0

        self.nTotalBlocks = 0

        self.blocksize = 0

        self.dataOut = self.createObjByDefault()

        self.nTxs = 1

        self.txIndex = 0
166 166
167 167 def createObjByDefault(self):
168 168
169 169 dataObj = Voltage()
170 170
171 171 return dataObj
172 172
173 173 def __hasNotDataInBuffer(self):
174 174
175 175 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock*self.nTxs:
176 176 return 1
177 177
178 178 return 0
179 179
180 180
181 181 def getBlockDimension(self):
182 182 """
183 183 Obtiene la cantidad de puntos a leer por cada bloque de datos
184 184
185 185 Affected:
186 186 self.blocksize
187 187
188 188 Return:
189 189 None
190 190 """
191 191 pts2read = self.processingHeaderObj.profilesPerBlock * self.processingHeaderObj.nHeights * self.systemHeaderObj.nChannels
192 192 self.blocksize = pts2read
193 193
194 194
195 195
    def readBlock(self):
        """
        Read a data block from the current file-pointer position (self.fp)
        and update every block-related attribute (metadata + data).  The
        data read is stored in the buffer and the buffer counter is reset
        to 0.

        Inputs:
            None

        Return:
            None

        Affected:
            self.profileIndex
            self.datablock
            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks

        Exceptions:
            If a read block is not a valid data block
        """

        #        if self.server is not None:
        #            self.zBlock = self.receiver.recv()
        #            self.zHeader = self.zBlock[:24]
        #            self.zDataBlock = self.zBlock[24:]
        #            junk = numpy.fromstring(self.zDataBlock, numpy.dtype([('real','<i4'),('imag','<i4')]))
        #            self.processingHeaderObj.profilesPerBlock = 240
        #            self.processingHeaderObj.nHeights = 248
        #            self.systemHeaderObj.nChannels
        #        else:
        current_pointer_location = self.fp.tell()
        junk = numpy.fromfile( self.fp, self.dtype, self.blocksize )

        try:
            # on-disk layout: profiles x heights x channels
            junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
        except:
            #print "The read block (%3d) has not enough data" %self.nReadBlocks

            # short read: wait for more data (online mode) and retry once
            if self.waitDataBlock(pointer_location=current_pointer_location):
                junk = numpy.fromfile( self.fp, self.dtype, self.blocksize )
                junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
        #                return 0

        #Dimensions : nChannels, nProfiles, nSamples

        junk = numpy.transpose(junk, (2,0,1))
        # combine the structured (real, imag) fields into complex samples
        self.datablock = junk['real'] + junk['imag']*1j

        self.profileIndex = 0

        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1

        self.nTotalBlocks += 1
        self.nReadBlocks += 1

        return 1
258 256
    def getFirstHeader(self):
        """Copy the per-file headers into self.dataOut and derive the
        profile/height/channel geometry from them."""

        self.getBasicHeader()

        self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()

        self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()

        self.dataOut.processingHeaderObj = self.processingHeaderObj.copy()

        if self.nTxs > 1:
            # each profile holds nTxs pulses, so the effective IPP shrinks
            self.dataOut.radarControllerHeaderObj.ippSeconds = self.radarControllerHeaderObj.ippSeconds/self.nTxs

        #Time interval and code are propierties of dataOut. Its value depends of radarControllerHeaderObj.

        #        self.dataOut.timeInterval = self.radarControllerHeaderObj.ippSeconds * self.processingHeaderObj.nCohInt
        #
        #        if self.radarControllerHeaderObj.code is not None:
        #
        #            self.dataOut.nCode = self.radarControllerHeaderObj.nCode
        #
        #            self.dataOut.nBaud = self.radarControllerHeaderObj.nBaud
        #
        #            self.dataOut.code = self.radarControllerHeaderObj.code

        self.dataOut.dtype = self.dtype

        self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock

        self.dataOut.heightList = numpy.arange(self.processingHeaderObj.nHeights) *self.processingHeaderObj.deltaHeight + self.processingHeaderObj.firstHeight

        self.dataOut.channelList = range(self.systemHeaderObj.nChannels)

        self.dataOut.nCohInt = self.processingHeaderObj.nCohInt

        self.dataOut.flagDecodeData = self.processingHeaderObj.flag_decode # assume the data is not decoded

        self.dataOut.flagDeflipData = self.processingHeaderObj.flag_deflip # assume the data is not deflipped

        self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft
297 297
    def reshapeData(self):
        """When nTxs != 1, reshape the datablock so each transmitter pulse
        becomes its own profile, adjusting nProfiles, heightList and
        ippSeconds accordingly.  No-op when nTxs == 1 or nTxs < 0."""

        if self.nTxs < 0:
            return

        if self.nTxs == 1:
            return

        if self.nTxs < 1 and self.processingHeaderObj.profilesPerBlock % (1./self.nTxs) != 0:
            raise ValueError, "1./nTxs (=%f), should be a multiple of nProfiles (=%d)" %(1./self.nTxs, self.processingHeaderObj.profilesPerBlock)

        if self.nTxs > 1 and self.processingHeaderObj.nHeights % self.nTxs != 0:
            raise ValueError, "nTxs (=%d), should be a multiple of nHeights (=%d)" %(self.nTxs, self.processingHeaderObj.nHeights)

        # trade heights for profiles: (ch, profiles*nTxs, heights/nTxs)
        self.datablock = self.datablock.reshape((self.systemHeaderObj.nChannels, self.processingHeaderObj.profilesPerBlock*self.nTxs, self.processingHeaderObj.nHeights/self.nTxs))

        self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock*self.nTxs
        self.dataOut.heightList = numpy.arange(self.processingHeaderObj.nHeights/self.nTxs) *self.processingHeaderObj.deltaHeight + self.processingHeaderObj.firstHeight
        self.dataOut.radarControllerHeaderObj.ippSeconds = self.radarControllerHeaderObj.ippSeconds/self.nTxs

        return
319 319
320 320 def readFirstHeaderFromServer(self):
321 321
322 322 self.getFirstHeader()
323 323
324 324 self.firstHeaderSize = self.basicHeaderObj.size
325 325
326 326 datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
327 327 if datatype == 0:
328 328 datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
329 329 elif datatype == 1:
330 330 datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')])
331 331 elif datatype == 2:
332 332 datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')])
333 333 elif datatype == 3:
334 334 datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')])
335 335 elif datatype == 4:
336 336 datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')])
337 337 elif datatype == 5:
338 338 datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')])
339 339 else:
340 340 raise ValueError, 'Data type was not defined'
341 341
342 342 self.dtype = datatype_str
343 343 #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
344 344 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1)
345 345 # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
346 346 # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
347 347 self.getBlockDimension()
348 348
349 349
350 350 def getFromServer(self):
351 351 self.flagDiscontinuousBlock = 0
352 352 self.profileIndex = 0
353 353 self.flagIsNewBlock = 1
354 354 self.dataOut.flagNoData = False
355 355 self.nTotalBlocks += 1
356 356 self.nReadBlocks += 1
357 357 self.blockPointer = 0
358 358
359 359 block = self.receiver.recv()
360 360
361 361 self.basicHeaderObj.read(block[self.blockPointer:])
362 362 self.blockPointer += self.basicHeaderObj.length
363 363 self.systemHeaderObj.read(block[self.blockPointer:])
364 364 self.blockPointer += self.systemHeaderObj.length
365 365 self.radarControllerHeaderObj.read(block[self.blockPointer:])
366 366 self.blockPointer += self.radarControllerHeaderObj.length
367 367 self.processingHeaderObj.read(block[self.blockPointer:])
368 368 self.blockPointer += self.processingHeaderObj.length
369 369 self.readFirstHeaderFromServer()
370 370
371 371 timestamp = self.basicHeaderObj.get_datatime()
372 372 print '[Reading] - Block {} - {}'.format(self.nTotalBlocks, timestamp)
373 373 current_pointer_location = self.blockPointer
374 374 junk = numpy.fromstring( block[self.blockPointer:], self.dtype, self.blocksize )
375 375
376 376 try:
377 377 junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
378 378 except:
379 379 #print "The read block (%3d) has not enough data" %self.nReadBlocks
380 380 if self.waitDataBlock(pointer_location=current_pointer_location):
381 381 junk = numpy.fromstring( block[self.blockPointer:], self.dtype, self.blocksize )
382 382 junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
383 383 # return 0
384 384
385 385 #Dimensions : nChannels, nProfiles, nSamples
386 386
387 387 junk = numpy.transpose(junk, (2,0,1))
388 388 self.datablock = junk['real'] + junk['imag'] * 1j
389 389 self.profileIndex = 0
390 390 if self.selBlocksize == None: self.selBlocksize = self.dataOut.nProfiles
391 391 if self.selBlocktime != None:
392 392 if self.dataOut.nCohInt is not None:
393 393 nCohInt = self.dataOut.nCohInt
394 394 else:
395 395 nCohInt = 1
396 396 self.selBlocksize = int(self.dataOut.nProfiles*round(self.selBlocktime/(nCohInt*self.dataOut.ippSeconds*self.dataOut.nProfiles)))
397 397 self.dataOut.data = self.datablock[:,self.profileIndex:self.profileIndex+self.selBlocksize,:]
398 398 datasize = self.dataOut.data.shape[1]
399 399 if datasize < self.selBlocksize:
400 400 buffer = numpy.zeros((self.dataOut.data.shape[0], self.selBlocksize, self.dataOut.data.shape[2]), dtype = 'complex')
401 401 buffer[:,:datasize,:] = self.dataOut.data
402 402 self.dataOut.data = buffer
403 403 self.profileIndex = blockIndex
404 404
405 405 self.dataOut.flagDataAsBlock = True
406 406 self.flagIsNewBlock = 1
407 407 self.dataOut.realtime = self.online
408 408
409 409 return self.dataOut.data
410 410
    def getData(self):
        """
        getData takes one data unit (a profile) from the read buffer and
        copies it into self.dataOut (a "Voltage" object) together with all
        its associated metadata.  When the read buffer has no data left, a
        new data block is read with "readNextBlock".

        It also increases the buffer counter "self.profileIndex" by 1.

        Return:

            If the flag self.getByBlock is set, the whole block is copied
            into self.dataOut and self.profileIndex equals the total number
            of profiles read from the file.

            If self.getByBlock == False:

                self.dataOut.data = buffer[:, thisProfile, :]

                shape = [nChannels, nHeis]

            If self.getByBlock == True:

                self.dataOut.data = buffer[:, :, :]

                shape = [nChannels, nProfiles, nHeis]

        Affected:
            self.dataOut
            self.profileIndex

        Affected:
            self.dataOut
            self.profileIndex
            self.flagDiscontinuousBlock
            self.flagIsNewBlock
        """
        if self.flagNoMoreFiles:
            self.dataOut.flagNoData = True
            print 'Process finished'
            return 0
        self.flagDiscontinuousBlock = 0
        self.flagIsNewBlock = 0
        if self.__hasNotDataInBuffer():
            if not( self.readNextBlock() ):
                return 0

        self.getFirstHeader()

        self.reshapeData()
        if self.datablock is None:
            self.dataOut.flagNoData = True
            return 0

        if not self.getByBlock:

            """
            Return profile by profile

            If nTxs > 1 then one profile is divided by nTxs and number of total
            blocks is increased by nTxs (nProfiles *= nTxs)
            """
            self.dataOut.flagDataAsBlock = False
            self.dataOut.data = self.datablock[:,self.profileIndex,:]
            self.dataOut.profileIndex = self.profileIndex

            self.profileIndex += 1

        #        elif self.selBlocksize==None or self.selBlocksize==self.dataOut.nProfiles:
        #            """
        #            Return all block
        #            """
        #            self.dataOut.flagDataAsBlock = True
        #            self.dataOut.data = self.datablock
        #            self.dataOut.profileIndex = self.dataOut.nProfiles - 1
        #
        #            self.profileIndex = self.dataOut.nProfiles

        else:
            """
            Return a block
            """
            if self.selBlocksize == None: self.selBlocksize = self.dataOut.nProfiles
            if self.selBlocktime != None:
                if self.dataOut.nCohInt is not None:
                    nCohInt = self.dataOut.nCohInt
                else:
                    nCohInt = 1
                self.selBlocksize = int(self.dataOut.nProfiles*round(self.selBlocktime/(nCohInt*self.dataOut.ippSeconds*self.dataOut.nProfiles)))

            self.dataOut.data = self.datablock[:,self.profileIndex:self.profileIndex+self.selBlocksize,:]
            self.profileIndex += self.selBlocksize
            datasize = self.dataOut.data.shape[1]

            if datasize < self.selBlocksize:
                # not enough profiles left in this block: zero-pad, then keep
                # reading blocks and stacking until the slice is full
                buffer = numpy.zeros((self.dataOut.data.shape[0],self.selBlocksize,self.dataOut.data.shape[2]), dtype = 'complex')
                buffer[:,:datasize,:] = self.dataOut.data

                while datasize < self.selBlocksize: #Not enough profiles to fill the block
                    if not( self.readNextBlock() ):
                        return 0
                    self.getFirstHeader()
                    self.reshapeData()
                    if self.datablock is None:
                        self.dataOut.flagNoData = True
                        return 0
                    #stack data
                    blockIndex = self.selBlocksize - datasize
                    datablock1 = self.datablock[:,:blockIndex,:]

                    buffer[:,datasize:datasize+datablock1.shape[1],:] = datablock1
                    datasize += datablock1.shape[1]

                self.dataOut.data = buffer
                # continue consuming the last block after the stacked part
                self.profileIndex = blockIndex

            self.dataOut.flagDataAsBlock = True
            self.dataOut.nProfiles = self.dataOut.data.shape[1]

        self.dataOut.flagNoData = False

        self.getBasicHeader()

        self.dataOut.realtime = self.online

        return self.dataOut.data
536 536
537 537 class VoltageWriter(JRODataWriter, Operation):
538 538 """
539 539 Esta clase permite escribir datos de voltajes a archivos procesados (.r). La escritura
540 540 de los datos siempre se realiza por bloques.
541 541 """
542 542
543 543 ext = ".r"
544 544
545 545 optchar = "D"
546 546
547 547 shapeBuffer = None
548 548
549 549
    def __init__(self, **kwargs):
        """
        Initializer of the VoltageWriter class for writing voltage data.

        Affected:
            self.dataOut

        Return: None
        """
        Operation.__init__(self, **kwargs)

        self.nTotalBlocks = 0

        self.profileIndex = 0

        self.isConfig = False

        self.fp = None

        self.flagIsNewFile = 1

        self.blockIndex = 0

        self.flagIsNewBlock = 0

        self.setFile = None

        self.dtype = None

        self.path = None

        self.filename = None

        self.basicHeaderObj = BasicHeader(LOCALTIME)

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()
590 590
591 591 def hasAllDataInBuffer(self):
592 592 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
593 593 return 1
594 594 return 0
595 595
596 596
597 597 def setBlockDimension(self):
598 598 """
599 599 Obtiene las formas dimensionales del los subbloques de datos que componen un bloque
600 600
601 601 Affected:
602 602 self.shape_spc_Buffer
603 603 self.shape_cspc_Buffer
604 604 self.shape_dc_Buffer
605 605
606 606 Return: None
607 607 """
608 608 self.shapeBuffer = (self.processingHeaderObj.profilesPerBlock,
609 609 self.processingHeaderObj.nHeights,
610 610 self.systemHeaderObj.nChannels)
611 611
612 612 self.datablock = numpy.zeros((self.systemHeaderObj.nChannels,
613 613 self.processingHeaderObj.profilesPerBlock,
614 614 self.processingHeaderObj.nHeights),
615 615 dtype=numpy.dtype('complex64'))
616 616
617 617 def writeBlock(self):
618 618 """
619 619 Escribe el buffer en el file designado
620 620
621 621 Affected:
622 622 self.profileIndex
623 623 self.flagIsNewFile
624 624 self.flagIsNewBlock
625 625 self.nTotalBlocks
626 626 self.blockIndex
627 627
628 628 Return: None
629 629 """
630 630 data = numpy.zeros( self.shapeBuffer, self.dtype )
631 631
632 632 junk = numpy.transpose(self.datablock, (1,2,0))
633 633
634 634 data['real'] = junk.real
635 635 data['imag'] = junk.imag
636 636
637 637 data = data.reshape( (-1) )
638 638
639 639 data.tofile( self.fp )
640 640
641 641 self.datablock.fill(0)
642 642
643 643 self.profileIndex = 0
644 644 self.flagIsNewFile = 0
645 645 self.flagIsNewBlock = 1
646 646
647 647 self.blockIndex += 1
648 648 self.nTotalBlocks += 1
649 649
650 650 # print "[Writing] Block = %04d" %self.blockIndex
651 651
652 652 def putData(self):
653 653 """
654 654 Setea un bloque de datos y luego los escribe en un file
655 655
656 656 Affected:
657 657 self.flagIsNewBlock
658 658 self.profileIndex
659 659
660 660 Return:
661 661 0 : Si no hay data o no hay mas files que puedan escribirse
662 662 1 : Si se escribio la data de un bloque en un file
663 663 """
664 664 if self.dataOut.flagNoData:
665 665 return 0
666 666
667 667 self.flagIsNewBlock = 0
668 668
669 669 if self.dataOut.flagDiscontinuousBlock:
670 670 self.datablock.fill(0)
671 671 self.profileIndex = 0
672 672 self.setNextFile()
673 673
674 674 if self.profileIndex == 0:
675 675 self.setBasicHeader()
676 676
677 677 self.datablock[:,self.profileIndex,:] = self.dataOut.data
678 678
679 679 self.profileIndex += 1
680 680
681 681 if self.hasAllDataInBuffer():
682 682 #if self.flagIsNewFile:
683 683 self.writeNextBlock()
684 684 # self.setFirstHeader()
685 685
686 686 return 1
687 687
688 688 def __getBlockSize(self):
689 689 '''
690 690 Este metodos determina el cantidad de bytes para un bloque de datos de tipo Voltage
691 691 '''
692 692
693 693 dtype_width = self.getDtypeWidth()
694 694
695 695 blocksize = int(self.dataOut.nHeights * self.dataOut.nChannels * self.profilesPerBlock * dtype_width * 2)
696 696
697 697 return blocksize
698 698
699 699 def setFirstHeader(self):
700 700
701 701 """
702 702 Obtiene una copia del First Header
703 703
704 704 Affected:
705 705 self.systemHeaderObj
706 706 self.radarControllerHeaderObj
707 707 self.dtype
708 708
709 709 Return:
710 710 None
711 711 """
712 712
713 713 self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
714 714 self.systemHeaderObj.nChannels = self.dataOut.nChannels
715 715 self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()
716 716
717 717 self.processingHeaderObj.dtype = 0 # Voltage
718 718 self.processingHeaderObj.blockSize = self.__getBlockSize()
719 719 self.processingHeaderObj.profilesPerBlock = self.profilesPerBlock
720 720 self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
721 721 self.processingHeaderObj.nWindows = 1 #podria ser 1 o self.dataOut.processingHeaderObj.nWindows
722 722 self.processingHeaderObj.nCohInt = self.dataOut.nCohInt
723 723 self.processingHeaderObj.nIncohInt = 1 # Cuando la data de origen es de tipo Voltage
724 724 self.processingHeaderObj.totalSpectra = 0 # Cuando la data de origen es de tipo Voltage
725 725
726 726 if self.dataOut.code is not None:
727 727 self.processingHeaderObj.code = self.dataOut.code
728 728 self.processingHeaderObj.nCode = self.dataOut.nCode
729 729 self.processingHeaderObj.nBaud = self.dataOut.nBaud
730 730
731 731 if self.processingHeaderObj.nWindows != 0:
732 732 self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
733 733 self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
734 734 self.processingHeaderObj.nHeights = self.dataOut.nHeights
735 735 self.processingHeaderObj.samplesWin = self.dataOut.nHeights
736 736
737 737 self.processingHeaderObj.processFlags = self.getProcessFlags()
738 738
739 739 self.setBasicHeader()
General Comments 0
You need to be logged in to leave comments. Login now