##// END OF EJS Templates
manual header update
Christianpl -
r1788:c7146b87b3fa
parent child
Show More
@@ -1,1618 +1,1619
1 1 """
2 2 Created on Jul 2, 2014
3 3
4 4 @author: roj-idl71
5 5 """
6 6 import os
7 7 import sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import inspect
13 13 import time
14 14 import datetime
15 15 import zmq
16 16
17 17 from schainpy.model.proc.jroproc_base import Operation, MPDecorator
18 18 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
19 19 from schainpy.model.data.jroheaderIO import get_dtype_index, get_numpy_dtype, get_procflag_dtype, get_dtype_width
20 20 from schainpy.utils import log
21 21 import schainpy.admin
22 22
# When True, basic-header timestamps are interpreted as local time
# (the flag is passed to BasicHeader on construction).
LOCALTIME = True

# Fixed character width consumed by each strptime-style directive.
# Used by parse_format() to slice date fields out of fixed-width
# filenames/folder names before calling datetime.strptime().
DT_DIRECTIVES = {
    '%Y': 4,
    '%y': 2,
    '%m': 2,
    '%d': 2,
    '%j': 3,
    '%H': 2,
    '%M': 2,
    '%S': 2,
    '%f': 6
}
35 35
36 36
def isNumber(cad):
    """
    Check whether a string (or other value) can be converted to a number.

    Input:
        cad : value to test; anything accepted by float() counts as numeric

    Return:
        True  : the value is convertible to a number
        False : it is not
    """
    try:
        float(cad)
        return True
    except (TypeError, ValueError):
        # Narrowed from a bare ``except`` so unrelated exceptions
        # (KeyboardInterrupt, SystemExit, ...) are not swallowed.
        return False
55 55
56 56
def isFileInEpoch(filename, startUTSeconds, endUTSeconds):
    """
    Determine whether a Jicamarca data file holds data inside the given
    epoch range.

    Inputs:
        filename : full path of the data file in Jicamarca format (.r)

        startUTSeconds : start of the selected range, in seconds
                         counted from 01/01/1970.
        endUTSeconds : end of the selected range, in seconds
                       counted from 01/01/1970.

    Return:
        1 if the file's first basic-header timestamp falls inside
        [startUTSeconds, endUTSeconds) -- start inclusive, end
        exclusive -- 0 otherwise.

    Errors:
        An unreadable file or an invalid header is reported on stdout
        and makes the function return 0.
    """
    basicHeaderObj = BasicHeader(LOCALTIME)

    try:

        fp = open(filename, 'rb')
    except IOError:
        print("The file %s can't be opened" % (filename))
        return 0

    # Only the first basic header is needed: it carries the file's
    # starting timestamp (utc).
    sts = basicHeaderObj.read(fp)
    fp.close()

    if not(sts):
        print("Skipping the file %s because it has not a valid header" % (filename))
        return 0

    # Half-open interval check on the header timestamp.
    if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
        return 0

    return 1
98 98
99 99
def isTimeInRange(thisTime, startTime, endTime):
    """Return 1 when ``thisTime`` lies inside [startTime, endTime], else 0.

    When ``endTime < startTime`` the interval is assumed to wrap past
    midnight, i.e. it covers [startTime, 24h) plus [0h, endTime].
    """
    if endTime >= startTime:
        # Plain (non-wrapping) interval: inside means between both ends.
        inside = startTime <= thisTime <= endTime
    else:
        # Wrapping interval: outside only when strictly between the ends.
        inside = not (endTime < thisTime < startTime)
    return 1 if inside else 0
109 109
110 110
def isFileInTimeRange(filename, startDate, endDate, startTime, endTime):
    """
    Return the file's first-block datetime if the data file overlaps the
    given time range, None otherwise.

    Inputs:
        filename : full path of the data file in Jicamarca format (.r)

        startDate : start of the selected range (datetime.date)

        endDate : end of the selected range (datetime.date)

        startTime : start of the selected range (datetime.time)

        endTime : end of the selected range (datetime.time)

    Return:
        datetime.datetime of the first data block when the file's
        [first block, last block] time span intersects the requested
        range; None otherwise, or when the file cannot be opened or any
        of its headers is invalid.
    """

    try:
        fp = open(filename, 'rb')
    except IOError:
        print("The file %s can't be opened" % (filename))
        return None

    firstBasicHeaderObj = BasicHeader(LOCALTIME)
    systemHeaderObj = SystemHeader()

    radarControllerHeaderObj = RadarControllerHeader()
    processingHeaderObj = ProcessingHeader()

    lastBasicHeaderObj = BasicHeader(LOCALTIME)

    # Read the four leading headers in file order; any failure means
    # the file is unusable.
    sts = firstBasicHeaderObj.read(fp)

    if not(sts):
        print("[Reading] Skipping the file %s because it has not a valid header" % (filename))
        return None

    if not systemHeaderObj.read(fp):
        return None

    if not radarControllerHeaderObj.read(fp):
        return None

    if not processingHeaderObj.read(fp):
        return None

    filesize = os.path.getsize(filename)

    # Offset back from EOF to the basic header of the last data block:
    # one block plus its 24-byte basic header.
    offset = processingHeaderObj.blockSize + 24  # header size

    if filesize <= offset:
        print("[Reading] %s: This file has not enough data" % filename)
        return None

    fp.seek(-offset, 2)

    sts = lastBasicHeaderObj.read(fp)

    fp.close()

    thisDatetime = lastBasicHeaderObj.datatime
    thisTime_last_block = thisDatetime.time()

    thisDatetime = firstBasicHeaderObj.datatime
    thisDate = thisDatetime.date()
    thisTime_first_block = thisDatetime.time()

    # General case
    # o>>>>>>>>>>>>>><<<<<<<<<<<<<<o
    #-----------o----------------------------o-----------
    # startTime endTime

    if endTime >= startTime:
        # Non-wrapping range: reject when the file ends before the range
        # starts or begins after it ends.
        if (thisTime_last_block < startTime) or (thisTime_first_block > endTime):
            return None

        return thisDatetime

    # If endTime < startTime then endTime belongs to the next day

    #<<<<<<<<<<<o o>>>>>>>>>>>
    #-----------o----------------------------o-----------
    # endTime startTime

    if (thisDate == startDate) and (thisTime_last_block < startTime):
        return None

    if (thisDate == endDate) and (thisTime_first_block > endTime):
        return None

    if (thisTime_last_block < startTime) and (thisTime_first_block > endTime):
        return None

    return thisDatetime
213 213
214 214
def isFolderInDateRange(folder, startDate=None, endDate=None):
    """Return 1 when ``folder`` names a radar day folder inside the range.

    The folder basename must follow the "?YYYYDDD" convention
    (YYYY = year, DDD = day of year).  When both ``startDate`` and
    ``endDate`` are given (datetime.date), the folder's date must lie in
    [startDate, endDate]; otherwise only the name format is checked.

    Return:
        1 when the folder qualifies, 0 otherwise (a format error is
        also reported on stdout).
    """
    name = os.path.basename(folder)

    if not isRadarFolder(name):
        print("The folder %s has not the rigth format" % folder)
        return 0

    if not (startDate and endDate):
        # No bounds supplied: format check alone decides.
        return 1

    folderDate = getDateFromRadarFolder(name)
    return 1 if startDate <= folderDate <= endDate else 0
254 254
255 255
def isFileInDateRange(filename, startDate=None, endDate=None):
    """
    Return 1 if the data file's encoded date lies inside the given range.

    Inputs:
        filename : full name of the data file in Jicamarca format (.r)

            Its basename should look like "?YYYYDDDsss"

            where:
                YYYY : year (e.g. 2015)
                DDD : day of the year (e.g. 305)
                sss : set

        startDate : start of the selected range (datetime.date)

        endDate : end of the selected range (datetime.date)

    Return:
        1 when the name parses and (if both bounds are given) the date
        lies in [startDate, endDate]; 0 otherwise.  A badly formatted
        name is also reported on stdout.
    """

    basename = os.path.basename(filename)

    if not isRadarFile(basename):
        print("The filename %s has not the rigth format" % filename)
        return 0

    # Date filtering is applied only when both bounds are provided.
    if startDate and endDate:
        thisDate = getDateFromRadarFile(basename)

        if thisDate < startDate:
            return 0

        if thisDate > endDate:
            return 0

    return 1
297 297
298 298
def getFileFromSet(path, ext, set):
    """
    Return the filename in ``path`` matching the requested set number.

    Directory entries are expected to follow the Jicamarca layout
    "HYYYYDDDSSS.ext" (H = one-letter prefix, YYYY = year, DDD = day of
    year, SSS = set).  The year/doy used for the search come from the
    last parseable entry of the directory listing (original behaviour,
    which depends on os.listdir() ordering).  When no file matches the
    set, the lexicographically last valid file is returned instead.

    Inputs:
        path : directory to scan
        ext  : file extension to accept (case-insensitive)
        set  : set number (SSS field) to look for

    Return:
        str filename (without path) or None when the directory holds no
        parseable entries.
    """
    validFilelist = []
    fileList = os.listdir(path)

    # 0 1234 567 89A BCDE
    # H YYYY DDD SSS .ext

    year = doy = None
    for thisFile in fileList:
        try:
            year = int(thisFile[1:5])
            doy = int(thisFile[5:8])
        except ValueError:
            # Entry does not follow the naming convention; skip it.
            continue

        if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
            continue

        validFilelist.append(thisFile)

    if year is None:
        # No entry had a parseable YYYYDDD field.  The original code
        # fell through with 'year'/'doy' unbound and raised NameError.
        return None

    myfile = fnmatch.filter(
        validFilelist, '*%4.4d%3.3d%3.3d*' % (year, doy, set))

    if len(myfile) != 0:
        return myfile[0]

    filename = '*%4.4d%3.3d%3.3d%s' % (year, doy, set, ext.lower())
    print('the filename %s does not exist' % filename)
    print('...going to the last file: ')

    if validFilelist:
        # Fall back to the last file in case-insensitive sort order.
        validFilelist = sorted(validFilelist, key=str.lower)
        return validFilelist[-1]

    return None
333 333
334 334
def getlastFileFromPath(path, ext):
    """
    Return the last data file (case-insensitive sort order) in ``path``.

    Only entries following the "PYYYYDDDSSS.ext" convention are kept:
    characters 1-4 (year) and 5-7 (day of year) must be numeric and the
    extension must match ``ext``.

    Input:
        path : directory containing the data files
        ext  : extension of the files to consider

    Return:
        The last matching filename (without path), or None when nothing
        qualifies.
    """
    wantedExt = ext.lower()
    candidates = []

    # Positions:  0 1234 567 89A BCDE
    #             H YYYY DDD SSS .ext
    for entry in os.listdir(path):

        yearField = entry[1:5]
        doyField = entry[5:8]

        if not isNumber(yearField) or not isNumber(doyField):
            continue

        int(yearField)
        int(doyField)

        if os.path.splitext(entry)[-1].lower() != wantedExt:
            continue

        candidates.append(entry)

    if not candidates:
        return None

    return sorted(candidates, key=str.lower)[-1]
376 376
377 377
def isRadarFolder(folder):
    """
    Return 1 if ``folder`` matches the radar day-folder convention
    "?YYYYDDD" (characters 1-4 parse as a year, 5-7 as a day of year),
    0 otherwise.
    """
    try:
        year = int(folder[1:5])
        doy = int(folder[5:8])
    except (ValueError, TypeError):
        # Narrowed from a bare ``except``: only parse failures mean
        # "not a radar folder".
        return 0

    return 1
386 386
387 387
def isRadarFile(file):
    """
    Return 1 if ``file`` matches the radar filename convention
    "?YYYYDDDSSS.ext" (characters 1-4 parse as a year, 5-7 as a day of
    year, 8-10 as a set number), 0 otherwise.
    """
    try:
        year = int(file[1:5])
        doy = int(file[5:8])
        set = int(file[8:11])
    except (ValueError, TypeError):
        # Narrowed from a bare ``except``: only parse failures mean
        # "not a radar file".
        return 0

    return 1
397 397
398 398
def getDateFromRadarFile(file):
    """
    Return the datetime.date encoded in a "?YYYYDDDSSS.ext" filename,
    or None when the name does not follow the convention.
    """
    try:
        year = int(file[1:5])
        doy = int(file[5:8])
        int(file[8:11])  # validate the set field; value itself is unused
    except (ValueError, TypeError):
        # Narrowed from a bare ``except``: only parse failures apply.
        return None

    # Day-of-year to calendar date: Jan 1st plus (doy - 1) days.
    thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy - 1)
    return thisDate
409 409
410 410
def getDateFromRadarFolder(folder):
    """
    Return the datetime.date encoded in a "?YYYYDDD" folder name,
    or None when the name does not follow the convention.
    """
    try:
        year = int(folder[1:5])
        doy = int(folder[5:8])
    except (ValueError, TypeError):
        # Narrowed from a bare ``except``: only parse failures apply.
        return None

    # Day-of-year to calendar date: Jan 1st plus (doy - 1) days.
    thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy - 1)
    return thisDate
420 420
def parse_format(s, fmt):
    """
    Expand strptime directives in ``fmt`` with fixed-width slices of ``s``.

    For each '%X' directive in ``fmt`` (left to right), the directive is
    replaced by the substring of ``s`` at the same position, whose width
    comes from DT_DIRECTIVES.  Literal characters in ``fmt`` are kept, so
    the result can be fed to datetime.strptime() with the same format.

    Example: parse_format('d2021100', '*%Y%j') -> '*2021100'

    Raises KeyError for directives not listed in DT_DIRECTIVES.
    """
    for _ in range(fmt.count('%')):
        x = fmt.index('%')
        d = DT_DIRECTIVES[fmt[x:x+2]]
        # Splice the data slice in place of the directive at position x
        # only.  The original used str.replace(), which substituted every
        # occurrence of a repeated directive and could corrupt the format
        # when the data slice itself contained a '%'.
        fmt = fmt[:x] + s[x:x+d] + fmt[x+2:]
    return fmt
428 428
class Reader(object):
    """Base class for schainpy data readers.

    Holds the shared reading state (current file pointer, search
    parameters, header sizes, status flags) plus the generic file-search
    and file-advance machinery used by both offline and online modes.
    Subclasses implement ``run`` and the format-specific hooks
    (``checkForRealPath``, ``readFirstHeader``, ``verifyFile``).

    NOTE(review): the list-valued class attributes (pathList,
    filenameList, datetimeList) are shared across instances unless an
    instance reassigns them.
    """

    c = 3E8                      # speed of light [m/s]
    isConfig = False
    dtype = None
    pathList = []
    filenameList = []
    datetimeList = []
    filename = None
    ext = None
    flagIsNewFile = 1
    flagDiscontinuousBlock = 0
    flagIsNewBlock = 0
    flagNoMoreFiles = 0
    fp = None
    firstHeaderSize = 0
    basicHeaderSize = 24
    versionFile = 1103
    fileSize = None
    fileSizeByHeader = None
    fileIndex = -1
    profileIndex = None
    blockIndex = 0
    nTotalBlocks = 0
    maxTimeStep = 30
    lastUTTime = None
    datablock = None
    dataOut = None
    getByBlock = False
    path = None
    startDate = None
    endDate = None
    startTime = datetime.time(0, 0, 0)
    endTime = datetime.time(23, 59, 59)
    set = None
    expLabel = ""
    online = False
    delay = 60                   # seconds to wait between online retries
    nTries = 3                   # quantity tries
    nFiles = 3                   # number of files for searching
    walk = True
    getblock = False
    nTxs = 1
    realtime = False
    blocksize = 0
    blocktime = None
    warnings = True
    verbose = True
    server = None
    topic = None
    format = None
    oneDDict = None
    twoDDict = None
    independentParam = None
    filefmt = None
    folderfmt = None
    open_file = open             # builtin: attribute access does not bind self
    open_mode = 'rb'

    def run(self):
        """Entry point; must be provided by subclasses."""

        raise NotImplementedError

    def getAllowedArgs(self):
        """Return the argument names accepted by ``run``.

        Uses the ``__attrs__`` list when the subclass declares it,
        otherwise introspects the ``run`` signature.
        """
        if hasattr(self, '__attrs__'):
            return self.__attrs__
        else:
            # inspect.getargspec() was removed in Python 3.11;
            # getfullargspec() exposes the same ``args`` attribute.
            return inspect.getfullargspec(self.run).args

    def set_kwargs(self, **kwargs):
        """Assign every keyword argument as an instance attribute."""

        for key, value in kwargs.items():
            setattr(self, key, value)

    def find_folders(self, path, startDate, endDate, folderfmt, last=False):
        """Yield day folders under ``path`` whose date (parsed with
        ``folderfmt``) lies inside [startDate, endDate].

        ``path`` may be a comma-separated list of directories; with
        ``last=True`` only the lexicographically last folder is tried.
        """
        folders = [x for f in path.split(',')
                   for x in os.listdir(f) if os.path.isdir(os.path.join(f, x))]
        folders.sort()

        if last:
            folders = [folders[-1]]

        for folder in folders:
            try:
                dt = datetime.datetime.strptime(parse_format(folder, folderfmt), folderfmt).date()
                if dt >= startDate and dt <= endDate:
                    # NOTE(review): joins against the full (possibly
                    # comma-separated) ``path`` string, not the folder's
                    # own parent directory -- confirm for multi-path use.
                    yield os.path.join(path, folder)
                else:
                    log.log('Skiping folder {}'.format(folder), self.name)
            except Exception as e:
                # Unparseable folder names are skipped, not fatal.
                log.log('Skiping folder {}'.format(folder), self.name)
                continue
        return

    def find_files(self, folders, ext, filefmt, startDate=None, endDate=None,
                   expLabel='', last=False):
        """Yield full paths of data files inside ``folders``.

        Files are matched by extension, then their date is parsed with
        ``filefmt`` and checked against [startDate, endDate].  With
        ``last=True`` only the last file of the first folder is tried.
        """
        for path in folders:
            files = glob.glob1(path+'/'+expLabel, '*{}'.format(ext))
            files.sort()
            if last:
                if files:
                    fo = files[-1]
                    try:
                        dt = datetime.datetime.strptime(parse_format(fo, filefmt), filefmt).date()
                        yield os.path.join(path, expLabel, fo)
                    except Exception as e:
                        pass
                    return
                else:
                    return

            for fo in files:
                try:
                    dt = datetime.datetime.strptime(parse_format(fo, filefmt), filefmt).date()
                    if dt >= startDate and dt <= endDate:

                        yield os.path.join(path, expLabel, fo)

                    else:

                        log.log('Skiping file {}'.format(fo), self.name)
                except Exception as e:
                    log.log('Skiping file {}'.format(fo), self.name)
                    continue

    def searchFilesOffLine(self, path, startDate, endDate,
                           expLabel, ext, walk,
                           filefmt, folderfmt):
        """Search files in offline mode for the given arguments

        Return:
            Generator of files
        """

        if walk:
            folders = self.find_folders(
                path, startDate, endDate, folderfmt)
        else:
            folders = path.split(',')

        return self.find_files(
            folders, ext, filefmt, startDate, endDate, expLabel)

    def searchFilesOnLine(self, path, startDate, endDate,
                          expLabel, ext, walk,
                          filefmt, folderfmt):
        """Search for the last file of the last folder

        Arguments:
            path : directory holding the data files
            expLabel : sub-experiment name (subfolder)
            ext : extension of the files
            walk : when enabled, descend into day subfolders (doypath)

        Return:
            generator with the full path of last filename
        """

        if walk:
            folders = self.find_folders(
                path, startDate, endDate, folderfmt, last=True)
        else:
            folders = path.split(',')

        return self.find_files(
            folders, ext, filefmt, startDate, endDate, expLabel, last=True)

    def setNextFile(self):
        """Advance to the next valid file, open it and parse its header.

        Loops until verifyFile() accepts a candidate.  Raises
        SchainError (online) or SchainWarning (offline) when no (more)
        files are available.
        """

        while True:

            if self.fp != None:
                self.fp.close()

            if self.online:
                newFile = self.setNextFileOnline()
            else:
                newFile = self.setNextFileOffline()

            if not(newFile):

                if self.online:
                    raise schainpy.admin.SchainError('Time to wait for new files reach')
                else:
                    if self.fileIndex == -1:
                        raise schainpy.admin.SchainWarning('No files found in the given path')
                    else:
                        raise schainpy.admin.SchainWarning('No more files to read')

            if self.verifyFile(self.filename):
                break

        log.log('Opening file: %s' % self.filename, self.name)

        self.readFirstHeader()
        self.nReadBlocks = 0

    def setNextFileOnline(self):
        """Check for the next file to be readed in online mode.

        Retries up to ``nTries`` times (sleeping ``delay`` seconds) per
        candidate, over ``nFiles`` candidates, rolling over to the next
        day when the current one seems exhausted.

        Set:
            self.filename
            self.fp
            self.filesize

        Return:
            boolean
        """

        nextFile = True
        nextDay = False

        for nFiles in range(self.nFiles+1):
            for nTries in range(self.nTries):
                fullfilename, filename = self.checkForRealPath(nextFile, nextDay)
                if fullfilename is not None:
                    break
                log.warning(
                    "Waiting %0.2f sec for the next file: \"%s\" , try %02d ..." % (self.delay, filename, nTries + 1),
                    self.name)
                time.sleep(self.delay)
                nextFile = False
                continue

            if fullfilename is not None:
                break

            #self.nTries = 1
            nextFile = True

            if nFiles == (self.nFiles - 1):
                log.log('Trying with next day...', self.name)
                nextDay = True
                self.nTries = 3

        if fullfilename:
            self.fileSize = os.path.getsize(fullfilename)
            self.filename = fullfilename
            self.flagIsNewFile = 1
            if self.fp != None:
                self.fp.close()
            self.fp = self.open_file(fullfilename, self.open_mode)

            self.flagNoMoreFiles = 0
            self.fileIndex += 1
            return 1
        else:
            return 0

    def setNextFileOffline(self):
        """Open the next file to be readed in offline mode.

        Pulls the next name from the ``filenameList`` generator.
        Returns 1 on success, 0 when the list is exhausted (setting
        ``flagNoMoreFiles``).
        """

        try:
            filename = next(self.filenameList)
            self.fileIndex += 1
        except StopIteration:
            self.flagNoMoreFiles = 1
            return 0

        self.filename = filename
        self.fileSize = os.path.getsize(filename)
        self.fp = self.open_file(filename, self.open_mode)
        self.flagIsNewFile = 1

        return 1

    @staticmethod
    def isDateTimeInRange(dt, startDate, endDate, startTime, endTime):
        """Check if the given datetime is in range"""

        if startDate <= dt.date() <= endDate:
            if startTime <= dt.time() <= endTime:
                return True
        return False

    def verifyFile(self, filename):
        """Check for a valid file

        Arguments:
            filename -- full path filename

        Return:
            boolean
        """

        # Base implementation accepts everything; format-specific
        # subclasses override this with real header checks.
        return True

    def checkForRealPath(self, nextFile, nextDay):
        """Check if the next file to be readed exists"""

        raise NotImplementedError

    def readFirstHeader(self):
        """Parse the file header"""

        pass

    def waitDataBlock(self, pointer_location, blocksize=None):
        """Wait (online) until a whole data block is available at
        ``pointer_location``.

        Reopens the file on every try so the size reported by the OS is
        refreshed while the acquisition keeps writing.  Returns 1 when
        enough bytes are available, 0 after ``nTries`` timeouts.
        """

        currentPointer = pointer_location
        if blocksize is None:
            neededSize = self.processingHeaderObj.blockSize  # + self.basicHeaderSize
        else:
            neededSize = blocksize

        for nTries in range(self.nTries):
            self.fp.close()
            self.fp = open(self.filename, 'rb')
            self.fp.seek(currentPointer)

            self.fileSize = os.path.getsize(self.filename)
            currentSize = self.fileSize - currentPointer

            if (currentSize >= neededSize):
                return 1

            log.warning(
                "Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries + 1),
                self.name
            )
            time.sleep(self.delay)

        return 0
776 776
class JRODataReader(Reader):
    """Reader for Jicamarca binary data files.

    Adds JRO-specific header parsing and block-level navigation on top
    of the generic ``Reader`` search/advance machinery.
    """

    utc = 0
    nReadBlocks = 0  # blocks read so far from the current file
    foldercounter = 0
    firstHeaderSize = 0
    basicHeaderSize = 24  # fixed byte size of a basic header
    __isFirstTimeOnline = 1
    topic = ''
    filefmt = "*%Y%j***"  # filename date pattern: year + day-of-year
    folderfmt = "*%Y%j"  # day-folder date pattern
    __attrs__ = ['path', 'startDate', 'endDate', 'startTime', 'endTime', 'online', 'delay', 'walk']
789 789
790 790 def getDtypeWidth(self):
791 791
792 792 dtype_index = get_dtype_index(self.dtype)
793 793 dtype_width = get_dtype_width(dtype_index)
794 794
795 795 return dtype_width
796 796
    def checkForRealPath(self, nextFile, nextDay):
        """Check whether the next file to be read exists.

        Example :
            if the wanted file is .../.../D2009307/P2009307367.ext

        the function probes the following combinations
            .../.../y2009307367.ext
            .../.../Y2009307367.ext
            .../.../x2009307/y2009307367.ext
            .../.../x2009307/Y2009307367.ext
            .../.../X2009307/y2009307367.ext
            .../.../X2009307/Y2009307367.ext
        the last letter combination being, in this case, identical to
        the wanted file.

        Arguments:
            nextFile -- advance to the next set before probing
            nextDay  -- reset the set and advance to the next day

        Return:
            (fullpath, filename) when a candidate exists on disk,
            (None, filename) otherwise.
        """

        if nextFile:
            self.set += 1
        if nextDay:
            self.set = 0
            self.doy += 1
        # NOTE(review): this local shadows the class attribute
        # 'foldercounter', so the "_NN" folder suffix branch below is
        # never taken here -- confirm intended.
        foldercounter = 0
        prefixDirList = [None, 'd', 'D']
        if self.ext.lower() == ".r":  # voltage
            prefixFileList = ['d', 'D']
        elif self.ext.lower() == ".pdata":  # spectra
            prefixFileList = ['p', 'P']

        ##############DP##############

        elif self.ext.lower() == ".dat":  # dat
            prefixFileList = ['z', 'Z']

        # NOTE(review): any other extension leaves 'prefixFileList'
        # unbound and the loop below raises UnboundLocalError.

        ##############DP##############
        # sweep over all possible letter combinations
        for prefixDir in prefixDirList:
            thispath = self.path
            if prefixDir != None:
                # build the folder name xYYYYDDD (x=d or x=D)
                if foldercounter == 0:
                    thispath = os.path.join(self.path, "%s%04d%03d" %
                                            (prefixDir, self.year, self.doy))
                else:
                    thispath = os.path.join(self.path, "%s%04d%03d_%02d" % (
                        prefixDir, self.year, self.doy, foldercounter))
            for prefixFile in prefixFileList:  # sweep over both possible file-prefix cases
                # build the file name xYYYYDDDSSS.ext
                filename = "%s%04d%03d%03d%s" % (prefixFile, self.year, self.doy, self.set, self.ext)
                fullfilename = os.path.join(
                    thispath, filename)

                if os.path.exists(fullfilename):
                    return fullfilename, filename

        return None, filename
858 858
    def __waitNewBlock(self):
        """
        Return 1 if a new data block was found, 0 otherwise.

        In offline reading mode this always returns 0.
        """
        if not self.online:
            return 0

        # All announced blocks of this file already read: nothing to wait for.
        if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
            return 0

        currentPointer = self.fp.tell()

        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        for nTries in range(self.nTries):

            # Reopen so the OS-reported size is refreshed while the
            # acquisition process keeps appending data.
            self.fp.close()
            self.fp = open(self.filename, 'rb')
            self.fp.seek(currentPointer)

            self.fileSize = os.path.getsize(self.filename)
            currentSize = self.fileSize - currentPointer

            if (currentSize >= neededSize):
                self.basicHeaderObj.read(self.fp)
                return 1

            # File reached the size announced by its headers: complete.
            if self.fileSize == self.fileSizeByHeader:
                # self.flagEoF = True
                return 0

            print("[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries + 1))
            time.sleep(self.delay)

        return 0
897 897
    def __setNewBlock(self):
        """Position the reader at the next data block.

        Reads the next basic header when enough bytes remain, waits for
        one in online mode, or advances to the next file.  Sets
        ``flagDiscontinuousBlock`` when the time gap between consecutive
        blocks exceeds ``maxTimeStep``.  Returns 1 on success, 0 when no
        more data is available.
        """

        if self.fp == None:
            return 0

        if self.flagIsNewFile:
            # First block of a new file: its basic header was already
            # consumed by readFirstHeader().
            self.lastUTTime = self.basicHeaderObj.utc
            return 1

        if self.realtime:
            # Realtime mode always jumps straight to the next file.
            self.flagDiscontinuousBlock = 1
            if not(self.setNextFile()):
                return 0
            else:
                return 1

        currentSize = self.fileSize - self.fp.tell()
        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        if (currentSize >= neededSize):
            self.basicHeaderObj.read(self.fp)
            self.lastUTTime = self.basicHeaderObj.utc
            return 1

        if self.__waitNewBlock():
            self.lastUTTime = self.basicHeaderObj.utc
            return 1

        if not(self.setNextFile()):
            return 0

        # Compare the new file's first block time against the previous
        # block to flag discontinuities.
        deltaTime = self.basicHeaderObj.utc - self.lastUTTime
        self.lastUTTime = self.basicHeaderObj.utc

        self.flagDiscontinuousBlock = 0
        if deltaTime > self.maxTimeStep:
            self.flagDiscontinuousBlock = 1

        return 1
937 937
    def readNextBlock(self):
        """Read data blocks until one inside the configured date/time
        range is found.

        Returns 1 on success (block decoded into ``dataOut``), 0 when
        the data runs out.
        """

        while True:
            # NOTE(review): if __setNewBlock() keeps returning 0 without
            # raising, this loop spins; in practice setNextFile() raises
            # when files are exhausted -- confirm.
            if not(self.__setNewBlock()):
                continue

            if not(self.readBlock()):
                return 0

            self.getBasicHeader()

            # Skip blocks whose timestamp falls outside the requested window.
            if not self.isDateTimeInRange(self.dataOut.datatime, self.startDate, self.endDate, self.startTime, self.endTime):
                print("[Reading] Block No. %d/%d -> %s [Skipping]" % (self.nReadBlocks,
                                                                      self.processingHeaderObj.dataBlocksPerFile,
                                                                      self.dataOut.datatime.ctime()))
                continue

            break

        if self.verbose:
            print("[Reading] Block No. %d/%d -> %s" % (self.nReadBlocks,
                                                       self.processingHeaderObj.dataBlocksPerFile,
                                                       self.dataOut.datatime.ctime()))
        #################DP#################
        # Publish the block timestamp in both human-readable and
        # epoch-seconds form for downstream (DP) processing.
        self.dataOut.TimeBlockDate=self.dataOut.datatime.ctime()
        self.dataOut.TimeBlockSeconds=time.mktime(time.strptime(self.dataOut.datatime.ctime()))
        #################DP#################
        return 1
966 966
    def readFirstHeader(self):
        """Parse the four headers at the start of the current file.

        Reads the basic, system, radar-controller and processing headers
        in file order, derives the numpy sample dtype from the
        processing flags, precomputes the expected file size and sets up
        the block dimensions.
        """

        self.basicHeaderObj.read(self.fp)
        self.systemHeaderObj.read(self.fp)
        self.radarControllerHeaderObj.read(self.fp)
        self.processingHeaderObj.read(self.fp)
        self.firstHeaderSize = self.basicHeaderObj.size

        # Decode the sample data type from the processing flags: the
        # log2 distance from DATATYPE_CHAR yields an index 0..5.
        datatype = int(numpy.log2((self.processingHeaderObj.processFlags &
                                   PROCFLAG.DATATYPE_MASK)) - numpy.log2(PROCFLAG.DATATYPE_CHAR))
        if datatype == 0:
            datatype_str = numpy.dtype([('real', '<i1'), ('imag', '<i1')])
        elif datatype == 1:
            datatype_str = numpy.dtype([('real', '<i2'), ('imag', '<i2')])
        elif datatype == 2:
            datatype_str = numpy.dtype([('real', '<i4'), ('imag', '<i4')])
        elif datatype == 3:
            datatype_str = numpy.dtype([('real', '<i8'), ('imag', '<i8')])
        elif datatype == 4:
            datatype_str = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
        elif datatype == 5:
            datatype_str = numpy.dtype([('real', '<f8'), ('imag', '<f8')])
        else:
            raise ValueError('Data type was not defined')

        self.dtype = datatype_str
        #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
        # Expected total size: all data blocks plus the first (full)
        # header and one basic header per remaining block.
        self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + \
            self.firstHeaderSize + self.basicHeaderSize * \
            (self.processingHeaderObj.dataBlocksPerFile - 1)
        # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
        # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
        self.getBlockDimension()
1000 1000
    def verifyFile(self, filename):
        """Check that ``filename`` holds valid JRO headers.

        In offline mode it additionally requires that either the first
        or the last block datetime intersects the configured date/time
        range.

        Arguments:
            filename -- full path of the candidate file

        Return:
            boolean
        """

        flag = True

        try:
            fp = open(filename, 'rb')
        except IOError:
            log.error("File {} can't be opened".format(filename), self.name)
            return False

        # In online mode, wait for at least one block to be present.
        if self.online and self.waitDataBlock(0):
            pass

        basicHeaderObj = BasicHeader(LOCALTIME)
        systemHeaderObj = SystemHeader()
        radarControllerHeaderObj = RadarControllerHeader()
        processingHeaderObj = ProcessingHeader()

        # All four headers must parse; any failure marks the file invalid.
        if not(basicHeaderObj.read(fp)):
            flag = False
        if not(systemHeaderObj.read(fp)):
            flag = False
        if not(radarControllerHeaderObj.read(fp)):
            flag = False
        if not(processingHeaderObj.read(fp)):
            flag = False
        if not self.online:
            dt1 = basicHeaderObj.datatime
            # Seek to the basic header of the last data block.
            # NOTE(review): uses self.fileSize / self.filename, which may
            # still describe the previously opened file rather than the
            # 'filename' argument -- confirm intended.
            pos = self.fileSize-processingHeaderObj.blockSize-24
            if pos<0:
                flag = False
                log.error('Invalid size for file: {}'.format(self.filename), self.name)
            else:
                fp.seek(pos)
                if not(basicHeaderObj.read(fp)):
                    flag = False
                dt2 = basicHeaderObj.datatime
                # Keep the file when either end of it is inside the range.
                if not self.isDateTimeInRange(dt1, self.startDate, self.endDate, self.startTime, self.endTime) and not \
                    self.isDateTimeInRange(dt2, self.startDate, self.endDate, self.startTime, self.endTime):
                    flag = False

        fp.close()
        return flag
1044 1044
    def findDatafiles(self, path, startDate=None, endDate=None, expLabel='', ext='.r', walk=True, include_path=False):
        """
        Scan one or more comma-separated base directories for radar data files
        and return the list of dates found.

        Inputs:
            path : one or more base directories separated by commas
            startDate, endDate : inclusive date-range filter (None disables it)
            expLabel : experiment sub-folder appended below each day folder
                       (walk mode only)
            ext : file extension to match, e.g. '.r' or '.pdata'
            walk : True if data lives in one sub-folder per day (dYYYYDDD)
                   under each base path; False if files sit directly in it
            include_path : if True, also return the directories holding data

        Return:
            dateList, or (dateList, pathList) when include_path is True

        Raises:
            schainpy.admin.SchainError : no matching files at all, or none
            inside the requested date range.
        """

        # becomes False as soon as any directory yields at least one file
        path_empty = True

        dateList = []
        pathList = []

        multi_path = path.split(',')

        if not walk:

            for single_path in multi_path:

                if not os.path.isdir(single_path):
                    continue

                fileList = glob.glob1(single_path, "*" + ext)

                if not fileList:
                    continue

                path_empty = False

                fileList.sort()

                for thisFile in fileList:

                    if not os.path.isfile(os.path.join(single_path, thisFile)):
                        continue

                    if not isRadarFile(thisFile):
                        continue

                    if not isFileInDateRange(thisFile, startDate, endDate):
                        continue

                    thisDate = getDateFromRadarFile(thisFile)

                    # NOTE(review): once a directory is registered, later files
                    # in it are skipped, so only the first date per directory
                    # is reported in no-walk mode — confirm this is intended.
                    if thisDate in dateList or single_path in pathList:
                        continue

                    dateList.append(thisDate)
                    pathList.append(single_path)

        else:
            for single_path in multi_path:

                if not os.path.isdir(single_path):
                    continue

                dirList = []

                # collect the per-day sub-folders (dYYYYDDD) inside the range
                for thisPath in os.listdir(single_path):

                    if not os.path.isdir(os.path.join(single_path, thisPath)):
                        continue

                    if not isRadarFolder(thisPath):
                        continue

                    if not isFolderInDateRange(thisPath, startDate, endDate):
                        continue

                    dirList.append(thisPath)

                if not dirList:
                    continue

                dirList.sort()

                for thisDir in dirList:

                    datapath = os.path.join(single_path, thisDir, expLabel)
                    fileList = glob.glob1(datapath, "*" + ext)

                    if not fileList:
                        continue

                    path_empty = False

                    thisDate = getDateFromRadarFolder(thisDir)

                    pathList.append(datapath)
                    dateList.append(thisDate)

        dateList.sort()

        if walk:
            pattern_path = os.path.join(multi_path[0], "[dYYYYDDD]", expLabel)
        else:
            pattern_path = multi_path[0]

        if path_empty:
            raise schainpy.admin.SchainError("[Reading] No *%s files in %s for %s to %s" % (ext, pattern_path, startDate, endDate))
        else:
            if not dateList:
                raise schainpy.admin.SchainError("[Reading] Date range selected invalid [%s - %s]: No *%s files in %s)" % (startDate, endDate, ext, path))

        if include_path:
            return dateList, pathList

        return dateList
1147 1147
    def setup(self, **kwargs):
        """
        Configure the reader from keyword arguments and locate the first file.

        Three operating modes:
          * server mode : self.server is set; a ZMQ SUB socket is connected
            and data will arrive over the network instead of from disk.
          * online mode : poll the data path up to self.nTries times, waiting
            self.delay seconds between tries, until a valid file appears.
          * offline mode: search all files in the requested range up front.

        Raises:
            ValueError : no path was given (disk modes).
            schainpy.admin.SchainError : online polling never found a file.
        """

        self.set_kwargs(**kwargs)
        if not self.ext.startswith('.'):
            self.ext = '.{}'.format(self.ext)

        if self.server is not None:
            # accept a full tcp:// endpoint, or a bare name for an ipc socket
            if 'tcp://' in self.server:
                address = self.server
            else:
                address = 'ipc:///tmp/%s' % self.server
            self.server = address
            self.context = zmq.Context()
            self.receiver = self.context.socket(zmq.SUB)
            self.receiver.connect(self.server)
            self.receiver.setsockopt(zmq.SUBSCRIBE, str.encode(str(self.topic)))
            time.sleep(0.5)
            print('[Starting] ReceiverData from {}'.format(self.server))
        else:
            self.server = None
            if self.path == None:
                raise ValueError("[Reading] The path is not valid")

            if self.online:
                log.log("[Reading] Searching files in online mode...", self.name)

                for nTries in range(self.nTries):
                    fullpath = self.searchFilesOnLine(self.path, self.startDate,
                        self.endDate, self.expLabel, self.ext, self.walk,
                        self.filefmt, self.folderfmt)

                    # searchFilesOnLine returns a generator; take its first hit
                    try:
                        fullpath = next(fullpath)
                    except:
                        fullpath = None

                    if fullpath:
                        break

                    log.warning(
                        'Waiting {} sec for a valid file in {}: try {} ...'.format(
                            self.delay, self.path, nTries + 1),
                        self.name)
                    time.sleep(self.delay)

                if not(fullpath):
                    raise schainpy.admin.SchainError(
                        'There isn\'t any valid file in {}'.format(self.path))

                # filename layout xYYYYDDDSSS.ext -> year, day-of-year, set
                pathname, filename = os.path.split(fullpath)
                self.year = int(filename[1:5])
                self.doy = int(filename[5:8])
                self.set = int(filename[8:11]) - 1
            else:
                log.log("Searching files in {}".format(self.path), self.name)
                self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
                    self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt)

        self.setNextFile()

        return
1209 1209
1210 1210 def getBasicHeader(self):
1211 1211
1212 1212 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond / \
1213 1213 1000. + self.profileIndex * self.radarControllerHeaderObj.ippSeconds
1214 1214
1215 1215 self.dataOut.flagDiscontinuousBlock = self.flagDiscontinuousBlock
1216 1216
1217 1217 self.dataOut.timeZone = self.basicHeaderObj.timeZone
1218 1218
1219 1219 self.dataOut.dstFlag = self.basicHeaderObj.dstFlag
1220 1220
1221 1221 self.dataOut.errorCount = self.basicHeaderObj.errorCount
1222 1222
1223 1223 self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime
1224 1224
1225 1225 self.dataOut.ippSeconds = self.radarControllerHeaderObj.ippSeconds / self.nTxs
1226 1226
    def getFirstHeader(self):
        """Abstract hook: subclasses fill dataOut metadata from the file headers."""

        raise NotImplementedError
1230 1230
    def getData(self):
        """Abstract hook: deliver the next unit of data through self.dataOut."""

        raise NotImplementedError
1234 1234
    def hasNotDataInBuffer(self):
        """Abstract hook: return truthy when the read buffer is exhausted."""

        raise NotImplementedError
1238 1238
    def readBlock(self):
        """Abstract hook: read one data block from the current file."""

        raise NotImplementedError
1242 1242
1243 1243 def isEndProcess(self):
1244 1244
1245 1245 return self.flagNoMoreFiles
1246 1246
1247 1247 def printReadBlocks(self):
1248 1248
1249 1249 print("[Reading] Number of read blocks per file %04d" % self.nReadBlocks)
1250 1250
1251 1251 def printTotalBlocks(self):
1252 1252
1253 1253 print("[Reading] Number of read blocks %04d" % self.nTotalBlocks)
1254 1254
    def run(self, **kwargs):
        """
        Entry point: configure the reader on the first call, then deliver one
        unit of data per call — from disk (getData) or, when a server was
        configured, from a ZMQ endpoint (getFromServer).

        Arguments (forwarded to setup via set_kwargs):
            path : comma-separated list of base data directories
            startDate :
            endDate :
            startTime :
            endTime :
            set : initial file set number
            expLabel : experiment sub-folder name
            ext : data file extension (e.g. '.r', '.pdata')
            online : poll for files that are still being written
            delay : seconds to wait between online retries
            walk : True if data is organized in per-day sub-folders
            getblock :
            nTxs : divisor applied to ippSeconds in getBasicHeader
            realtime :
            blocksize :
            blocktime :
            skip :
            cursor :
            warnings :
            server : ZMQ endpoint ('tcp://...' or bare ipc name)
            verbose :
            format :
            oneDDict :
            twoDDict :
            independentParam :
        """

        if not(self.isConfig):
            self.setup(**kwargs)
            self.isConfig = True
        if self.server is None:
            self.getData()
        else:
            try:
                self.getFromServer()
            except Exception as e:
                log.warning('Invalid block...')
                self.dataOut.flagNoData = True
1297 1297
1298 1298
class JRODataWriter(Reader):

    """
    Base class to write processed data files (.r or .pdata). Data is always
    written to disk one block at a time; the format-specific parts (block
    layout, first header) are supplied by subclasses.
    """

    setFile = None            # current file set number (SSS in xYYYYDDDSSS.ext)
    profilesPerBlock = None
    blocksPerFile = None
    nWriteBlocks = 0
    fileDate = None           # date of the file being written (for day rollover)

    def __init__(self, dataOut=None):
        raise NotImplementedError

    def hasAllDataInBuffer(self):
        """Abstract hook: return truthy when a complete block is buffered."""
        raise NotImplementedError

    def setBlockDimension(self):
        """Abstract hook: compute the shapes of the buffers of one block."""
        raise NotImplementedError

    def writeBlock(self):
        """Abstract hook: write the buffered block to the current file."""
        raise NotImplementedError

    def putData(self):
        """Abstract hook: buffer the incoming dataOut and write when ready."""
        raise NotImplementedError

    def getDtypeWidth(self):
        """Return the width in bytes of one sample component of self.dtype."""

        dtype_index = get_dtype_index(self.dtype)
        dtype_width = get_dtype_width(dtype_index)

        return dtype_width

    def getProcessFlags(self):
        """
        Build the PROCFLAG bitmask encoding the sample dtype and the
        processing steps already applied to self.dataOut.
        """

        processFlags = 0

        dtype_index = get_dtype_index(self.dtype)
        procflag_dtype = get_procflag_dtype(dtype_index)

        processFlags += procflag_dtype

        if self.dataOut.flagDecodeData:
            processFlags += PROCFLAG.DECODE_DATA

        if self.dataOut.flagDeflipData:
            processFlags += PROCFLAG.DEFLIP_DATA

        if self.dataOut.code is not None:
            processFlags += PROCFLAG.DEFINE_PROCESS_CODE

        if self.dataOut.nCohInt > 1:
            processFlags += PROCFLAG.COHERENT_INTEGRATION

        if self.dataOut.type == "Spectra":
            if self.dataOut.nIncohInt > 1:
                processFlags += PROCFLAG.INCOHERENT_INTEGRATION

            if self.dataOut.data_dc is not None:
                processFlags += PROCFLAG.SAVE_CHANNELS_DC

            if self.dataOut.flagShiftFFT:
                processFlags += PROCFLAG.SHIFT_FFT_DATA

        return processFlags

    def setBasicHeader(self):
        """Fill self.basicHeaderObj from the current dataOut time/status fields."""

        self.basicHeaderObj.size = self.basicHeaderSize  # bytes
        self.basicHeaderObj.version = self.versionFile
        self.basicHeaderObj.dataBlock = self.nTotalBlocks
        # split the epoch timestamp into integer seconds + milliseconds
        utc = numpy.floor(self.dataOut.utctime)
        milisecond = (self.dataOut.utctime - utc) * 1000.0
        self.basicHeaderObj.utc = utc
        self.basicHeaderObj.miliSecond = milisecond
        self.basicHeaderObj.timeZone = self.dataOut.timeZone
        self.basicHeaderObj.dstFlag = self.dataOut.dstFlag
        self.basicHeaderObj.errorCount = self.dataOut.errorCount

    def setFirstHeader(self):
        """
        Abstract hook: fill the long header objects before they are written.

        Affected:
            self.basicHeaderObj
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.processingHeaderObj

        Return:
            None
        """

        raise NotImplementedError

    def __writeFirstHeader(self):
        """
        Write the first header of the file: the basic header followed by the
        long header (SystemHeader, RadarControllerHeader, ProcessingHeader).

        Return:
            None
        """

        # the basic header size field must include the long header behind it
        sizeLongHeader = self.systemHeaderObj.size + \
            self.radarControllerHeaderObj.size + self.processingHeaderObj.size
        self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader

        self.basicHeaderObj.write(self.fp)
        self.systemHeaderObj.write(self.fp)
        self.radarControllerHeaderObj.write(self.fp)
        self.processingHeaderObj.write(self.fp)

    def __setNewBlock(self):
        """
        Prepare the file for the next block: a brand-new file already has its
        first header; otherwise write a basic header, rolling over to a new
        file once the per-file block quota is reached.

        Return:
            0 : nothing could be written
            1 : a header (basic or first) is in place
        """
        if self.fp is None:
            self.setNextFile()

        if self.flagIsNewFile:
            return 1

        if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
            self.basicHeaderObj.write(self.fp)
            return 1

        if not self.setNextFile():
            return 0

        return 1

    def writeNextBlock(self):
        """
        Write the next block of data to the file.

        Return:
            0 : the data block could not be written
            1 : the data block was written successfully
        """
        if not self.__setNewBlock():
            return 0

        self.writeBlock()

        print("[Writing] Block No. %d/%d" % (self.blockIndex,
                                             self.processingHeaderObj.dataBlocksPerFile))

        return 1

    def setNextFile(self):
        """
        Open the next output file (creating the per-day dYYYYDDD folder when
        needed), choose the next set number and write the first header.

        Affected:
            self.filename, self.subfolder, self.fp, self.setFile,
            self.flagIsNewFile, self.fileDate, self.blockIndex

        Return:
            1 : the file is ready to be written
        """
        ext = self.ext
        path = self.path

        if self.fp is not None:
            self.fp.close()

        timeTuple = time.localtime(self.dataOut.utctime)
        subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year, timeTuple.tm_yday)

        fullpath = os.path.join(path, subfolder)
        setFile = self.setFile

        if not os.path.exists(fullpath):
            os.makedirs(fullpath)
            setFile = -1  # start the set counter from scratch
        else:
            filesList = os.listdir(fullpath)
            if len(filesList) > 0:
                filesList = sorted(filesList, key=str.lower)
                filen = filesList[-1]
                # expected filename layout:
                #   0 1234 567 89A BCDE (hex)
                #   x YYYY DDD SSS .ext
                if isNumber(filen[8:11]):
                    # continue from the set number of the newest existing file
                    setFile = int(filen[8:11])
                else:
                    setFile = -1
            else:
                setFile = -1  # empty folder: start the set counter from scratch

        setFile += 1

        # a new day resets the set number and the total block counter
        if self.dataOut.datatime.date() > self.fileDate:
            setFile = 0
            self.nTotalBlocks = 0

        filen = '{}{:04d}{:03d}{:03d}{}'.format(
            self.optchar, timeTuple.tm_year, timeTuple.tm_yday, setFile, ext)

        filename = os.path.join(path, subfolder, filen)

        fp = open(filename, 'wb')

        self.blockIndex = 0
        self.filename = filename
        self.subfolder = subfolder
        self.fp = fp
        self.setFile = setFile
        self.flagIsNewFile = 1
        self.fileDate = self.dataOut.datatime.date()
        self.setFirstHeader()

        print('[Writing] Opening file: %s' % self.filename)

        self.__writeFirstHeader()

        return 1

    def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=64, set=None, ext=None, datatype=4):
        """
        Configure the output format and open the first file.

        Inputs:
            path : directory where data will be saved
            blocksPerFile : number of blocks per output file
            profilesPerBlock : number of profiles per block
            set : initial file set (None to continue from existing files)
            datatype : integer code that defines the sample type:
                0 : int8 (1 byte)
                1 : int16 (2 bytes)
                2 : int32 (4 bytes)
                3 : int64 (8 bytes)
                4 : float32 (4 bytes)
                5 : double64 (8 bytes)

        Return:
            0 : setup failed (no file could be opened)
            1 : setup succeeded
        """

        if ext is None:
            ext = self.ext

        self.ext = ext.lower()

        self.path = path

        if set is None:
            self.setFile = -1
        else:
            self.setFile = set - 1

        self.blocksPerFile = blocksPerFile
        self.profilesPerBlock = profilesPerBlock
        self.dataOut = dataOut
        self.fileDate = self.dataOut.datatime.date()
        self.dtype = self.dataOut.dtype

        if datatype is not None:
            self.dtype = get_numpy_dtype(datatype)

        if not self.setNextFile():
            print("[Writing] There isn't a next file")
            return 0

        self.setBlockDimension()

        return 1

    def run(self, dataOut, path, blocksPerFile=100, profilesPerBlock=64,
            set=None, ext=None, datatype=4, flagManualHeader=False, **kwargs):
        """
        Entry point called once per processing cycle: configures the writer on
        the first call, then buffers/writes the incoming data.

        flagManualHeader : when True the caller manages the headers manually
        (stored for subclasses to honor).
        """
        self.flagManualHeader = flagManualHeader

        if not self.isConfig:

            self.setup(dataOut, path, blocksPerFile, profilesPerBlock=profilesPerBlock,
                       set=set, ext=ext, datatype=datatype, **kwargs)
            self.isConfig = True

        self.dataOut = dataOut
        self.putData()
        return self.dataOut
1595 1596
@MPDecorator
class printInfo(Operation):
    """Print the requested header objects of dataOut, on the first run() only."""

    def __init__(self):

        Operation.__init__(self)
        self.__printInfo = True  # becomes False after the first run() call

    def run(self, dataOut, headers=None):
        """
        Print each header attribute of dataOut once.

        headers : list of attribute names; defaults to the three standard
                  header objects.
        """
        # avoid a mutable default argument; None selects the standard headers
        if headers is None:
            headers = ['systemHeaderObj', 'radarControllerHeaderObj', 'processingHeaderObj']

        if not self.__printInfo:
            return

        for header in headers:
            if hasattr(dataOut, header):
                obj = getattr(dataOut, header)
                if hasattr(obj, 'printInfo'):
                    obj.printInfo()
                else:
                    print(obj)
            else:
                log.warning('Header {} Not found in object'.format(header))

        self.__printInfo = False
@@ -1,527 +1,537
1 1 '''
2 2 Created on Jul 2, 2014
3 3
4 4 @author: roj-idl71
5 5 '''
6 6 import numpy
7 7
8 8 from schainpy.model.io.jroIO_base import LOCALTIME, JRODataReader, JRODataWriter
9 9 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
10 10 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
11 11 from schainpy.model.data.jrodata import Spectra
12 12 from schainpy.utils import log
13 13
14 14
class SpectraReader(JRODataReader, ProcessingUnit):
    """
    Reader for processed spectra files (.pdata). Data is always read in
    blocks; each block is unpacked into three buffers:

        equalChannelPairs * heights * profiles   (Self Spectra)
        crossChannelPairs * heights * profiles   (Cross Spectra)
        channels * heights                       (DC Channels)

    The class holds instances of BasicHeader, SystemHeader and
    RadarControllerHeader for the file metadata, plus a Spectra object
    (self.dataOut) that is refilled on every call to getData().

    Example:
        dpath = "/home/myuser/data"

        startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)

        endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)

        readerObj = SpectraReader()

        readerObj.setup(dpath, startTime, endTime)

        while(True):

            readerObj.getData()

            print readerObj.data_spc

            print readerObj.data_cspc

            print readerObj.data_dc

            if readerObj.flagNoMoreFiles:
                break

    """

    def __init__(self):#, **kwargs):
        """
        Initialize the SpectraReader.

        Creates the header objects and the default Spectra container
        (self.dataOut) that will hold one data block per getData() call.

        Affected:
            self.dataOut

        Return : None
        """

        ProcessingUnit.__init__(self)

        self.pts2read_SelfSpectra = 0
        self.pts2read_CrossSpectra = 0
        self.pts2read_DCchannels = 0
        self.ext = ".pdata"
        self.optchar = "P"
        self.basicHeaderObj = BasicHeader(LOCALTIME)
        self.systemHeaderObj = SystemHeader()
        self.radarControllerHeaderObj = RadarControllerHeader()
        self.processingHeaderObj = ProcessingHeader()
        self.lastUTTime = 0
        self.maxTimeStep = 30
        self.dataOut = Spectra()
        self.profileIndex = 1
        self.nRdChannels = None
        self.nRdPairs = None
        self.rdPairList = []

    def createObjByDefault(self):
        # factory for an empty output container of the type this reader produces
        dataObj = Spectra()

        return dataObj

    def __hasNotDataInBuffer(self):
        # spectra are delivered one block at a time, so the buffer is always
        # considered empty and every getData() triggers a new block read
        return 1


    def getBlockDimension(self):
        """
        Compute how many points must be read for each section of a data block.

        spectraComb holds channel pairs: a pair of equal channels is a
        self-spectrum, a pair of different channels is a cross-spectrum.

        Affected:
            self.nRdChannels
            self.nRdPairs
            self.rdPairList
            self.pts2read_SelfSpectra
            self.pts2read_CrossSpectra
            self.pts2read_DCchannels
            self.blocksize

        Return:
            None
        """
        self.nRdChannels = 0
        self.nRdPairs = 0
        self.rdPairList = []

        for i in range(0, self.processingHeaderObj.totalSpectra*2, 2):
            if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i+1]:
                self.nRdChannels = self.nRdChannels + 1 # pair of equal channels -> self-spectrum
            else:
                self.nRdPairs = self.nRdPairs + 1 # pair of different channels -> cross-spectrum
                self.rdPairList.append((self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1]))

        pts2read = self.processingHeaderObj.nHeights * self.processingHeaderObj.profilesPerBlock

        self.pts2read_SelfSpectra = int(self.nRdChannels * pts2read)
        self.blocksize = self.pts2read_SelfSpectra

        if self.processingHeaderObj.flag_cspc:
            self.pts2read_CrossSpectra = int(self.nRdPairs * pts2read)
            self.blocksize += self.pts2read_CrossSpectra

        if self.processingHeaderObj.flag_dc:
            self.pts2read_DCchannels = int(self.systemHeaderObj.nChannels * self.processingHeaderObj.nHeights)
            self.blocksize += self.pts2read_DCchannels

    def readBlock(self):
        """
        Read one data block from the current file position (self.fp) and
        update the spectra buffers; the block counters are advanced.

        Return: 1

        Affected:

            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
            self.data_spc
            self.data_cspc
            self.data_dc
        """

        fpointer = self.fp.tell()

        # self-spectra use only the first field of the dtype (presumably the
        # real component -- confirm); cross-spectra and DC use the full
        # (real, imag) structured dtype, see the ['real']/['imag'] access below
        spc = numpy.fromfile( self.fp, self.dtype[0], self.pts2read_SelfSpectra )
        spc = spc.reshape( (self.nRdChannels, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #reshape to a 3D array

        if self.processingHeaderObj.flag_cspc:
            cspc = numpy.fromfile( self.fp, self.dtype, self.pts2read_CrossSpectra )
            cspc = cspc.reshape( (self.nRdPairs, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #reshape to a 3D array

        if self.processingHeaderObj.flag_dc:
            dc = numpy.fromfile( self.fp, self.dtype, self.pts2read_DCchannels ) #int(self.processingHeaderObj.nHeights*self.systemHeaderObj.nChannels) )
            dc = dc.reshape( (self.systemHeaderObj.nChannels, self.processingHeaderObj.nHeights) ) #reshape to a 2D array

        if not self.processingHeaderObj.shif_fft:
            # data stored without FFT shift: roll half the profiles to the
            # right along axis 2
            shift = int(self.processingHeaderObj.profilesPerBlock/2)
            spc = numpy.roll( spc, shift , axis=2 )

            if self.processingHeaderObj.flag_cspc:
                # same roll for the cross-spectra
                cspc = numpy.roll( cspc, shift, axis=2 )

        #Dimensions : nChannels, nProfiles, nSamples
        spc = numpy.transpose( spc, (0,2,1) )
        self.data_spc = spc

        if self.processingHeaderObj.flag_cspc:
            cspc = numpy.transpose( cspc, (0,2,1) )
            self.data_cspc = cspc['real'] + cspc['imag']*1j
        else:
            self.data_cspc = None

        if self.processingHeaderObj.flag_dc:
            self.data_dc = dc['real'] + dc['imag']*1j
        else:
            self.data_dc = None

        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1

        self.nTotalBlocks += 1
        self.nReadBlocks += 1

        return 1

    def getFirstHeader(self):
        """Copy the file header metadata (dtype, pairs, heights, flags, ...) into self.dataOut."""

        self.getBasicHeader()
        self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
        self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
        self.dataOut.dtype = self.dtype
        self.dataOut.pairsList = self.rdPairList
        self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
        self.dataOut.nFFTPoints = self.processingHeaderObj.profilesPerBlock
        self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
        self.dataOut.nIncohInt = self.processingHeaderObj.nIncohInt
        # height axis rebuilt from first height + uniform spacing
        xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
        self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
        self.dataOut.channelList = list(range(self.systemHeaderObj.nChannels))
        self.dataOut.flagShiftFFT = True #Data is always shifted
        self.dataOut.flagDecodeData = self.processingHeaderObj.flag_decode # assume the data is not decoded
        self.dataOut.flagDeflipData = self.processingHeaderObj.flag_deflip # assume the data is not de-flipped

    def getData(self):
        """
        First method to execute before "RUN" is called.

        Copies the read buffers into the "Spectra" object (self.dataOut)
        together with the associated metadata; when the buffer is empty a
        new data block is read from disk via readNextBlock().

        Return:
            0 : no more data available (dataOut.flagNoData is set)
            self.dataOut.data_spc : on a successful copy

        Affected:
            self.dataOut
            self.flagDiscontinuousBlock
            self.flagIsNewBlock
        """

        if self.flagNoMoreFiles:
            self.dataOut.flagNoData = True
            return 0

        self.flagDiscontinuousBlock = 0
        self.flagIsNewBlock = 0

        if self.__hasNotDataInBuffer():

            if not( self.readNextBlock() ):
                self.dataOut.flagNoData = True
                return 0

        #data is a 3D numpy array (profiles, heights, channels)

        if self.data_spc is None:
            self.dataOut.flagNoData = True
            return 0

        self.getBasicHeader()
        # NOTE(review): getFirstHeader() calls getBasicHeader() again, so the
        # previous call looks redundant -- confirm before removing
        self.getFirstHeader()
        self.dataOut.data_spc = self.data_spc
        self.dataOut.data_cspc = self.data_cspc
        self.dataOut.data_dc = self.data_dc
        self.dataOut.flagNoData = False
        self.dataOut.realtime = self.online

        return self.dataOut.data_spc
275 275
276 276
277 277 @MPDecorator
278 278 class SpectraWriter(JRODataWriter, Operation):
279 279
280 280 """
281 281 Esta clase permite escribir datos de espectros a archivos procesados (.pdata). La escritura
282 282 de los datos siempre se realiza por bloques.
283 283 """
284 284
285 285 def __init__(self):
286 286 """
287 287 Inicializador de la clase SpectraWriter para la escritura de datos de espectros.
288 288
289 289 Affected:
290 290 self.dataOut
291 291 self.basicHeaderObj
292 292 self.systemHeaderObj
293 293 self.radarControllerHeaderObj
294 294 self.processingHeaderObj
295 295
296 296 Return: None
297 297 """
298 298
299 299 Operation.__init__(self)
300 300
301 301 self.ext = ".pdata"
302 302 self.optchar = "P"
303 303 self.shape_spc_Buffer = None
304 304 self.shape_cspc_Buffer = None
305 305 self.shape_dc_Buffer = None
306 306 self.data_spc = None
307 307 self.data_cspc = None
308 308 self.data_dc = None
309 309 self.setFile = None
310 310 self.noMoreFiles = 0
311 311 self.basicHeaderObj = BasicHeader(LOCALTIME)
312 312 self.systemHeaderObj = SystemHeader()
313 313 self.radarControllerHeaderObj = RadarControllerHeader()
314 314 self.processingHeaderObj = ProcessingHeader()
315 315
316 316 def hasAllDataInBuffer(self):
317 317 return 1
318 318
319 319
320 320 def setBlockDimension(self):
321 321 """
322 322 Obtiene las formas dimensionales del los subbloques de datos que componen un bloque
323 323
324 324 Affected:
325 325 self.shape_spc_Buffer
326 326 self.shape_cspc_Buffer
327 327 self.shape_dc_Buffer
328 328
329 329 Return: None
330 330 """
331 331 self.shape_spc_Buffer = (self.dataOut.nChannels,
332 332 self.processingHeaderObj.nHeights,
333 333 self.processingHeaderObj.profilesPerBlock)
334 334
335 335 self.shape_cspc_Buffer = (self.dataOut.nPairs,
336 336 self.processingHeaderObj.nHeights,
337 337 self.processingHeaderObj.profilesPerBlock)
338 338
339 339 self.shape_dc_Buffer = (self.dataOut.nChannels,
340 340 self.processingHeaderObj.nHeights)
341 341
342 342
343 343 def writeBlock(self):
344 344 """processingHeaderObj
345 345 Escribe el buffer en el file designado
346 346
347 347 Affected:
348 348 self.data_spc
349 349 self.data_cspc
350 350 self.data_dc
351 351 self.flagIsNewFile
352 352 self.flagIsNewBlock
353 353 self.nTotalBlocks
354 354 self.nWriteBlocks
355 355
356 356 Return: None
357 357 """
358 358
359 359 spc = numpy.transpose( self.data_spc, (0,2,1) )
360 360 if not self.processingHeaderObj.shif_fft:
361 361 spc = numpy.roll( spc, int(self.processingHeaderObj.profilesPerBlock/2), axis=2 ) #desplaza a la derecha en el eje 2 determinadas posiciones
362 362 data = spc.reshape((-1))
363 363 data = data.astype(self.dtype[0])
364 364 data.tofile(self.fp)
365 365
366 366 if self.data_cspc is not None:
367 367
368 368 cspc = numpy.transpose( self.data_cspc, (0,2,1) )
369 369 data = numpy.zeros( numpy.shape(cspc), self.dtype )
370 370 #print 'data.shape', self.shape_cspc_Buffer
371 371 if not self.processingHeaderObj.shif_fft:
372 372 cspc = numpy.roll( cspc, int(self.processingHeaderObj.profilesPerBlock/2), axis=2 ) #desplaza a la derecha en el eje 2 determinadas posiciones
373 373 data['real'] = cspc.real
374 374 data['imag'] = cspc.imag
375 375 data = data.reshape((-1))
376 376 data.tofile(self.fp)
377 377
378 378 if self.data_dc is not None:
379 379
380 380 dc = self.data_dc
381 381 data = numpy.zeros( numpy.shape(dc), self.dtype )
382 382 data['real'] = dc.real
383 383 data['imag'] = dc.imag
384 384 data = data.reshape((-1))
385 385 data.tofile(self.fp)
386 386
387 387 # self.data_spc.fill(0)
388 388 #
389 389 # if self.data_dc is not None:
390 390 # self.data_dc.fill(0)
391 391 #
392 392 # if self.data_cspc is not None:
393 393 # self.data_cspc.fill(0)
394 394
395 395 self.flagIsNewFile = 0
396 396 self.flagIsNewBlock = 1
397 397 self.nTotalBlocks += 1
398 398 self.nWriteBlocks += 1
399 399 self.blockIndex += 1
400 400
401 401 # print "[Writing] Block = %d04" %self.blockIndex
402 402
    def putData(self):
        """
        Buffer one block of spectra data from self.dataOut and write it to
        file once the buffer is complete.

        On a discontinuous block the partially filled buffers are zeroed and
        a new output file is started before buffering the incoming data.

        Affected:
            self.data_spc
            self.data_cspc
            self.data_dc

        Return:
            0 : when there is no data to write (dataOut.flagNoData is set)
            None : otherwise (NOTE(review): docstring historically promised 1
                   on success, but the success path falls through — callers
                   should not rely on the return value; verify against callers)
        """

        if self.dataOut.flagNoData:
            return 0

        self.flagIsNewBlock = 0

        if self.dataOut.flagDiscontinuousBlock:
            # Discontinuity: discard partially accumulated buffers and roll
            # over to a fresh output file.
            self.data_spc.fill(0)
            if self.dataOut.data_cspc is not None:
                self.data_cspc.fill(0)
            if self.dataOut.data_dc is not None:
                self.data_dc.fill(0)
            self.setNextFile()

        if self.flagIsNewFile == 0:
            # Continuing an existing file: refresh only the per-block header.
            self.setBasicHeader()

        self.data_spc = self.dataOut.data_spc.copy()

        if self.dataOut.data_cspc is not None:
            self.data_cspc = self.dataOut.data_cspc.copy()

        if self.dataOut.data_dc is not None:
            self.data_dc = self.dataOut.data_dc.copy()

        # #self.processingHeaderObj.dataBlocksPerFile)
        if self.hasAllDataInBuffer():
#            self.setFirstHeader()
            self.setFirstHeader()
            self.writeNextBlock()
445 445
446 446 def __getBlockSize(self):
447 447 '''
448 448 Este metodos determina el cantidad de bytes para un bloque de datos de tipo Spectra
449 449 '''
450 450
451 451 dtype_width = self.getDtypeWidth()
452 452
453 453 pts2write = self.dataOut.nHeights * self.dataOut.nFFTPoints
454 454
455 455 pts2write_SelfSpectra = int(self.dataOut.nChannels * pts2write)
456 456 blocksize = (pts2write_SelfSpectra*dtype_width)
457 457
458 458 if self.dataOut.data_cspc is not None:
459 459 pts2write_CrossSpectra = int(self.dataOut.nPairs * pts2write)
460 460 blocksize += (pts2write_CrossSpectra*dtype_width*2)
461 461
462 462 if self.dataOut.data_dc is not None:
463 463 pts2write_DCchannels = int(self.dataOut.nChannels * self.dataOut.nHeights)
464 464 blocksize += (pts2write_DCchannels*dtype_width*2)
465 465
466 466 # blocksize = blocksize #* datatypeValue * 2 #CORREGIR ESTO
467 467
468 468 return blocksize
469 469
470 470 def setFirstHeader(self):
471 471
472 472 """
473 473 Obtiene una copia del First Header
474 474
475 475 Affected:
476 476 self.systemHeaderObj
477 477 self.radarControllerHeaderObj
478 478 self.dtype
479 479
480 480 Return:
481 481 None
482 482 """
483 483
484 484 self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
485 485 self.systemHeaderObj.nChannels = self.dataOut.nChannels
486 486 self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()
487 487
488 488 self.processingHeaderObj.dtype = 1 # Spectra
489 489 self.processingHeaderObj.blockSize = self.__getBlockSize()
490 490 self.processingHeaderObj.profilesPerBlock = self.dataOut.nFFTPoints
491 491 self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
492 492 self.processingHeaderObj.nWindows = 1 #podria ser 1 o self.dataOut.processingHeaderObj.nWindows
493 493 self.processingHeaderObj.nCohInt = self.dataOut.nCohInt# Se requiere para determinar el valor de timeInterval
494 494 self.processingHeaderObj.nIncohInt = self.dataOut.nIncohInt
495 495 self.processingHeaderObj.totalSpectra = self.dataOut.nPairs + self.dataOut.nChannels
496 496 self.processingHeaderObj.shif_fft = self.dataOut.flagShiftFFT
497 497
498 498 if self.processingHeaderObj.totalSpectra > 0:
499 499 channelList = []
500 500 for channel in range(self.dataOut.nChannels):
501 501 channelList.append(channel)
502 502 channelList.append(channel)
503 503
504 504 pairsList = []
505 505 if self.dataOut.nPairs > 0:
506 506 for pair in self.dataOut.pairsList:
507 507 pairsList.append(pair[0])
508 508 pairsList.append(pair[1])
509 509
510 510 spectraComb = channelList + pairsList
511 511 spectraComb = numpy.array(spectraComb, dtype="u1")
512 512 self.processingHeaderObj.spectraComb = spectraComb
513 513
514 514 if self.dataOut.code is not None:
515 515 self.processingHeaderObj.code = self.dataOut.code
516 516 self.processingHeaderObj.nCode = self.dataOut.nCode
517 517 self.processingHeaderObj.nBaud = self.dataOut.nBaud
518 518
519 519 if self.processingHeaderObj.nWindows != 0:
520 520 self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
521 521 self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
522 522 self.processingHeaderObj.nHeights = self.dataOut.nHeights
523 523 self.processingHeaderObj.samplesWin = self.dataOut.nHeights
524 524
525 if self.flagManualHeader is True:
526 HeaderList1D = ["nCode" , "nBaud", "codeType", "txA", "txB", "nTx"]
527 for attr_ in HeaderList1D: # pass dataOut variables to radarControllerHeaderObj for manual header
528 try: setattr(self.radarControllerHeaderObj, attr_, getattr(self.dataOut,attr_))
529 except: pass
530
531 if self.dataOut.code is not None:
532 self.radarControllerHeaderObj.code = numpy.array(self.dataOut.code)
533
534
525 535 self.processingHeaderObj.processFlags = self.getProcessFlags()
526 536
527 537 self.setBasicHeader()
General Comments 0
You need to be logged in to leave comments. Login now