##// END OF EJS Templates
Update test Julia_EEJ.py
Alexander Valdez -
r1685:2ca28c2f318e
parent child
Show More
@@ -1,1575 +1,1572
1 1 """
2 2 Created on Jul 2, 2014
3 3
4 4 @author: roj-idl71
5 5 """
6 6 import os
7 7 import sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import inspect
13 13 import time
14 14 import datetime
15 15 import zmq
16 16
17 17 from schainpy.model.proc.jroproc_base import Operation, MPDecorator
18 18 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
19 19 from schainpy.model.data.jroheaderIO import get_dtype_index, get_numpy_dtype, get_procflag_dtype, get_dtype_width
20 20 from schainpy.utils import log
21 21 import schainpy.admin
22 22
23 23 LOCALTIME = True
24 24 DT_DIRECTIVES = {
25 25 '%Y': 4,
26 26 '%y': 2,
27 27 '%m': 2,
28 28 '%d': 2,
29 29 '%j': 3,
30 30 '%H': 2,
31 31 '%M': 2,
32 32 '%S': 2,
33 33 '%f': 6
34 34 }
35 35
36 36
37 37 def isNumber(cad):
38 38 """
39 39 Chequea si el conjunto de caracteres que componen un string puede ser convertidos a un numero.
40 40
41 41 Excepciones:
42 42 Si un determinado string no puede ser convertido a numero
43 43 Input:
44 44 str, string al cual se le analiza para determinar si convertible a un numero o no
45 45
46 46 Return:
47 47 True : si el string es uno numerico
48 48 False : no es un string numerico
49 49 """
50 50 try:
51 51 float(cad)
52 52 return True
53 53 except:
54 54 return False
55 55
56 56
57 57 def isFileInEpoch(filename, startUTSeconds, endUTSeconds):
58 58 """
59 59 Esta funcion determina si un archivo de datos se encuentra o no dentro del rango de fecha especificado.
60 60
61 61 Inputs:
62 62 filename : nombre completo del archivo de datos en formato Jicamarca (.r)
63 63
64 64 startUTSeconds : fecha inicial del rango seleccionado. La fecha esta dada en
65 65 segundos contados desde 01/01/1970.
66 66 endUTSeconds : fecha final del rango seleccionado. La fecha esta dada en
67 67 segundos contados desde 01/01/1970.
68 68
69 69 Return:
70 70 Boolean : Retorna True si el archivo de datos contiene datos en el rango de
71 71 fecha especificado, de lo contrario retorna False.
72 72
73 73 Excepciones:
74 74 Si el archivo no existe o no puede ser abierto
75 75 Si la cabecera no puede ser leida.
76 76
77 77 """
78 78 basicHeaderObj = BasicHeader(LOCALTIME)
79 79
80 80 try:
81 81 fp = open(filename, 'rb')
82 82 except IOError:
83 83 print("The file %s can't be opened" % (filename))
84 84 return 0
85 85
86 86 sts = basicHeaderObj.read(fp)
87 87 fp.close()
88 88
89 89 if not(sts):
90 90 print("Skipping the file %s because it has not a valid header" % (filename))
91 91 return 0
92 92
93 93 if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
94 94 return 0
95 95
96 96 return 1
97 97
98 98
99 99 def isTimeInRange(thisTime, startTime, endTime):
100 100 if endTime >= startTime:
101 101 if (thisTime < startTime) or (thisTime > endTime):
102 102 return 0
103 103 return 1
104 104 else:
105 105 if (thisTime < startTime) and (thisTime > endTime):
106 106 return 0
107 107 return 1
108 108
109 109
110 110 def isFileInTimeRange(filename, startDate, endDate, startTime, endTime):
111 111 """
112 112 Retorna 1 si el archivo de datos se encuentra dentro del rango de horas especificado.
113 113
114 114 Inputs:
115 115 filename : nombre completo del archivo de datos en formato Jicamarca (.r)
116 116
117 117 startDate : fecha inicial del rango seleccionado en formato datetime.date
118 118
119 119 endDate : fecha final del rango seleccionado en formato datetime.date
120 120
121 121 startTime : tiempo inicial del rango seleccionado en formato datetime.time
122 122
123 123 endTime : tiempo final del rango seleccionado en formato datetime.time
124 124
125 125 Return:
126 126 Boolean : Retorna True si el archivo de datos contiene datos en el rango de
127 127 fecha especificado, de lo contrario retorna False.
128 128
129 129 Excepciones:
130 130 Si el archivo no existe o no puede ser abierto
131 131 Si la cabecera no puede ser leida.
132 132
133 133 """
134 134
135 135 try:
136 136 fp = open(filename, 'rb')
137 137 except IOError:
138 138 print("The file %s can't be opened" % (filename))
139 139 return None
140 140
141 141 firstBasicHeaderObj = BasicHeader(LOCALTIME)
142 142 systemHeaderObj = SystemHeader()
143 143 radarControllerHeaderObj = RadarControllerHeader()
144 144 processingHeaderObj = ProcessingHeader()
145 145
146 146 lastBasicHeaderObj = BasicHeader(LOCALTIME)
147 147
148 148 sts = firstBasicHeaderObj.read(fp)
149 149
150 150 if not(sts):
151 151 print("[Reading] Skipping the file %s because it has not a valid header" % (filename))
152 152 return None
153 153
154 154 if not systemHeaderObj.read(fp):
155 155 return None
156 156
157 157 if not radarControllerHeaderObj.read(fp):
158 158 return None
159 159
160 160 if not processingHeaderObj.read(fp):
161 161 return None
162 162
163 163 filesize = os.path.getsize(filename)
164 164
165 165 offset = processingHeaderObj.blockSize + 24 # header size
166 166
167 167 if filesize <= offset:
168 168 print("[Reading] %s: This file has not enough data" % filename)
169 169 return None
170 170
171 171 fp.seek(-offset, 2)
172 172
173 173 sts = lastBasicHeaderObj.read(fp)
174 174
175 175 fp.close()
176 176
177 177 thisDatetime = lastBasicHeaderObj.datatime
178 178 thisTime_last_block = thisDatetime.time()
179 179
180 180 thisDatetime = firstBasicHeaderObj.datatime
181 181 thisDate = thisDatetime.date()
182 182 thisTime_first_block = thisDatetime.time()
183 183
184 184 # General case
185 185 # o>>>>>>>>>>>>>><<<<<<<<<<<<<<o
186 186 #-----------o----------------------------o-----------
187 187 # startTime endTime
188 188
189 189 if endTime >= startTime:
190 190 if (thisTime_last_block < startTime) or (thisTime_first_block > endTime):
191 191 return None
192 192
193 193 return thisDatetime
194 194
195 195 # If endTime < startTime then endTime belongs to the next day
196 196
197 197 #<<<<<<<<<<<o o>>>>>>>>>>>
198 198 #-----------o----------------------------o-----------
199 199 # endTime startTime
200 200
201 201 if (thisDate == startDate) and (thisTime_last_block < startTime):
202 202 return None
203 203
204 204 if (thisDate == endDate) and (thisTime_first_block > endTime):
205 205 return None
206 206
207 207 if (thisTime_last_block < startTime) and (thisTime_first_block > endTime):
208 208 return None
209 209
210 210 return thisDatetime
211 211
212 212
213 213 def isFolderInDateRange(folder, startDate=None, endDate=None):
214 214 """
215 215 Retorna 1 si el archivo de datos se encuentra dentro del rango de horas especificado.
216 216
217 217 Inputs:
218 218 folder : nombre completo del directorio.
219 219 Su formato deberia ser "/path_root/?YYYYDDD"
220 220
221 221 siendo:
222 222 YYYY : Anio (ejemplo 2015)
223 223 DDD : Dia del anio (ejemplo 305)
224 224
225 225 startDate : fecha inicial del rango seleccionado en formato datetime.date
226 226
227 227 endDate : fecha final del rango seleccionado en formato datetime.date
228 228
229 229 Return:
230 230 Boolean : Retorna True si el archivo de datos contiene datos en el rango de
231 231 fecha especificado, de lo contrario retorna False.
232 232 Excepciones:
233 233 Si el directorio no tiene el formato adecuado
234 234 """
235 235
236 236 basename = os.path.basename(folder)
237 237
238 238 if not isRadarFolder(basename):
239 239 print("The folder %s has not the rigth format" % folder)
240 240 return 0
241 241
242 242 if startDate and endDate:
243 243 thisDate = getDateFromRadarFolder(basename)
244 244
245 245 if thisDate < startDate:
246 246 return 0
247 247
248 248 if thisDate > endDate:
249 249 return 0
250 250
251 251 return 1
252 252
253 253
254 254 def isFileInDateRange(filename, startDate=None, endDate=None):
255 255 """
256 256 Retorna 1 si el archivo de datos se encuentra dentro del rango de horas especificado.
257 257
258 258 Inputs:
259 259 filename : nombre completo del archivo de datos en formato Jicamarca (.r)
260 260
261 261 Su formato deberia ser "?YYYYDDDsss"
262 262
263 263 siendo:
264 264 YYYY : Anio (ejemplo 2015)
265 265 DDD : Dia del anio (ejemplo 305)
266 266 sss : set
267 267
268 268 startDate : fecha inicial del rango seleccionado en formato datetime.date
269 269
270 270 endDate : fecha final del rango seleccionado en formato datetime.date
271 271
272 272 Return:
273 273 Boolean : Retorna True si el archivo de datos contiene datos en el rango de
274 274 fecha especificado, de lo contrario retorna False.
275 275 Excepciones:
276 276 Si el archivo no tiene el formato adecuado
277 277 """
278 278
279 279 basename = os.path.basename(filename)
280 280
281 281 if not isRadarFile(basename):
282 282 print("The filename %s has not the rigth format" % filename)
283 283 return 0
284 284
285 285 if startDate and endDate:
286 286 thisDate = getDateFromRadarFile(basename)
287 287
288 288 if thisDate < startDate:
289 289 return 0
290 290
291 291 if thisDate > endDate:
292 292 return 0
293 293
294 294 return 1
295 295
296 296
297 297 def getFileFromSet(path, ext, set):
298 298 validFilelist = []
299 299 fileList = os.listdir(path)
300 300
301 301 # 0 1234 567 89A BCDE
302 302 # H YYYY DDD SSS .ext
303 303
304 304 for thisFile in fileList:
305 305 try:
306 306 year = int(thisFile[1:5])
307 307 doy = int(thisFile[5:8])
308 308 except:
309 309 continue
310 310
311 311 if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
312 312 continue
313 313
314 314 validFilelist.append(thisFile)
315 315
316 316 myfile = fnmatch.filter(
317 317 validFilelist, '*%4.4d%3.3d%3.3d*' % (year, doy, set))
318 318
319 319 if len(myfile) != 0:
320 320 return myfile[0]
321 321 else:
322 322 filename = '*%4.4d%3.3d%3.3d%s' % (year, doy, set, ext.lower())
323 323 print('the filename %s does not exist' % filename)
324 324 print('...going to the last file: ')
325 325
326 326 if validFilelist:
327 327 validFilelist = sorted(validFilelist, key=str.lower)
328 328 return validFilelist[-1]
329 329
330 330 return None
331 331
332 332
333 333 def getlastFileFromPath(path, ext):
334 334 """
335 335 Depura el fileList dejando solo los que cumplan el formato de "PYYYYDDDSSS.ext"
336 336 al final de la depuracion devuelve el ultimo file de la lista que quedo.
337 337
338 338 Input:
339 339 fileList : lista conteniendo todos los files (sin path) que componen una determinada carpeta
340 340 ext : extension de los files contenidos en una carpeta
341 341
342 342 Return:
343 343 El ultimo file de una determinada carpeta, no se considera el path.
344 344 """
345 345 validFilelist = []
346 346 fileList = os.listdir(path)
347 347
348 348 # 0 1234 567 89A BCDE
349 349 # H YYYY DDD SSS .ext
350 350
351 351 for thisFile in fileList:
352 352
353 353 year = thisFile[1:5]
354 354 if not isNumber(year):
355 355 continue
356 356
357 357 doy = thisFile[5:8]
358 358 if not isNumber(doy):
359 359 continue
360 360
361 361 year = int(year)
362 362 doy = int(doy)
363 363
364 364 if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
365 365 continue
366 366
367 367 validFilelist.append(thisFile)
368 368
369 369 if validFilelist:
370 370 validFilelist = sorted(validFilelist, key=str.lower)
371 371 return validFilelist[-1]
372 372
373 373 return None
374 374
375 375
376 376 def isRadarFolder(folder):
377 377 try:
378 378 year = int(folder[1:5])
379 379 doy = int(folder[5:8])
380 380 except:
381 381 return 0
382 382
383 383 return 1
384 384
385 385
386 386 def isRadarFile(file):
387 try:
387 try:
388 388 year = int(file[1:5])
389 389 doy = int(file[5:8])
390 390 set = int(file[8:11])
391 391 except:
392 392 return 0
393 393
394 394 return 1
395 395
396 396
397 397 def getDateFromRadarFile(file):
398 try:
398 try:
399 399 year = int(file[1:5])
400 400 doy = int(file[5:8])
401 set = int(file[8:11])
401 set = int(file[8:11])
402 402 except:
403 403 return None
404 404
405 405 thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy - 1)
406 406 return thisDate
407 407
408 408
409 409 def getDateFromRadarFolder(folder):
410 410 try:
411 411 year = int(folder[1:5])
412 412 doy = int(folder[5:8])
413 413 except:
414 414 return None
415 415
416 416 thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy - 1)
417 417 return thisDate
418 418
419 419 def parse_format(s, fmt):
420
420
421 421 for i in range(fmt.count('%')):
422 422 x = fmt.index('%')
423 423 d = DT_DIRECTIVES[fmt[x:x+2]]
424 424 fmt = fmt.replace(fmt[x:x+2], s[x:x+d])
425 425 return fmt
426 426
427 427 class Reader(object):
428 428
429 429 c = 3E8
430 430 isConfig = False
431 431 dtype = None
432 432 pathList = []
433 433 filenameList = []
434 434 datetimeList = []
435 435 filename = None
436 436 ext = None
437 437 flagIsNewFile = 1
438 438 flagDiscontinuousBlock = 0
439 439 flagIsNewBlock = 0
440 440 flagNoMoreFiles = 0
441 441 fp = None
442 442 firstHeaderSize = 0
443 443 basicHeaderSize = 24
444 444 versionFile = 1103
445 445 fileSize = None
446 446 fileSizeByHeader = None
447 447 fileIndex = -1
448 448 profileIndex = None
449 449 blockIndex = 0
450 450 nTotalBlocks = 0
451 451 maxTimeStep = 30
452 452 lastUTTime = None
453 453 datablock = None
454 454 dataOut = None
455 455 getByBlock = False
456 456 path = None
457 457 startDate = None
458 458 endDate = None
459 459 startTime = datetime.time(0, 0, 0)
460 460 endTime = datetime.time(23, 59, 59)
461 461 set = None
462 462 expLabel = ""
463 463 online = False
464 464 delay = 60
465 465 nTries = 3 # quantity tries
466 466 nFiles = 3 # number of files for searching
467 467 walk = True
468 468 getblock = False
469 469 nTxs = 1
470 470 realtime = False
471 471 blocksize = 0
472 472 blocktime = None
473 473 warnings = True
474 474 verbose = True
475 475 server = None
476 476 format = None
477 477 oneDDict = None
478 478 twoDDict = None
479 479 independentParam = None
480 480 filefmt = None
481 481 folderfmt = None
482 482 open_file = open
483 483 open_mode = 'rb'
484 484
485 485 def run(self):
486 486
487 raise NotImplementedError
487 raise NotImplementedError
488 488
489 489 def getAllowedArgs(self):
490 490 if hasattr(self, '__attrs__'):
491 491 return self.__attrs__
492 492 else:
493 493 return inspect.getargspec(self.run).args
494 494
495 495 def set_kwargs(self, **kwargs):
496 496
497 497 for key, value in kwargs.items():
498 498 setattr(self, key, value)
499
499
500 500 def find_folders(self, path, startDate, endDate, folderfmt, last=False):
501 501
502 folders = [x for f in path.split(',')
502 folders = [x for f in path.split(',')
503 503 for x in os.listdir(f) if os.path.isdir(os.path.join(f, x))]
504 504 folders.sort()
505 505
506 506 if last:
507 507 folders = [folders[-1]]
508 508
509 for folder in folders:
510 try:
511 dt = datetime.datetime.strptime(parse_format(folder, folderfmt), folderfmt).date()
509 for folder in folders:
510 try:
511 dt = datetime.datetime.strptime(parse_format(folder, folderfmt), folderfmt).date()
512 512 if dt >= startDate and dt <= endDate:
513 513 yield os.path.join(path, folder)
514 514 else:
515 515 log.log('Skiping folder {}'.format(folder), self.name)
516 516 except Exception as e:
517 517 log.log('Skiping folder {}'.format(folder), self.name)
518 518 continue
519 519 return
520
521 def find_files(self, folders, ext, filefmt, startDate=None, endDate=None,
520
521 def find_files(self, folders, ext, filefmt, startDate=None, endDate=None,
522 522 expLabel='', last=False):
523
524 for path in folders:
525 files = glob.glob1(path, '*{}'.format(ext))
523 for path in folders:
524 files = glob.glob1(path+'/'+expLabel, '*{}'.format(ext))
526 525 files.sort()
527 526 if last:
528 if files:
527 if files:
529 528 fo = files[-1]
530 try:
529 try:
531 530 dt = datetime.datetime.strptime(parse_format(fo, filefmt), filefmt).date()
532 yield os.path.join(path, expLabel, fo)
533 except Exception as e:
531 yield os.path.join(path, expLabel, fo)
532 except Exception as e:
534 533 pass
535 534 return
536 535 else:
537 536 return
538 537
539 538 for fo in files:
540 try:
541 dt = datetime.datetime.strptime(parse_format(fo, filefmt), filefmt).date()
539 try:
540 dt = datetime.datetime.strptime(parse_format(fo, filefmt), filefmt).date()
542 541 if dt >= startDate and dt <= endDate:
543 542 yield os.path.join(path, expLabel, fo)
544 543 else:
545 544 log.log('Skiping file {}'.format(fo), self.name)
546 545 except Exception as e:
547 546 log.log('Skiping file {}'.format(fo), self.name)
548 continue
547 continue
549 548
550 549 def searchFilesOffLine(self, path, startDate, endDate,
551 expLabel, ext, walk,
550 expLabel, ext, walk,
552 551 filefmt, folderfmt):
553 552 """Search files in offline mode for the given arguments
554 553
555 554 Return:
556 555 Generator of files
557 556 """
558 557
559 558 if walk:
560 559 folders = self.find_folders(
561 560 path, startDate, endDate, folderfmt)
562 561 else:
563 562 folders = path.split(',')
564 563
565 564 return self.find_files(
566 folders, ext, filefmt, startDate, endDate, expLabel)
565 folders, ext, filefmt, startDate, endDate, expLabel)
567 566
568 567 def searchFilesOnLine(self, path, startDate, endDate,
569 expLabel, ext, walk,
568 expLabel, ext, walk,
570 569 filefmt, folderfmt):
571 570 """Search for the last file of the last folder
572 571
573 572 Arguments:
574 573 path : carpeta donde estan contenidos los files que contiene data
575 574 expLabel : Nombre del subexperimento (subfolder)
576 575 ext : extension de los files
577 576 walk : Si es habilitado no realiza busquedas dentro de los ubdirectorios (doypath)
578 577
579 578 Return:
580 579 generator with the full path of last filename
581 580 """
582
581
583 582 if walk:
584 583 folders = self.find_folders(
585 584 path, startDate, endDate, folderfmt, last=True)
586 585 else:
587 586 folders = path.split(',')
588
587
589 588 return self.find_files(
590 589 folders, ext, filefmt, startDate, endDate, expLabel, last=True)
591 590
592 591 def setNextFile(self):
593 592 """Set the next file to be readed open it and parse de file header"""
594 593
595 594 while True:
596 595 if self.fp != None:
597 self.fp.close()
596 self.fp.close()
598 597
599 598 if self.online:
600 599 newFile = self.setNextFileOnline()
601 600 else:
602 601 newFile = self.setNextFileOffline()
603
604 602 if not(newFile):
605 603 if self.online:
606 604 raise schainpy.admin.SchainError('Time to wait for new files reach')
607 605 else:
608 606 if self.fileIndex == -1:
609 607 raise schainpy.admin.SchainWarning('No files found in the given path')
610 608 else:
611 609 raise schainpy.admin.SchainWarning('No more files to read')
612
610
613 611 if self.verifyFile(self.filename):
614 612 break
615
613
616 614 log.log('Opening file: %s' % self.filename, self.name)
617 615
618 616 self.readFirstHeader()
619 617 self.nReadBlocks = 0
620 618
621 619 def setNextFileOnline(self):
622 620 """Check for the next file to be readed in online mode.
623 621
624 622 Set:
625 623 self.filename
626 624 self.fp
627 625 self.filesize
628
626
629 627 Return:
630 628 boolean
631 629
632 630 """
631
633 632 nextFile = True
634 633 nextDay = False
635 634
636 for nFiles in range(self.nFiles+1):
635 for nFiles in range(self.nFiles+1):
637 636 for nTries in range(self.nTries):
638 637 fullfilename, filename = self.checkForRealPath(nextFile, nextDay)
639 638 if fullfilename is not None:
640 639 break
641 640 log.warning(
642 641 "Waiting %0.2f sec for the next file: \"%s\" , try %02d ..." % (self.delay, filename, nTries + 1),
643 642 self.name)
644 643 time.sleep(self.delay)
645 644 nextFile = False
646 continue
647
645 continue
646
648 647 if fullfilename is not None:
649 648 break
650
649
651 650 self.nTries = 1
652 nextFile = True
651 nextFile = True
653 652
654 653 if nFiles == (self.nFiles - 1):
655 654 log.log('Trying with next day...', self.name)
656 655 nextDay = True
657 self.nTries = 3
656 self.nTries = 3
658 657
659 658 if fullfilename:
660 659 self.fileSize = os.path.getsize(fullfilename)
661 660 self.filename = fullfilename
662 661 self.flagIsNewFile = 1
663 662 if self.fp != None:
664 663 self.fp.close()
665 664 self.fp = self.open_file(fullfilename, self.open_mode)
666 665 self.flagNoMoreFiles = 0
667 666 self.fileIndex += 1
668 667 return 1
669 else:
668 else:
670 669 return 0
671
670
672 671 def setNextFileOffline(self):
673 672 """Open the next file to be readed in offline mode"""
674
673
675 674 try:
676 675 filename = next(self.filenameList)
677 676 self.fileIndex +=1
678 677 except StopIteration:
679 678 self.flagNoMoreFiles = 1
680 return 0
679 return 0
681 680
682 681 self.filename = filename
683 682 self.fileSize = os.path.getsize(filename)
684 683 self.fp = self.open_file(filename, self.open_mode)
685 684 self.flagIsNewFile = 1
686 685
687 686 return 1
688
687
689 688 @staticmethod
690 689 def isDateTimeInRange(dt, startDate, endDate, startTime, endTime):
691 690 """Check if the given datetime is in range"""
692
691
693 692 if startDate <= dt.date() <= endDate:
694 693 if startTime <= dt.time() <= endTime:
695 694 return True
696 695 return False
697
696
698 697 def verifyFile(self, filename):
699 698 """Check for a valid file
700
699
701 700 Arguments:
702 701 filename -- full path filename
703
702
704 703 Return:
705 704 boolean
706 705 """
707 706
708 707 return True
709 708
710 709 def checkForRealPath(self, nextFile, nextDay):
711 710 """Check if the next file to be readed exists"""
712 711
713 712 raise NotImplementedError
714
713
715 714 def readFirstHeader(self):
716 715 """Parse the file header"""
717 716
718 717 pass
719 718
720 719 def waitDataBlock(self, pointer_location, blocksize=None):
721 720 """
722 721 """
723 722
724 723 currentPointer = pointer_location
725 724 if blocksize is None:
726 725 neededSize = self.processingHeaderObj.blockSize # + self.basicHeaderSize
727 726 else:
728 727 neededSize = blocksize
729 728
730 729 for nTries in range(self.nTries):
731 730 self.fp.close()
732 731 self.fp = open(self.filename, 'rb')
733 732 self.fp.seek(currentPointer)
734 733
735 734 self.fileSize = os.path.getsize(self.filename)
736 735 currentSize = self.fileSize - currentPointer
737 736
738 737 if (currentSize >= neededSize):
739 738 return 1
740 739
741 740 log.warning(
742 741 "Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries + 1),
743 742 self.name
744 743 )
745 744 time.sleep(self.delay)
746 745
747 746 return 0
748 747
749 748 class JRODataReader(Reader):
750 749
751 750 utc = 0
752 751 nReadBlocks = 0
753 752 foldercounter = 0
754 753 firstHeaderSize = 0
755 754 basicHeaderSize = 24
756 755 __isFirstTimeOnline = 1
757 756 filefmt = "*%Y%j***"
758 757 folderfmt = "*%Y%j"
759 758 __attrs__ = ['path', 'startDate', 'endDate', 'startTime', 'endTime', 'online', 'delay', 'walk']
760 759
761 760 def getDtypeWidth(self):
762 761
763 762 dtype_index = get_dtype_index(self.dtype)
764 763 dtype_width = get_dtype_width(dtype_index)
765 764
766 765 return dtype_width
767 766
768 767 def checkForRealPath(self, nextFile, nextDay):
769 768 """Check if the next file to be readed exists.
770 769
771 770 Example :
772 771 nombre correcto del file es .../.../D2009307/P2009307367.ext
773 772
774 773 Entonces la funcion prueba con las siguientes combinaciones
775 774 .../.../y2009307367.ext
776 775 .../.../Y2009307367.ext
777 776 .../.../x2009307/y2009307367.ext
778 777 .../.../x2009307/Y2009307367.ext
779 778 .../.../X2009307/y2009307367.ext
780 779 .../.../X2009307/Y2009307367.ext
781 780 siendo para este caso, la ultima combinacion de letras, identica al file buscado
782 781
783 782 Return:
784 783 str -- fullpath of the file
785 784 """
786
787
785
788 786 if nextFile:
789 787 self.set += 1
790 788 if nextDay:
791 789 self.set = 0
792 790 self.doy += 1
793 791 foldercounter = 0
794 792 prefixDirList = [None, 'd', 'D']
795 793 if self.ext.lower() == ".r": # voltage
796 794 prefixFileList = ['d', 'D']
797 795 elif self.ext.lower() == ".pdata": # spectra
798 796 prefixFileList = ['p', 'P']
799
797
800 798 # barrido por las combinaciones posibles
801 799 for prefixDir in prefixDirList:
802 800 thispath = self.path
803 801 if prefixDir != None:
804 802 # formo el nombre del directorio xYYYYDDD (x=d o x=D)
805 803 if foldercounter == 0:
806 804 thispath = os.path.join(self.path, "%s%04d%03d" %
807 805 (prefixDir, self.year, self.doy))
808 806 else:
809 807 thispath = os.path.join(self.path, "%s%04d%03d_%02d" % (
810 808 prefixDir, self.year, self.doy, foldercounter))
811 809 for prefixFile in prefixFileList: # barrido por las dos combinaciones posibles de "D"
812 810 # formo el nombre del file xYYYYDDDSSS.ext
813 811 filename = "%s%04d%03d%03d%s" % (prefixFile, self.year, self.doy, self.set, self.ext)
814 812 fullfilename = os.path.join(
815 813 thispath, filename)
816 814
817 815 if os.path.exists(fullfilename):
818 816 return fullfilename, filename
819
820 return None, filename
821
817
818 return None, filename
819
822 820 def __waitNewBlock(self):
823 821 """
824 822 Return 1 si se encontro un nuevo bloque de datos, 0 de otra forma.
825 823
826 824 Si el modo de lectura es OffLine siempre retorn 0
827 825 """
828 826 if not self.online:
829 827 return 0
830 828
831 829 if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
832 830 return 0
833 831
834 832 currentPointer = self.fp.tell()
835 833
836 834 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
837 835
838 836 for nTries in range(self.nTries):
839 837
840 838 self.fp.close()
841 839 self.fp = open(self.filename, 'rb')
842 840 self.fp.seek(currentPointer)
843 841
844 842 self.fileSize = os.path.getsize(self.filename)
845 843 currentSize = self.fileSize - currentPointer
846 844
847 845 if (currentSize >= neededSize):
848 846 self.basicHeaderObj.read(self.fp)
849 847 return 1
850 848
851 849 if self.fileSize == self.fileSizeByHeader:
852 850 # self.flagEoF = True
853 851 return 0
854 852
855 853 print("[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries + 1))
856 854 time.sleep(self.delay)
857 855
858 856 return 0
859 857
860 858 def __setNewBlock(self):
861 859
862 860 if self.fp == None:
863 return 0
864
865 if self.flagIsNewFile:
861 return 0
862
863 if self.flagIsNewFile:
866 864 self.lastUTTime = self.basicHeaderObj.utc
867 865 return 1
868 866
869 867 if self.realtime:
870 868 self.flagDiscontinuousBlock = 1
871 869 if not(self.setNextFile()):
872 870 return 0
873 871 else:
874 872 return 1
875 873
876 874 currentSize = self.fileSize - self.fp.tell()
877 875 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
878
876
879 877 if (currentSize >= neededSize):
880 878 self.basicHeaderObj.read(self.fp)
881 879 self.lastUTTime = self.basicHeaderObj.utc
882 880 return 1
883
881
884 882 if self.__waitNewBlock():
885 883 self.lastUTTime = self.basicHeaderObj.utc
886 884 return 1
887 885
888 886 if not(self.setNextFile()):
889 887 return 0
890 888
891 889 deltaTime = self.basicHeaderObj.utc - self.lastUTTime
892 890 self.lastUTTime = self.basicHeaderObj.utc
893 891
894 892 self.flagDiscontinuousBlock = 0
895
896 893 if deltaTime > self.maxTimeStep:
897 894 self.flagDiscontinuousBlock = 1
898 895
899 896 return 1
900 897
901 898 def readNextBlock(self):
902 899
903 900 while True:
904 901 if not(self.__setNewBlock()):
905 902 continue
906 903
907 904 if not(self.readBlock()):
908 905 return 0
909 906
910 907 self.getBasicHeader()
911 908
912 909 if not self.isDateTimeInRange(self.dataOut.datatime, self.startDate, self.endDate, self.startTime, self.endTime):
913 910 print("[Reading] Block No. %d/%d -> %s [Skipping]" % (self.nReadBlocks,
914 911 self.processingHeaderObj.dataBlocksPerFile,
915 912 self.dataOut.datatime.ctime()))
916 913 continue
917 914
918 915 break
919 916
920 917 if self.verbose:
921 918 print("[Reading] Block No. %d/%d -> %s" % (self.nReadBlocks,
922 919 self.processingHeaderObj.dataBlocksPerFile,
923 920 self.dataOut.datatime.ctime()))
924 921 return 1
925 922
926 923 def readFirstHeader(self):
927 924
928 925 self.basicHeaderObj.read(self.fp)
929 926 self.systemHeaderObj.read(self.fp)
930 927 self.radarControllerHeaderObj.read(self.fp)
931 928 self.processingHeaderObj.read(self.fp)
932 929 self.firstHeaderSize = self.basicHeaderObj.size
933 930
934 931 datatype = int(numpy.log2((self.processingHeaderObj.processFlags &
935 932 PROCFLAG.DATATYPE_MASK)) - numpy.log2(PROCFLAG.DATATYPE_CHAR))
936 933 if datatype == 0:
937 934 datatype_str = numpy.dtype([('real', '<i1'), ('imag', '<i1')])
938 935 elif datatype == 1:
939 936 datatype_str = numpy.dtype([('real', '<i2'), ('imag', '<i2')])
940 937 elif datatype == 2:
941 938 datatype_str = numpy.dtype([('real', '<i4'), ('imag', '<i4')])
942 939 elif datatype == 3:
943 940 datatype_str = numpy.dtype([('real', '<i8'), ('imag', '<i8')])
944 941 elif datatype == 4:
945 942 datatype_str = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
946 943 elif datatype == 5:
947 944 datatype_str = numpy.dtype([('real', '<f8'), ('imag', '<f8')])
948 945 else:
949 946 raise ValueError('Data type was not defined')
950 947
951 948 self.dtype = datatype_str
952 949 #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
953 950 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + \
954 951 self.firstHeaderSize + self.basicHeaderSize * \
955 952 (self.processingHeaderObj.dataBlocksPerFile - 1)
956 953 # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
957 954 # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
958 955 self.getBlockDimension()
959 956
960 957 def verifyFile(self, filename):
961 958
962 959 flag = True
963 960
964 961 try:
965 962 fp = open(filename, 'rb')
966 963 except IOError:
967 964 log.error("File {} can't be opened".format(filename), self.name)
968 965 return False
969
966
970 967 if self.online and self.waitDataBlock(0):
971 968 pass
972
969
973 970 basicHeaderObj = BasicHeader(LOCALTIME)
974 971 systemHeaderObj = SystemHeader()
975 972 radarControllerHeaderObj = RadarControllerHeader()
976 973 processingHeaderObj = ProcessingHeader()
977 974
978 975 if not(basicHeaderObj.read(fp)):
979 976 flag = False
980 977 if not(systemHeaderObj.read(fp)):
981 978 flag = False
982 979 if not(radarControllerHeaderObj.read(fp)):
983 980 flag = False
984 981 if not(processingHeaderObj.read(fp)):
985 982 flag = False
986 983 if not self.online:
987 984 dt1 = basicHeaderObj.datatime
988 985 pos = self.fileSize-processingHeaderObj.blockSize-24
989 986 if pos<0:
990 987 flag = False
991 988 log.error('Invalid size for file: {}'.format(self.filename), self.name)
992 989 else:
993 990 fp.seek(pos)
994 991 if not(basicHeaderObj.read(fp)):
995 992 flag = False
996 993 dt2 = basicHeaderObj.datatime
997 994 if not self.isDateTimeInRange(dt1, self.startDate, self.endDate, self.startTime, self.endTime) and not \
998 995 self.isDateTimeInRange(dt2, self.startDate, self.endDate, self.startTime, self.endTime):
999 flag = False
996 flag = False
1000 997
1001 998 fp.close()
1002 999 return flag
1003 1000
1004 1001 def findDatafiles(self, path, startDate=None, endDate=None, expLabel='', ext='.r', walk=True, include_path=False):
1005 1002
1006 1003 path_empty = True
1007 1004
1008 1005 dateList = []
1009 1006 pathList = []
1010 1007
1011 1008 multi_path = path.split(',')
1012 1009
1013 1010 if not walk:
1014 1011
1015 1012 for single_path in multi_path:
1016 1013
1017 1014 if not os.path.isdir(single_path):
1018 1015 continue
1019 1016
1020 1017 fileList = glob.glob1(single_path, "*" + ext)
1021 1018
1022 1019 if not fileList:
1023 1020 continue
1024 1021
1025 1022 path_empty = False
1026 1023
1027 1024 fileList.sort()
1028 1025
1029 1026 for thisFile in fileList:
1030 1027
1031 1028 if not os.path.isfile(os.path.join(single_path, thisFile)):
1032 1029 continue
1033 1030
1034 1031 if not isRadarFile(thisFile):
1035 1032 continue
1036 1033
1037 1034 if not isFileInDateRange(thisFile, startDate, endDate):
1038 1035 continue
1039 1036
1040 1037 thisDate = getDateFromRadarFile(thisFile)
1041 1038
1042 1039 if thisDate in dateList or single_path in pathList:
1043 1040 continue
1044 1041
1045 1042 dateList.append(thisDate)
1046 1043 pathList.append(single_path)
1047 1044
1048 1045 else:
1049 1046 for single_path in multi_path:
1050 1047
1051 1048 if not os.path.isdir(single_path):
1052 1049 continue
1053 1050
1054 1051 dirList = []
1055 1052
1056 1053 for thisPath in os.listdir(single_path):
1057 1054
1058 1055 if not os.path.isdir(os.path.join(single_path, thisPath)):
1059 1056 continue
1060 1057
1061 1058 if not isRadarFolder(thisPath):
1062 1059 continue
1063 1060
1064 1061 if not isFolderInDateRange(thisPath, startDate, endDate):
1065 1062 continue
1066 1063
1067 1064 dirList.append(thisPath)
1068 1065
1069 1066 if not dirList:
1070 1067 continue
1071 1068
1072 1069 dirList.sort()
1073 1070
1074 1071 for thisDir in dirList:
1075 1072
1076 1073 datapath = os.path.join(single_path, thisDir, expLabel)
1077 1074 fileList = glob.glob1(datapath, "*" + ext)
1078 1075
1079 1076 if not fileList:
1080 1077 continue
1081 1078
1082 1079 path_empty = False
1083 1080
1084 1081 thisDate = getDateFromRadarFolder(thisDir)
1085 1082
1086 1083 pathList.append(datapath)
1087 1084 dateList.append(thisDate)
1088 1085
1089 1086 dateList.sort()
1090 1087
1091 1088 if walk:
1092 1089 pattern_path = os.path.join(multi_path[0], "[dYYYYDDD]", expLabel)
1093 1090 else:
1094 1091 pattern_path = multi_path[0]
1095 1092
1096 1093 if path_empty:
1097 1094 raise schainpy.admin.SchainError("[Reading] No *%s files in %s for %s to %s" % (ext, pattern_path, startDate, endDate))
1098 1095 else:
1099 1096 if not dateList:
1100 1097 raise schainpy.admin.SchainError("[Reading] Date range selected invalid [%s - %s]: No *%s files in %s)" % (startDate, endDate, ext, path))
1101 1098
1102 1099 if include_path:
1103 1100 return dateList, pathList
1104 1101
1105 1102 return dateList
1106 1103
1107 1104 def setup(self, **kwargs):
1108
1105
1109 1106 self.set_kwargs(**kwargs)
1110 1107 if not self.ext.startswith('.'):
1111 1108 self.ext = '.{}'.format(self.ext)
1112
1109
1113 1110 if self.server is not None:
1114 1111 if 'tcp://' in self.server:
1115 1112 address = server
1116 1113 else:
1117 1114 address = 'ipc:///tmp/%s' % self.server
1118 1115 self.server = address
1119 1116 self.context = zmq.Context()
1120 1117 self.receiver = self.context.socket(zmq.PULL)
1121 1118 self.receiver.connect(self.server)
1122 1119 time.sleep(0.5)
1123 1120 print('[Starting] ReceiverData from {}'.format(self.server))
1124 1121 else:
1125 1122 self.server = None
1126 1123 if self.path == None:
1127 1124 raise ValueError("[Reading] The path is not valid")
1128 1125
1129 1126 if self.online:
1130 1127 log.log("[Reading] Searching files in online mode...", self.name)
1131 1128
1132 1129 for nTries in range(self.nTries):
1133 1130 fullpath = self.searchFilesOnLine(self.path, self.startDate,
1134 self.endDate, self.expLabel, self.ext, self.walk,
1131 self.endDate, self.expLabel, self.ext, self.walk,
1135 1132 self.filefmt, self.folderfmt)
1136 1133
1137 1134 try:
1138 1135 fullpath = next(fullpath)
1139 1136 except:
1140 1137 fullpath = None
1141
1138
1142 1139 if fullpath:
1143 1140 break
1144 1141
1145 1142 log.warning(
1146 1143 'Waiting {} sec for a valid file in {}: try {} ...'.format(
1147 self.delay, self.path, nTries + 1),
1144 self.delay, self.path, nTries + 1),
1148 1145 self.name)
1149 1146 time.sleep(self.delay)
1150 1147
1151 1148 if not(fullpath):
1152 1149 raise schainpy.admin.SchainError(
1153 'There isn\'t any valid file in {}'.format(self.path))
1150 'There isn\'t any valid file in {}'.format(self.path))
1154 1151
1155 1152 pathname, filename = os.path.split(fullpath)
1156 1153 self.year = int(filename[1:5])
1157 1154 self.doy = int(filename[5:8])
1158 self.set = int(filename[8:11]) - 1
1155 self.set = int(filename[8:11]) - 1
1159 1156 else:
1160 1157 log.log("Searching files in {}".format(self.path), self.name)
1161 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
1158 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
1162 1159 self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt)
1163
1160
1164 1161 self.setNextFile()
1165 1162
1166 1163 return
1167 1164
1168 1165 def getBasicHeader(self):
1169 1166
1170 1167 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond / \
1171 1168 1000. + self.profileIndex * self.radarControllerHeaderObj.ippSeconds
1172 1169
1173 1170 self.dataOut.flagDiscontinuousBlock = self.flagDiscontinuousBlock
1174 1171
1175 1172 self.dataOut.timeZone = self.basicHeaderObj.timeZone
1176 1173
1177 1174 self.dataOut.dstFlag = self.basicHeaderObj.dstFlag
1178 1175
1179 1176 self.dataOut.errorCount = self.basicHeaderObj.errorCount
1180 1177
1181 1178 self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime
1182 1179
1183 1180 self.dataOut.ippSeconds = self.radarControllerHeaderObj.ippSeconds / self.nTxs
1184
1181
1185 1182 def getFirstHeader(self):
1186 1183
1187 1184 raise NotImplementedError
1188 1185
1189 1186 def getData(self):
1190 1187
1191 1188 raise NotImplementedError
1192 1189
1193 1190 def hasNotDataInBuffer(self):
1194 1191
1195 1192 raise NotImplementedError
1196 1193
1197 1194 def readBlock(self):
1198 1195
1199 1196 raise NotImplementedError
1200 1197
1201 1198 def isEndProcess(self):
1202 1199
1203 1200 return self.flagNoMoreFiles
1204 1201
1205 1202 def printReadBlocks(self):
1206 1203
1207 1204 print("[Reading] Number of read blocks per file %04d" % self.nReadBlocks)
1208 1205
1209 1206 def printTotalBlocks(self):
1210 1207
1211 1208 print("[Reading] Number of read blocks %04d" % self.nTotalBlocks)
1212 1209
1213 1210 def run(self, **kwargs):
1214 1211 """
1215 1212
1216 1213 Arguments:
1217 path :
1218 startDate :
1214 path :
1215 startDate :
1219 1216 endDate :
1220 1217 startTime :
1221 1218 endTime :
1222 1219 set :
1223 1220 expLabel :
1224 1221 ext :
1225 1222 online :
1226 1223 delay :
1227 1224 walk :
1228 1225 getblock :
1229 1226 nTxs :
1230 1227 realtime :
1231 1228 blocksize :
1232 1229 blocktime :
1233 1230 skip :
1234 1231 cursor :
1235 1232 warnings :
1236 1233 server :
1237 1234 verbose :
1238 1235 format :
1239 1236 oneDDict :
1240 1237 twoDDict :
1241 1238 independentParam :
1242 1239 """
1243 1240
1244 1241 if not(self.isConfig):
1245 1242 self.setup(**kwargs)
1246 1243 self.isConfig = True
1247 1244 if self.server is None:
1248 1245 self.getData()
1249 1246 else:
1250 1247 self.getFromServer()
1251 1248
1252 1249
1253 1250 class JRODataWriter(Reader):
1254 1251
1255 1252 """
1256 1253 Esta clase permite escribir datos a archivos procesados (.r o ,pdata). La escritura
1257 1254 de los datos siempre se realiza por bloques.
1258 1255 """
1259 1256
1260 1257 setFile = None
1261 1258 profilesPerBlock = None
1262 1259 blocksPerFile = None
1263 1260 nWriteBlocks = 0
1264 1261 fileDate = None
1265 1262
1266 1263 def __init__(self, dataOut=None):
1267 1264 raise NotImplementedError
1268 1265
1269 1266 def hasAllDataInBuffer(self):
1270 1267 raise NotImplementedError
1271 1268
1272 1269 def setBlockDimension(self):
1273 1270 raise NotImplementedError
1274 1271
1275 1272 def writeBlock(self):
1276 1273 raise NotImplementedError
1277 1274
1278 1275 def putData(self):
1279 1276 raise NotImplementedError
1280 1277
1281 1278 def getDtypeWidth(self):
1282 1279
1283 1280 dtype_index = get_dtype_index(self.dtype)
1284 1281 dtype_width = get_dtype_width(dtype_index)
1285 1282
1286 1283 return dtype_width
1287
1284
1288 1285 def getProcessFlags(self):
1289 1286
1290 1287 processFlags = 0
1291 1288
1292 1289 dtype_index = get_dtype_index(self.dtype)
1293 1290 procflag_dtype = get_procflag_dtype(dtype_index)
1294 1291
1295 1292 processFlags += procflag_dtype
1296 1293
1297 1294 if self.dataOut.flagDecodeData:
1298 1295 processFlags += PROCFLAG.DECODE_DATA
1299 1296
1300 1297 if self.dataOut.flagDeflipData:
1301 1298 processFlags += PROCFLAG.DEFLIP_DATA
1302 1299
1303 1300 if self.dataOut.code is not None:
1304 1301 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
1305 1302
1306 1303 if self.dataOut.nCohInt > 1:
1307 1304 processFlags += PROCFLAG.COHERENT_INTEGRATION
1308 1305
1309 1306 if self.dataOut.type == "Spectra":
1310 1307 if self.dataOut.nIncohInt > 1:
1311 1308 processFlags += PROCFLAG.INCOHERENT_INTEGRATION
1312 1309
1313 1310 if self.dataOut.data_dc is not None:
1314 1311 processFlags += PROCFLAG.SAVE_CHANNELS_DC
1315 1312
1316 1313 if self.dataOut.flagShiftFFT:
1317 1314 processFlags += PROCFLAG.SHIFT_FFT_DATA
1318 1315
1319 1316 return processFlags
1320 1317
1321 1318 def setBasicHeader(self):
1322 1319
1323 1320 self.basicHeaderObj.size = self.basicHeaderSize # bytes
1324 1321 self.basicHeaderObj.version = self.versionFile
1325 self.basicHeaderObj.dataBlock = self.nTotalBlocks
1322 self.basicHeaderObj.dataBlock = self.nTotalBlocks
1326 1323 utc = numpy.floor(self.dataOut.utctime)
1327 milisecond = (self.dataOut.utctime - utc) * 1000.0
1324 milisecond = (self.dataOut.utctime - utc) * 1000.0
1328 1325 self.basicHeaderObj.utc = utc
1329 1326 self.basicHeaderObj.miliSecond = milisecond
1330 1327 self.basicHeaderObj.timeZone = self.dataOut.timeZone
1331 1328 self.basicHeaderObj.dstFlag = self.dataOut.dstFlag
1332 1329 self.basicHeaderObj.errorCount = self.dataOut.errorCount
1333 1330
1334 1331 def setFirstHeader(self):
1335 1332 """
1336 1333 Obtiene una copia del First Header
1337 1334
1338 1335 Affected:
1339 1336
1340 1337 self.basicHeaderObj
1341 1338 self.systemHeaderObj
1342 1339 self.radarControllerHeaderObj
1343 1340 self.processingHeaderObj self.
1344 1341
1345 1342 Return:
1346 1343 None
1347 1344 """
1348 1345
1349 1346 raise NotImplementedError
1350 1347
1351 1348 def __writeFirstHeader(self):
1352 1349 """
1353 1350 Escribe el primer header del file es decir el Basic header y el Long header (SystemHeader, RadarControllerHeader, ProcessingHeader)
1354 1351
1355 1352 Affected:
1356 1353 __dataType
1357 1354
1358 1355 Return:
1359 1356 None
1360 1357 """
1361 1358
1362 1359 # CALCULAR PARAMETROS
1363 1360
1364 1361 sizeLongHeader = self.systemHeaderObj.size + \
1365 1362 self.radarControllerHeaderObj.size + self.processingHeaderObj.size
1366 1363 self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader
1367 1364
1368 1365 self.basicHeaderObj.write(self.fp)
1369 1366 self.systemHeaderObj.write(self.fp)
1370 1367 self.radarControllerHeaderObj.write(self.fp)
1371 1368 self.processingHeaderObj.write(self.fp)
1372 1369
1373 1370 def __setNewBlock(self):
1374 1371 """
1375 1372 Si es un nuevo file escribe el First Header caso contrario escribe solo el Basic Header
1376 1373
1377 1374 Return:
1378 1375 0 : si no pudo escribir nada
1379 1376 1 : Si escribio el Basic el First Header
1380 1377 """
1381 1378 if self.fp == None:
1382 1379 self.setNextFile()
1383 1380
1384 1381 if self.flagIsNewFile:
1385 1382 return 1
1386 1383
1387 1384 if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
1388 1385 self.basicHeaderObj.write(self.fp)
1389 1386 return 1
1390 1387
1391 1388 if not(self.setNextFile()):
1392 1389 return 0
1393 1390
1394 1391 return 1
1395 1392
1396 1393 def writeNextBlock(self):
1397 1394 """
1398 1395 Selecciona el bloque siguiente de datos y los escribe en un file
1399 1396
1400 1397 Return:
1401 1398 0 : Si no hizo pudo escribir el bloque de datos
1402 1399 1 : Si no pudo escribir el bloque de datos
1403 1400 """
1404 1401 if not(self.__setNewBlock()):
1405 1402 return 0
1406 1403
1407 1404 self.writeBlock()
1408 1405
1409 1406 print("[Writing] Block No. %d/%d" % (self.blockIndex,
1410 1407 self.processingHeaderObj.dataBlocksPerFile))
1411 1408
1412 1409 return 1
1413 1410
1414 1411 def setNextFile(self):
1415 1412 """Determina el siguiente file que sera escrito
1416 1413
1417 1414 Affected:
1418 1415 self.filename
1419 1416 self.subfolder
1420 1417 self.fp
1421 1418 self.setFile
1422 1419 self.flagIsNewFile
1423 1420
1424 1421 Return:
1425 1422 0 : Si el archivo no puede ser escrito
1426 1423 1 : Si el archivo esta listo para ser escrito
1427 1424 """
1428 1425 ext = self.ext
1429 1426 path = self.path
1430 1427
1431 1428 if self.fp != None:
1432 1429 self.fp.close()
1433 1430
1434 1431 if not os.path.exists(path):
1435 1432 os.mkdir(path)
1436 1433
1437 1434 timeTuple = time.localtime(self.dataOut.utctime)
1438 1435 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year, timeTuple.tm_yday)
1439 1436
1440 1437 fullpath = os.path.join(path, subfolder)
1441 1438 setFile = self.setFile
1442 1439
1443 1440 if not(os.path.exists(fullpath)):
1444 1441 os.mkdir(fullpath)
1445 1442 setFile = -1 # inicializo mi contador de seteo
1446 1443 else:
1447 1444 filesList = os.listdir(fullpath)
1448 1445 if len(filesList) > 0:
1449 1446 filesList = sorted(filesList, key=str.lower)
1450 1447 filen = filesList[-1]
1451 1448 # el filename debera tener el siguiente formato
1452 1449 # 0 1234 567 89A BCDE (hex)
1453 1450 # x YYYY DDD SSS .ext
1454 1451 if isNumber(filen[8:11]):
1455 1452 # inicializo mi contador de seteo al seteo del ultimo file
1456 1453 setFile = int(filen[8:11])
1457 1454 else:
1458 1455 setFile = -1
1459 1456 else:
1460 1457 setFile = -1 # inicializo mi contador de seteo
1461 1458
1462 1459 setFile += 1
1463 1460
1464 1461 # If this is a new day it resets some values
1465 1462 if self.dataOut.datatime.date() > self.fileDate:
1466 1463 setFile = 0
1467 1464 self.nTotalBlocks = 0
1468
1465
1469 1466 filen = '{}{:04d}{:03d}{:03d}{}'.format(
1470 self.optchar, timeTuple.tm_year, timeTuple.tm_yday, setFile, ext)
1467 self.optchar, timeTuple.tm_year, timeTuple.tm_yday, setFile, ext)
1471 1468
1472 1469 filename = os.path.join(path, subfolder, filen)
1473 1470
1474 1471 fp = open(filename, 'wb')
1475 1472
1476 1473 self.blockIndex = 0
1477 1474 self.filename = filename
1478 1475 self.subfolder = subfolder
1479 1476 self.fp = fp
1480 1477 self.setFile = setFile
1481 1478 self.flagIsNewFile = 1
1482 1479 self.fileDate = self.dataOut.datatime.date()
1483 1480 self.setFirstHeader()
1484 1481
1485 1482 print('[Writing] Opening file: %s' % self.filename)
1486 1483
1487 1484 self.__writeFirstHeader()
1488 1485
1489 1486 return 1
1490 1487
1491 1488 def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=64, set=None, ext=None, datatype=4):
1492 1489 """
1493 1490 Setea el tipo de formato en la cual sera guardada la data y escribe el First Header
1494 1491
1495 1492 Inputs:
1496 1493 path : directory where data will be saved
1497 1494 profilesPerBlock : number of profiles per block
1498 1495 set : initial file set
1499 1496 datatype : An integer number that defines data type:
1500 1497 0 : int8 (1 byte)
1501 1498 1 : int16 (2 bytes)
1502 1499 2 : int32 (4 bytes)
1503 1500 3 : int64 (8 bytes)
1504 1501 4 : float32 (4 bytes)
1505 1502 5 : double64 (8 bytes)
1506 1503
1507 1504 Return:
1508 1505 0 : Si no realizo un buen seteo
1509 1506 1 : Si realizo un buen seteo
1510 1507 """
1511 1508
1512 1509 if ext == None:
1513 1510 ext = self.ext
1514 1511
1515 1512 self.ext = ext.lower()
1516 1513
1517 1514 self.path = path
1518
1515
1519 1516 if set is None:
1520 1517 self.setFile = -1
1521 1518 else:
1522 self.setFile = set - 1
1519 self.setFile = set - 1
1523 1520
1524 1521 self.blocksPerFile = blocksPerFile
1525 1522 self.profilesPerBlock = profilesPerBlock
1526 1523 self.dataOut = dataOut
1527 1524 self.fileDate = self.dataOut.datatime.date()
1528 1525 self.dtype = self.dataOut.dtype
1529 1526
1530 1527 if datatype is not None:
1531 1528 self.dtype = get_numpy_dtype(datatype)
1532 1529
1533 1530 if not(self.setNextFile()):
1534 1531 print("[Writing] There isn't a next file")
1535 1532 return 0
1536 1533
1537 1534 self.setBlockDimension()
1538 1535
1539 1536 return 1
1540 1537
1541 1538 def run(self, dataOut, path, blocksPerFile=100, profilesPerBlock=64, set=None, ext=None, datatype=4, **kwargs):
1542 1539
1543 1540 if not(self.isConfig):
1544 1541
1545 1542 self.setup(dataOut, path, blocksPerFile, profilesPerBlock=profilesPerBlock,
1546 1543 set=set, ext=ext, datatype=datatype, **kwargs)
1547 1544 self.isConfig = True
1548 1545
1549 1546 self.dataOut = dataOut
1550 1547 self.putData()
1551 1548 return self.dataOut
1552 1549
1553 1550 @MPDecorator
1554 1551 class printInfo(Operation):
1555 1552
1556 1553 def __init__(self):
1557 1554
1558 1555 Operation.__init__(self)
1559 1556 self.__printInfo = True
1560 1557
1561 1558 def run(self, dataOut, headers = ['systemHeaderObj', 'radarControllerHeaderObj', 'processingHeaderObj']):
1562 1559 if self.__printInfo == False:
1563 1560 return
1564 1561
1565 1562 for header in headers:
1566 1563 if hasattr(dataOut, header):
1567 1564 obj = getattr(dataOut, header)
1568 1565 if hasattr(obj, 'printInfo'):
1569 1566 obj.printInfo()
1570 1567 else:
1571 1568 print(obj)
1572 1569 else:
1573 1570 log.warning('Header {} Not found in object'.format(header))
1574 1571
1575 self.__printInfo = False
1572 self.__printInfo = False No newline at end of file
@@ -1,967 +1,966
1 1 # Copyright (c) 2012-2020 Jicamarca Radio Observatory
2 2 # All rights reserved.
3 3 #
4 4 # Distributed under the terms of the BSD 3-clause license.
5 5 """Spectra processing Unit and operations
6 6
7 7 Here you will find the processing unit `SpectraProc` and several operations
8 8 to work with Spectra data type
9 9 """
10 10
11 11 import time
12 12 import itertools
13 13 import numpy
14 14 # repositorio
15 15 from schainpy.model.proc.jroproc_base import ProcessingUnit, MPDecorator, Operation
16 16 from schainpy.model.data.jrodata import Spectra
17 17 from schainpy.model.data.jrodata import hildebrand_sekhon
18 18 from schainpy.utils import log
19 19
20 20
21 21 class SpectraProc(ProcessingUnit):
22 22
23 23 def __init__(self):
24 24
25 25 ProcessingUnit.__init__(self)
26 26
27 27 self.buffer = None
28 28 self.firstdatatime = None
29 29 self.profIndex = 0
30 30 self.dataOut = Spectra()
31 31 self.id_min = None
32 32 self.id_max = None
33 33 self.setupReq = False #Agregar a todas las unidades de proc
34 34
35 35 def __updateSpecFromVoltage(self):
36 36
37 37 self.dataOut.timeZone = self.dataIn.timeZone
38 38 self.dataOut.dstFlag = self.dataIn.dstFlag
39 39 self.dataOut.errorCount = self.dataIn.errorCount
40 40 self.dataOut.useLocalTime = self.dataIn.useLocalTime
41 41 try:
42 42 self.dataOut.processingHeaderObj = self.dataIn.processingHeaderObj.copy()
43 43 except:
44 44 pass
45 45 self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()
46 46 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()
47 47 self.dataOut.channelList = self.dataIn.channelList
48 48 self.dataOut.heightList = self.dataIn.heightList
49 49 self.dataOut.dtype = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
50 50 self.dataOut.nProfiles = self.dataOut.nFFTPoints
51 51 self.dataOut.flagDiscontinuousBlock = self.dataIn.flagDiscontinuousBlock
52 52 self.dataOut.utctime = self.firstdatatime
53 53 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData
54 54 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData
55 55 self.dataOut.flagShiftFFT = False
56 56 self.dataOut.nCohInt = self.dataIn.nCohInt
57 57 self.dataOut.nIncohInt = 1
58 58 self.dataOut.windowOfFilter = self.dataIn.windowOfFilter
59 59 self.dataOut.frequency = self.dataIn.frequency
60 60 self.dataOut.realtime = self.dataIn.realtime
61 61 self.dataOut.azimuth = self.dataIn.azimuth
62 62 self.dataOut.zenith = self.dataIn.zenith
63 63 self.dataOut.beam.codeList = self.dataIn.beam.codeList
64 64 self.dataOut.beam.azimuthList = self.dataIn.beam.azimuthList
65 65 self.dataOut.beam.zenithList = self.dataIn.beam.zenithList
66 66 self.dataOut.runNextUnit = self.dataIn.runNextUnit
67 67 try:
68 68 self.dataOut.step = self.dataIn.step
69 69 except:
70 70 pass
71 71
72 72 def __getFft(self):
73 73 """
74 74 Convierte valores de Voltaje a Spectra
75 75
76 76 Affected:
77 77 self.dataOut.data_spc
78 78 self.dataOut.data_cspc
79 79 self.dataOut.data_dc
80 80 self.dataOut.heightList
81 81 self.profIndex
82 82 self.buffer
83 83 self.dataOut.flagNoData
84 84 """
85 85 fft_volt = numpy.fft.fft(
86 86 self.buffer, n=self.dataOut.nFFTPoints, axis=1)
87 87 fft_volt = fft_volt.astype(numpy.dtype('complex'))
88 88 dc = fft_volt[:, 0, :]
89 89
90 90 # calculo de self-spectra
91 91 fft_volt = numpy.fft.fftshift(fft_volt, axes=(1,))
92 92 spc = fft_volt * numpy.conjugate(fft_volt)
93 93 spc = spc.real
94 94
95 95 blocksize = 0
96 96 blocksize += dc.size
97 97 blocksize += spc.size
98 98
99 99 cspc = None
100 100 pairIndex = 0
101 101 if self.dataOut.pairsList != None:
102 102 # calculo de cross-spectra
103 103 cspc = numpy.zeros(
104 104 (self.dataOut.nPairs, self.dataOut.nFFTPoints, self.dataOut.nHeights), dtype='complex')
105 105 for pair in self.dataOut.pairsList:
106 106 if pair[0] not in self.dataOut.channelList:
107 107 raise ValueError("Error getting CrossSpectra: pair 0 of %s is not in channelList = %s" % (
108 108 str(pair), str(self.dataOut.channelList)))
109 109 if pair[1] not in self.dataOut.channelList:
110 110 raise ValueError("Error getting CrossSpectra: pair 1 of %s is not in channelList = %s" % (
111 111 str(pair), str(self.dataOut.channelList)))
112 112
113 113 cspc[pairIndex, :, :] = fft_volt[pair[0], :, :] * \
114 114 numpy.conjugate(fft_volt[pair[1], :, :])
115 115 pairIndex += 1
116 116 blocksize += cspc.size
117 117
118 118 self.dataOut.data_spc = spc
119 119 self.dataOut.data_cspc = cspc
120 120 self.dataOut.data_dc = dc
121 121 self.dataOut.blockSize = blocksize
122 122 self.dataOut.flagShiftFFT = False
123 123
124 124 def run(self, nProfiles=None, nFFTPoints=None, pairsList=None, ippFactor=None, shift_fft=False, runNextUnit = 0):
125 125
126 126 self.dataIn.runNextUnit = runNextUnit
127 127 if self.dataIn.type == "Spectra":
128 128 self.dataOut.copy(self.dataIn)
129 129 if shift_fft:
130 130 #desplaza a la derecha en el eje 2 determinadas posiciones
131 131 shift = int(self.dataOut.nFFTPoints/2)
132 132 self.dataOut.data_spc = numpy.roll(self.dataOut.data_spc, shift , axis=1)
133 133
134 134 if self.dataOut.data_cspc is not None:
135 135 #desplaza a la derecha en el eje 2 determinadas posiciones
136 136 self.dataOut.data_cspc = numpy.roll(self.dataOut.data_cspc, shift, axis=1)
137 137 if pairsList:
138 138 self.__selectPairs(pairsList)
139 139
140 140 elif self.dataIn.type == "Voltage":
141 141
142 142 self.dataOut.flagNoData = True
143 143
144 144 if nFFTPoints == None:
145 145 raise ValueError("This SpectraProc.run() need nFFTPoints input variable")
146 146
147 147 if nProfiles == None:
148 148 nProfiles = nFFTPoints
149 149
150 150 if ippFactor == None:
151 151 self.dataOut.ippFactor = 1
152 152
153 153 self.dataOut.nFFTPoints = nFFTPoints
154 154
155 155 if self.buffer is None:
156 156 self.buffer = numpy.zeros((self.dataIn.nChannels,
157 157 nProfiles,
158 158 self.dataIn.nHeights),
159 159 dtype='complex')
160 160
161 161 if self.dataIn.flagDataAsBlock:
162 162 nVoltProfiles = self.dataIn.data.shape[1]
163 163 if nVoltProfiles == nProfiles:
164 164 self.buffer = self.dataIn.data.copy()
165 165 self.profIndex = nVoltProfiles
166 166
167 167 elif nVoltProfiles < nProfiles:
168 168
169 169 if self.profIndex == 0:
170 170 self.id_min = 0
171 171 self.id_max = nVoltProfiles
172 172
173 173 self.buffer[:, self.id_min:self.id_max,
174 174 :] = self.dataIn.data
175 175 self.profIndex += nVoltProfiles
176 176 self.id_min += nVoltProfiles
177 177 self.id_max += nVoltProfiles
178 178 elif nVoltProfiles > nProfiles:
179 179 self.reader.bypass = True
180 180 if self.profIndex == 0:
181 181 self.id_min = 0
182 182 self.id_max = nProfiles
183 183
184 184 self.buffer = self.dataIn.data[:, self.id_min:self.id_max,:]
185 185 self.profIndex += nProfiles
186 186 self.id_min += nProfiles
187 187 self.id_max += nProfiles
188 188 if self.id_max == nVoltProfiles:
189 189 self.reader.bypass = False
190 190
191 191 else:
192 192 raise ValueError("The type object %s has %d profiles, it should just has %d profiles" % (
193 193 self.dataIn.type, self.dataIn.data.shape[1], nProfiles))
194 194 self.dataOut.flagNoData = True
195 195 else:
196 196 self.buffer[:, self.profIndex, :] = self.dataIn.data.copy()
197 197 self.profIndex += 1
198 198
199 199 if self.firstdatatime == None:
200 200 self.firstdatatime = self.dataIn.utctime
201 201
202 202 if self.profIndex == nProfiles:
203 203 self.__updateSpecFromVoltage()
204 204 if pairsList == None:
205 205 self.dataOut.pairsList = [pair for pair in itertools.combinations(self.dataOut.channelList, 2)]
206 206 else:
207 207 self.dataOut.pairsList = pairsList
208 208 self.__getFft()
209 209 self.dataOut.flagNoData = False
210 210 self.firstdatatime = None
211 211 #if not self.reader.bypass:
212 212 self.profIndex = 0
213 213 else:
214 214 raise ValueError("The type of input object '{}' is not valid".format(
215 215 self.dataIn.type))
216 216
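
The Voltage branch above buffers complex profiles until nProfiles are collected, then computes auto- and cross-spectra (the cspc accumulation at the top of this hunk). A minimal standalone sketch of that FFT step, assuming voltages shaped (channels, profiles, heights); it is not the class's exact __getFft implementation (normalization, windowing and DC handling may differ):

import itertools
import numpy

def compute_spectra(volts, pairs=None):
    # volts: complex array (nChannels, nProfiles, nHeights)
    nch, nprof, nhei = volts.shape
    fft_volt = numpy.fft.fft(volts, axis=1) / nprof           # FFT along the profile axis
    spc = (fft_volt * numpy.conjugate(fft_volt)).real         # auto-spectra per channel
    dc = fft_volt[:, 0, :]                                    # DC component
    if pairs is None:
        pairs = list(itertools.combinations(range(nch), 2))
    cspc = numpy.array([fft_volt[p0] * numpy.conjugate(fft_volt[p1])
                        for p0, p1 in pairs])                 # cross-spectra per pair
    return spc, cspc, dc

# usage (random data, illustrative only):
# spc, cspc, dc = compute_spectra(numpy.random.randn(3, 64, 100) + 1j * numpy.random.randn(3, 64, 100))
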
217 217 def __selectPairs(self, pairsList):
218 218
219 219 if not pairsList:
220 220 return
221 221
222 222 pairs = []
223 223 pairsIndex = []
224 224
225 225 for pair in pairsList:
226 226 if pair[0] not in self.dataOut.channelList or pair[1] not in self.dataOut.channelList:
227 227 continue
228 228 pairs.append(pair)
229 229 pairsIndex.append(self.dataOut.pairsList.index(pair)) # index of the pair in the original pairsList
230 230
231 231 self.dataOut.data_cspc = self.dataOut.data_cspc[pairsIndex]
232 232 self.dataOut.pairsList = pairs
233 233
234 234 return
235 235
236 236 def selectFFTs(self, minFFT, maxFFT ):
237 237 """
238 238 Selects a block of data based on a range of FFT frequency values such that
239 239 minFFT <= FFT <= maxFFT
240 240 """
241 241
242 242 if (minFFT > maxFFT):
243 243 raise ValueError("Error selecting FFTs: range (%d,%d) is not valid" % (minFFT, maxFFT))
244 244
245 245 if (minFFT < self.dataOut.getFreqRange()[0]):
246 246 minFFT = self.dataOut.getFreqRange()[0]
247 247
248 248 if (maxFFT > self.dataOut.getFreqRange()[-1]):
249 249 maxFFT = self.dataOut.getFreqRange()[-1]
250 250
251 251 minIndex = 0
252 252 maxIndex = 0
253 253 FFTs = self.dataOut.getFreqRange()
254 254
255 255 inda = numpy.where(FFTs >= minFFT)
256 256 indb = numpy.where(FFTs <= maxFFT)
257 257
258 258 try:
259 259 minIndex = inda[0][0]
260 260 except:
261 261 minIndex = 0
262 262
263 263 try:
264 264 maxIndex = indb[0][-1]
265 265 except:
266 266 maxIndex = len(FFTs)
267 267
268 268 self.selectFFTsByIndex(minIndex, maxIndex)
269 269
270 270 return 1
271 271
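selectFFTs clamps the requested frequency range to the available range and maps it to bin indices before delegating to selectFFTsByIndex. A minimal sketch of that mapping, assuming a 1-D array of bin frequencies such as dataOut.getFreqRange() returns (the helper name is illustrative):

import numpy

def freq_range_to_indices(freqs, fmin, fmax):
    # freqs: 1-D array of FFT bin frequencies, ascending
    fmin = max(fmin, freqs[0])
    fmax = min(fmax, freqs[-1])
    inda = numpy.where(freqs >= fmin)[0]
    indb = numpy.where(freqs <= fmax)[0]
    minIndex = inda[0] if inda.size else 0
    maxIndex = indb[-1] if indb.size else freqs.size - 1
    return minIndex, maxIndex

# usage: i0, i1 = freq_range_to_indices(numpy.linspace(-5, 5, 64), -2.0, 2.0)
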
272 272 def getBeaconSignal(self, tauindex=0, channelindex=0, hei_ref=None):
273 273 newheis = numpy.where(
274 274 self.dataOut.heightList > self.dataOut.radarControllerHeaderObj.Taus[tauindex])
275 275
276 276 if hei_ref != None:
277 277 newheis = numpy.where(self.dataOut.heightList > hei_ref)
278 278
279 279 minIndex = min(newheis[0])
280 280 maxIndex = max(newheis[0])
281 281 data_spc = self.dataOut.data_spc[:, :, minIndex:maxIndex + 1]
282 282 heightList = self.dataOut.heightList[minIndex:maxIndex + 1]
283 283
284 284 # determine the indices
285 285 nheis = int(self.dataOut.radarControllerHeaderObj.txB /
286 286 (self.dataOut.heightList[1] - self.dataOut.heightList[0]))
287 287 avg_dB = 10 * \
288 288 numpy.log10(numpy.sum(data_spc[channelindex, :, :], axis=0))
289 289 beacon_dB = numpy.sort(avg_dB)[-nheis:]
290 290 beacon_heiIndexList = []
291 291 for val in avg_dB.tolist():
292 292 if val >= beacon_dB[0]:
293 293 beacon_heiIndexList.append(avg_dB.tolist().index(val))
294 294
295 295 data_cspc = None
296 296 if self.dataOut.data_cspc is not None:
297 297 data_cspc = self.dataOut.data_cspc[:, :, minIndex:maxIndex + 1]
298 298
299 299 data_dc = None
300 300 if self.dataOut.data_dc is not None:
301 301 data_dc = self.dataOut.data_dc[:, minIndex:maxIndex + 1]
302 302
303 303 self.dataOut.data_spc = data_spc
304 304 self.dataOut.data_cspc = data_cspc
305 305 self.dataOut.data_dc = data_dc
306 306 self.dataOut.heightList = heightList
307 307 self.dataOut.beacon_heiIndexList = beacon_heiIndexList
308 308
309 309 return 1
310 310
311 311 def selectFFTsByIndex(self, minIndex, maxIndex):
312 312 """
313 313
314 314 """
315 315
316 316 if (minIndex < 0) or (minIndex > maxIndex):
317 317 raise ValueError("Error selecting FFTs: index range (%d,%d) is not valid" % (minIndex, maxIndex))
318 318
319 319 if (maxIndex >= self.dataOut.nProfiles):
320 320 maxIndex = self.dataOut.nProfiles-1
321 321
322 322 #Spectra
323 323 data_spc = self.dataOut.data_spc[:,minIndex:maxIndex+1,:]
324 324
325 325 data_cspc = None
326 326 if self.dataOut.data_cspc is not None:
327 327 data_cspc = self.dataOut.data_cspc[:,minIndex:maxIndex+1,:]
328 328
329 329 data_dc = None
330 330 if self.dataOut.data_dc is not None:
331 331 data_dc = self.dataOut.data_dc[minIndex:maxIndex+1,:]
332 332
333 333 self.dataOut.data_spc = data_spc
334 334 self.dataOut.data_cspc = data_cspc
335 335 self.dataOut.data_dc = data_dc
336 336
337 337 self.dataOut.ippSeconds = self.dataOut.ippSeconds*(self.dataOut.nFFTPoints / numpy.shape(data_spc)[1])
338 338 self.dataOut.nFFTPoints = numpy.shape(data_spc)[1]
339 339 self.dataOut.profilesPerBlock = numpy.shape(data_spc)[1]
340 340
341 341 return 1
342 342
343 343 def getNoise(self, minHei=None, maxHei=None, minVel=None, maxVel=None):
344 344 # height range validation
345 345 if minHei == None:
346 346 minHei = self.dataOut.heightList[0]
347 347
348 348 if maxHei == None:
349 349 maxHei = self.dataOut.heightList[-1]
350 350
351 351 if (minHei < self.dataOut.heightList[0]) or (minHei > maxHei):
352 352 print('minHei: %.2f is out of the height range' % (minHei))
353 353 print('minHei is being set to %.2f' % (self.dataOut.heightList[0]))
354 354 minHei = self.dataOut.heightList[0]
355 355
356 356 if (maxHei > self.dataOut.heightList[-1]) or (maxHei < minHei):
357 357 print('maxHei: %.2f is out of the height range' % (maxHei))
358 358 print('maxHei is being set to %.2f' % (self.dataOut.heightList[-1]))
359 359 maxHei = self.dataOut.heightList[-1]
360 360
361 361 # velocity range validation
362 362 velrange = self.dataOut.getVelRange(1)
363 363
364 364 if minVel == None:
365 365 minVel = velrange[0]
366 366
367 367 if maxVel == None:
368 368 maxVel = velrange[-1]
369 369
370 370 if (minVel < velrange[0]) or (minVel > maxVel):
371 371 print('minVel: %.2f is out of the velocity range' % (minVel))
372 372 print('minVel is being set to %.2f' % (velrange[0]))
373 373 minVel = velrange[0]
374 374
375 375 if (maxVel > velrange[-1]) or (maxVel < minVel):
376 376 print('maxVel: %.2f is out of the velocity range' % (maxVel))
377 377 print('maxVel is being set to %.2f' % (velrange[-1]))
378 378 maxVel = velrange[-1]
379 379
380 380 # index selection for the height range
381 381 minIndex = 0
382 382 maxIndex = 0
383 383 heights = self.dataOut.heightList
384 384
385 385 inda = numpy.where(heights >= minHei)
386 386 indb = numpy.where(heights <= maxHei)
387 387
388 388 try:
389 389 minIndex = inda[0][0]
390 390 except:
391 391 minIndex = 0
392 392
393 393 try:
394 394 maxIndex = indb[0][-1]
395 395 except:
396 396 maxIndex = len(heights)
397 397
398 398 if (minIndex < 0) or (minIndex > maxIndex):
399 399 raise ValueError("Error selecting heights: index range (%d,%d) is not valid" % (
400 400 minIndex, maxIndex))
401 401
402 402 if (maxIndex >= self.dataOut.nHeights):
403 403 maxIndex = self.dataOut.nHeights - 1
404 404
405 405 # index selection for the velocity range
406 406 indminvel = numpy.where(velrange >= minVel)
407 407 indmaxvel = numpy.where(velrange <= maxVel)
408 408 try:
409 409 minIndexVel = indminvel[0][0]
410 410 except:
411 411 minIndexVel = 0
412 412
413 413 try:
414 414 maxIndexVel = indmaxvel[0][-1]
415 415 except:
416 416 maxIndexVel = len(velrange)
417 417
418 418 # spectrum selection
419 419 data_spc = self.dataOut.data_spc[:,
420 420 minIndexVel:maxIndexVel + 1, minIndex:maxIndex + 1]
421 421 # noise estimation
422 422 noise = numpy.zeros(self.dataOut.nChannels)
423 423
424 424 for channel in range(self.dataOut.nChannels):
425 425 daux = data_spc[channel, :, :]
426 426 sortdata = numpy.sort(daux, axis=None)
427 427 noise[channel] = hildebrand_sekhon(sortdata, self.dataOut.nIncohInt)
428 428
429 429 self.dataOut.noise_estimation = noise.copy()
430 430
431 431 return 1
432 432
433
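getNoise sorts the selected spectral samples of each channel and passes them to hildebrand_sekhon together with nIncohInt; that helper is defined elsewhere in schainpy. A rough, illustrative sketch of the Hildebrand-Sekhon criterion it implements (details of the real helper may differ): the noise is the mean of the largest low-power subset whose variance is consistent with white noise after navg incoherent integrations.

import numpy

def hildebrand_sekhon_sketch(sorted_spectrum, navg):
    # sorted_spectrum: 1-D power samples sorted in ascending order
    # navg: number of incoherent integrations
    sump = 0.0
    sumsq = 0.0
    n = 0
    for p in sorted_spectrum:
        sump += p
        sumsq += p * p
        n += 1
        if n > 1:
            mean = sump / n
            var = sumsq / n - mean * mean
            # white-noise criterion: variance of pure noise is about mean**2 / navg
            if var * navg > mean * mean:
                # the last sample already contains signal; discard it and stop
                sump -= p
                n -= 1
                break
    return sump / n

# usage: noise = hildebrand_sekhon_sketch(numpy.sort(spectrum.ravel()), navg=10)
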
434 433 class GetSNR(Operation):
435 434 '''
436 435 Written by R. Flores
437 436 '''
438 437 """Operation to get SNR.
439 438
440 439 Parameters:
441 440 -----------
442 441
443 442 Example
444 443 --------
445 444
446 445 op = proc_unit.addOperation(name='GetSNR', optype='other')
447 446
448 447 """
449 448
450 449 def __init__(self, **kwargs):
451 450
452 451 Operation.__init__(self, **kwargs)
453 452
454 453 def run(self,dataOut):
455 454
456 455 noise = dataOut.getNoise(ymin_index=-10) # upper region where there should only be noise
457 456 dataOut.data_snr = (dataOut.data_spc.sum(axis=1)-noise[:,None]*dataOut.nFFTPoints)/(noise[:,None]*dataOut.nFFTPoints) #It works apparently
458 457 dataOut.snl = numpy.log10(dataOut.data_snr)
459 458 dataOut.snl = numpy.where(dataOut.data_snr<.01, numpy.nan, dataOut.snl)
460 459
461 460 return dataOut
462 461
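The SNR formula above integrates each channel's spectrum over all FFT points, subtracts the estimated noise floor (noise level times nFFTPoints) and normalizes by that same floor; SNL is then log10 of the SNR with low-SNR cells masked. A toy, self-contained example of the same arithmetic with made-up numbers:

import numpy

nFFTPoints = 8
spc = numpy.random.rand(2, nFFTPoints, 5) + 1.0   # hypothetical power spectra: (channels, FFT points, heights)
noise = numpy.array([0.8, 0.9])                   # hypothetical per-channel noise level

snr = (spc.sum(axis=1) - noise[:, None] * nFFTPoints) / (noise[:, None] * nFFTPoints)
snl = numpy.where(snr < 0.01, numpy.nan, numpy.log10(snr))   # mask cells with SNR below 0.01
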
463 462 class removeDC(Operation):
464 463
465 464 def run(self, dataOut, mode=2):
466 465 self.dataOut = dataOut
467 466 jspectra = self.dataOut.data_spc
468 467 jcspectra = self.dataOut.data_cspc
469 468
470 469 num_chan = jspectra.shape[0]
471 470 num_hei = jspectra.shape[2]
472 471
473 472 if jcspectra is not None:
474 473 jcspectraExist = True
475 474 num_pairs = jcspectra.shape[0]
476 475 else:
477 476 jcspectraExist = False
478 477
479 478 freq_dc = int(jspectra.shape[1] / 2)
480 479 ind_vel = numpy.array([-2, -1, 1, 2]) + freq_dc
481 480 ind_vel = ind_vel.astype(int)
482 481
483 482 if ind_vel[0] < 0:
484 483 ind_vel[:1] = ind_vel[:1] + jspectra.shape[1] # wrap a negative index by the number of profiles
485 484
486 485 if mode == 1:
487 486 jspectra[:, freq_dc, :] = (
488 487 jspectra[:, ind_vel[1], :] + jspectra[:, ind_vel[2], :]) / 2 # CORRECTION
489 488
490 489 if jcspectraExist:
491 490 jcspectra[:, freq_dc, :] = (
492 491 jcspectra[:, ind_vel[1], :] + jcspectra[:, ind_vel[2], :]) / 2
493 492
494 493 if mode == 2:
495 494
496 495 vel = numpy.array([-2, -1, 1, 2])
497 496 xx = numpy.zeros([4, 4])
498 497
499 498 for fil in range(4):
500 499 xx[fil, :] = vel[fil]**numpy.asarray(list(range(4)))
501 500
502 501 xx_inv = numpy.linalg.inv(xx)
503 502 xx_aux = xx_inv[0, :]
504 503
505 504 for ich in range(num_chan):
506 505 yy = jspectra[ich, ind_vel, :]
507 506 jspectra[ich, freq_dc, :] = numpy.dot(xx_aux, yy)
508 507
509 508 junkid = jspectra[ich, freq_dc, :] <= 0
510 509 cjunkid = sum(junkid)
511 510
512 511 if cjunkid.any():
513 512 jspectra[ich, freq_dc, junkid.nonzero()] = (
514 513 jspectra[ich, ind_vel[1], junkid] + jspectra[ich, ind_vel[2], junkid]) / 2
515 514
516 515 if jcspectraExist:
517 516 for ip in range(num_pairs):
518 517 yy = jcspectra[ip, ind_vel, :]
519 518 jcspectra[ip, freq_dc, :] = numpy.dot(xx_aux, yy)
520 519
521 520 self.dataOut.data_spc = jspectra
522 521 self.dataOut.data_cspc = jcspectra
523 522
524 523 return self.dataOut
525 524
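In mode 2 the DC bin is replaced by a cubic polynomial fit through the four neighbouring Doppler bins at offsets [-2, -1, 1, 2]. Because the cubic passes exactly through those four samples, its value at offset 0 is the first row of the inverse Vandermonde matrix dotted with the samples, which is what the xx_inv[0, :] weights above compute. A standalone sketch for a single channel/height spectrum (the function name is illustrative):

import numpy

def interpolate_dc(profile):
    # profile: 1-D power spectrum for one channel and one height
    nprof = profile.size
    freq_dc = nprof // 2
    offsets = numpy.array([-2, -1, 1, 2])
    ind_vel = (offsets + freq_dc) % nprof
    # Vandermonde matrix of the offsets; row k is offsets[k]**[0, 1, 2, 3]
    xx = numpy.vander(offsets, 4, increasing=True).astype(float)
    # first row of the inverse evaluates the cubic fit at offset 0
    weights = numpy.linalg.inv(xx)[0, :]
    out = profile.copy()
    out[freq_dc] = numpy.dot(weights, profile[ind_vel])
    return out

# usage: cleaned = interpolate_dc(numpy.abs(numpy.random.randn(64)))
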
526 525 class removeInterference(Operation):
527 526
528 527 def removeInterference2(self):
529 528
530 529 cspc = self.dataOut.data_cspc
531 530 spc = self.dataOut.data_spc
532 531 Heights = numpy.arange(cspc.shape[2])
533 532 realCspc = numpy.abs(cspc)
534 533
535 534 for i in range(cspc.shape[0]):
536 535 LinePower= numpy.sum(realCspc[i], axis=0)
537 536 Threshold = numpy.amax(LinePower)-numpy.sort(LinePower)[len(Heights)-int(len(Heights)*0.1)]
538 SelectedHeights = Heights[ numpy.where(LinePower < Threshold) ]
539 InterferenceSum = numpy.sum(realCspc[i,:,SelectedHeights],axis=0)
537 SelectedHeights = Heights[ numpy.where( LinePower < Threshold ) ]
538 InterferenceSum = numpy.sum( realCspc[i,:,SelectedHeights], axis=0 )
540 539 InterferenceThresholdMin = numpy.sort(InterferenceSum)[int(len(InterferenceSum)*0.98)]
541 540 InterferenceThresholdMax = numpy.sort(InterferenceSum)[int(len(InterferenceSum)*0.99)]
542
543
544 InterferenceRange = numpy.where(([InterferenceSum > InterferenceThresholdMin]))# , InterferenceSum < InterferenceThresholdMax]) )
541
542
543 InterferenceRange = numpy.where( ([InterferenceSum > InterferenceThresholdMin]))# , InterferenceSum < InterferenceThresholdMax]) )
545 544 #InterferenceRange = numpy.where( ([InterferenceRange < InterferenceThresholdMax]))
546 545 if len(InterferenceRange)<int(cspc.shape[1]*0.3):
547 546 cspc[i,InterferenceRange,:] = numpy.nan
548
547
549 548 self.dataOut.data_cspc = cspc
550 549
551 550 def removeInterference(self, interf=2, hei_interf=None, nhei_interf=None, offhei_interf=None):
552 551
553 552 jspectra = self.dataOut.data_spc
554 553 jcspectra = self.dataOut.data_cspc
555 554 jnoise = self.dataOut.getNoise()
556 555 num_incoh = self.dataOut.nIncohInt
557 556
558 557 num_channel = jspectra.shape[0]
559 558 num_prof = jspectra.shape[1]
560 559 num_hei = jspectra.shape[2]
561 560
562 561 # hei_interf
563 562 if hei_interf is None:
564 563 count_hei = int(num_hei / 2)
565 564 hei_interf = numpy.asmatrix(list(range(count_hei))) + num_hei - count_hei
566 565 hei_interf = numpy.asarray(hei_interf)[0]
567 566 # nhei_interf
568 567 if (nhei_interf == None):
569 568 nhei_interf = 5
570 569 if (nhei_interf < 1):
571 570 nhei_interf = 1
572 571 if (nhei_interf > count_hei):
573 572 nhei_interf = count_hei
574 573 if (offhei_interf == None):
575 574 offhei_interf = 0
576 575
577 576 ind_hei = list(range(num_hei))
578 577 # mask_prof = numpy.asarray(range(num_prof - 2)) + 1
579 578 # mask_prof[range(num_prof/2 - 1,len(mask_prof))] += 1
580 579 mask_prof = numpy.asarray(list(range(num_prof)))
581 580 num_mask_prof = mask_prof.size
582 581 comp_mask_prof = [0, num_prof / 2]
583 582
584 583 # noise_exist: determines whether jnoise is defined and holds the noise of every channel
585 584 if (jnoise.size < num_channel or numpy.isnan(jnoise).any()):
586 585 jnoise = numpy.full(num_channel, numpy.nan)
587 586 noise_exist = jnoise[0] < numpy.inf
588 587
589 588 # Interference removal subroutine
590 589 for ich in range(num_channel):
591 590 # Sort the spectra by their power (lowest to highest)
592 591 power = jspectra[ich, mask_prof, :]
593 592 power = power[:, hei_interf]
594 593 power = power.sum(axis=0)
595 594 psort = power.ravel().argsort()
596 595
597 596 # Estimate the average interference in the power spectra using the lowest-power heights
598 597 junkspc_interf = jspectra[ich, :, hei_interf[psort[list(range(
599 598 offhei_interf, nhei_interf + offhei_interf))]]]
600 599
601 600 if noise_exist:
602 601 # tmp_noise = jnoise[ich] / num_prof
603 602 tmp_noise = jnoise[ich]
604 603 junkspc_interf = junkspc_interf - tmp_noise
605 604 #junkspc_interf[:,comp_mask_prof] = 0
606 605
607 606 jspc_interf = junkspc_interf.sum(axis=0) / nhei_interf
608 607 jspc_interf = jspc_interf.transpose()
609 608 # Compute the average interference spectrum
610 609 noiseid = numpy.where(
611 610 jspc_interf <= tmp_noise / numpy.sqrt(num_incoh))
612 611 noiseid = noiseid[0]
613 612 cnoiseid = noiseid.size
614 613 interfid = numpy.where(
615 614 jspc_interf > tmp_noise / numpy.sqrt(num_incoh))
616 615 interfid = interfid[0]
617 616 cinterfid = interfid.size
618 617
619 618 if (cnoiseid > 0):
620 619 jspc_interf[noiseid] = 0
621 620
622 621 # Expand the set of profiles to clean
623 622 if (cinterfid > 0):
624 623 new_interfid = (
625 624 numpy.r_[interfid - 1, interfid, interfid + 1] + num_prof) % num_prof
626 625 new_interfid = numpy.asarray(new_interfid)
627 626 new_interfid = {x for x in new_interfid}
628 627 new_interfid = numpy.array(list(new_interfid))
629 628 new_cinterfid = new_interfid.size
630 629 else:
631 630 new_cinterfid = 0
632 631
633 632 for ip in range(new_cinterfid):
634 633 ind = junkspc_interf[:, new_interfid[ip]].ravel().argsort()
635 634 jspc_interf[new_interfid[ip]
636 635 ] = junkspc_interf[ind[nhei_interf // 2], new_interfid[ip]]
637 636
638 637 jspectra[ich, :, ind_hei] = jspectra[ich, :,
639 638 ind_hei] - jspc_interf # fix indices
640 639
641 640 # Remove the interference at the profile with the strongest interference
642 641 ListAux = jspc_interf[mask_prof].tolist()
643 642 maxid = ListAux.index(max(ListAux))
644 643
645 644 if cinterfid > 0:
646 645 for ip in range(cinterfid * (interf == 2) - 1):
647 646 ind = (jspectra[ich, interfid[ip], :] < tmp_noise *
648 647 (1 + 1 / numpy.sqrt(num_incoh))).nonzero()
649 648 cind = len(ind)
650 649
651 650 if (cind > 0):
652 651 jspectra[ich, interfid[ip], ind] = tmp_noise * \
653 652 (1 + (numpy.random.uniform(cind) - 0.5) /
654 653 numpy.sqrt(num_incoh))
655 654
656 655 ind = numpy.array([-2, -1, 1, 2])
657 656 xx = numpy.zeros([4, 4])
658 657
659 658 for id1 in range(4):
660 659 xx[:, id1] = ind[id1]**numpy.asarray(list(range(4)))
661 660
662 661 xx_inv = numpy.linalg.inv(xx)
663 662 xx = xx_inv[:, 0]
664 663 ind = (ind + maxid + num_mask_prof) % num_mask_prof
665 664 yy = jspectra[ich, mask_prof[ind], :]
666 665 jspectra[ich, mask_prof[maxid], :] = numpy.dot(
667 666 yy.transpose(), xx)
668 667
669 668 indAux = (jspectra[ich, :, :] < tmp_noise *
670 669 (1 - 1 / numpy.sqrt(num_incoh))).nonzero()
671 670 jspectra[ich, indAux[0], indAux[1]] = tmp_noise * \
672 671 (1 - 1 / numpy.sqrt(num_incoh))
673 672
674 673 # Interference removal in the cross-spectra
675 674 if jcspectra is None:
676 675 return jspectra, jcspectra
677 676 num_pairs = int(jcspectra.size / (num_prof * num_hei))
678 677 jcspectra = jcspectra.reshape(num_pairs, num_prof, num_hei)
679 678
680 679 for ip in range(num_pairs):
681 680
682 681 #-------------------------------------------
683 682
684 683 cspower = numpy.abs(jcspectra[ip, mask_prof, :])
685 684 cspower = cspower[:, hei_interf]
686 685 cspower = cspower.sum(axis=0)
687 686
688 687 cspsort = cspower.ravel().argsort()
689 688 junkcspc_interf = jcspectra[ip, :, hei_interf[cspsort[list(range(
690 689 offhei_interf, nhei_interf + offhei_interf))]]]
691 690 junkcspc_interf = junkcspc_interf.transpose()
692 691 jcspc_interf = junkcspc_interf.sum(axis=1) / nhei_interf
693 692
694 693 ind = numpy.abs(jcspc_interf[mask_prof]).ravel().argsort()
695 694
696 695 median_real = int(numpy.median(numpy.real(
697 696 junkcspc_interf[mask_prof[ind[list(range(3 * num_prof // 4))]], :])))
698 697 median_imag = int(numpy.median(numpy.imag(
699 698 junkcspc_interf[mask_prof[ind[list(range(3 * num_prof // 4))]], :])))
700 699 comp_mask_prof = [int(e) for e in comp_mask_prof]
701 700 junkcspc_interf[comp_mask_prof, :] = complex(
702 701 median_real, median_imag)
703 702
704 703 for iprof in range(num_prof):
705 704 ind = numpy.abs(junkcspc_interf[iprof, :]).ravel().argsort()
706 705 jcspc_interf[iprof] = junkcspc_interf[iprof, ind[nhei_interf // 2]]
707 706
708 707 # Remove the interference
709 708 jcspectra[ip, :, ind_hei] = jcspectra[ip,
710 709 :, ind_hei] - jcspc_interf
711 710
712 711 ListAux = numpy.abs(jcspc_interf[mask_prof]).tolist()
713 712 maxid = ListAux.index(max(ListAux))
714 713
715 714 ind = numpy.array([-2, -1, 1, 2])
716 715 xx = numpy.zeros([4, 4])
717 716
718 717 for id1 in range(4):
719 718 xx[:, id1] = ind[id1]**numpy.asarray(list(range(4)))
720 719
721 720 xx_inv = numpy.linalg.inv(xx)
722 721 xx = xx_inv[:, 0]
723 722
724 723 ind = (ind + maxid + num_mask_prof) % num_mask_prof
725 724 yy = jcspectra[ip, mask_prof[ind], :]
726 725 jcspectra[ip, mask_prof[maxid], :] = numpy.dot(yy.transpose(), xx)
727 726
728 727 # Save the results
729 728 self.dataOut.data_spc = jspectra
730 729 self.dataOut.data_cspc = jcspectra
731 730
732 731 return 1
733 732
734 733 def run(self, dataOut, interf=2,hei_interf=None, nhei_interf=None, offhei_interf=None, mode=1):
735 734
736 735 self.dataOut = dataOut
737 736
738 737 if mode == 1:
739 738 self.removeInterference(interf=interf, hei_interf=hei_interf, nhei_interf=nhei_interf, offhei_interf=offhei_interf)
740 739 elif mode == 2:
741 740 self.removeInterference2()
742 741
743 742 return self.dataOut
744 743
745 744
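removeInterference characterizes the interference per channel from the quietest heights in the upper part of the range (hei_interf), averages their spectra, thresholds against the noise, and subtracts the result from every height; a second pass cleans the cross-spectra in the same way. A much-reduced sketch of the core idea, leaving out the noise thresholding, profile expansion and cross-spectra handling (the function name and the plain averaging are illustrative only):

import numpy

def estimate_interference(jspectra, hei_interf, nhei_interf=5):
    # jspectra: (nChannels, nProfiles, nHeights) power spectra
    # hei_interf: numpy array of "clean" height indices used to characterize the interference
    cleaned = jspectra.copy()
    for ich in range(jspectra.shape[0]):
        power = jspectra[ich][:, hei_interf].sum(axis=0)      # total power per candidate height
        quiet = hei_interf[power.argsort()[:nhei_interf]]     # lowest-power heights
        interf = jspectra[ich][:, quiet].mean(axis=1)         # average interference profile
        cleaned[ich] -= interf[:, None]                       # subtract from every height
    return cleaned

# usage: cleaned = estimate_interference(numpy.random.rand(3, 64, 120), numpy.arange(60, 120))
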
746 745 class deflip(Operation):
747 746
748 747 def run(self, dataOut):
749 748 # array 1: (num_chan, num_profiles, num_heights)
750 749 self.dataOut = dataOut
751 750
752 751 # oblique JULIA channel, index 2
753 752 # array 2: (num_profiles, num_heights)
754 753 jspectra = self.dataOut.data_spc[2]
755 754 jspectra_tmp=numpy.zeros(jspectra.shape)
756 755 num_profiles=jspectra.shape[0]
757 756 freq_dc = int(num_profiles / 2)
758 757 # Flip using a for loop
759 758 for j in range(num_profiles):
760 759 jspectra_tmp[num_profiles-j-1]= jspectra[j]
761 760 # keep the DC profile and the immediately preceding profile unchanged
762 761 jspectra_tmp[freq_dc-1]= jspectra[freq_dc-1]
763 762 jspectra_tmp[freq_dc]= jspectra[freq_dc]
764 763 # the modified channel is written back into the channel array
765 764 self.dataOut.data_spc[2] = jspectra_tmp
766 765
767 766 return self.dataOut
768 767
769 768
770 769 class IncohInt(Operation):
771 770
772 771 __profIndex = 0
773 772 __withOverapping = False
774 773
775 774 __byTime = False
776 775 __initime = None
777 776 __lastdatatime = None
778 777 __integrationtime = None
779 778
780 779 __buffer_spc = None
781 780 __buffer_cspc = None
782 781 __buffer_dc = None
783 782
784 783 __dataReady = False
785 784
786 785 __timeInterval = None
787 786
788 787 n = None
789 788
790 789 def __init__(self):
791 790
792 791 Operation.__init__(self)
793 792
794 793 def setup(self, n=None, timeInterval=None, overlapping=False):
795 794 """
796 795 Set the parameters of the integration class.
797 796
798 797 Inputs:
799 798
800 799 n : Number of incoherent integrations
801 800 timeInterval : Integration time in seconds; ignored when the parameter "n" is given
802 801 overlapping :
803 802
804 803 """
805 804
806 805 self.__initime = None
807 806 self.__lastdatatime = 0
808 807
809 808 self.__buffer_spc = 0
810 809 self.__buffer_cspc = 0
811 810 self.__buffer_dc = 0
812 811
813 812 self.__profIndex = 0
814 813 self.__dataReady = False
815 814 self.__byTime = False
816 815
817 816 if n is None and timeInterval is None:
818 817 raise ValueError("n or timeInterval should be specified ...")
819 818
820 819 if n is not None:
821 820 self.n = int(n)
822 821 else:
823 822
824 823 self.__integrationtime = int(timeInterval)
825 824 self.n = None
826 825 self.__byTime = True
827 826
828 827 def putData(self, data_spc, data_cspc, data_dc):
829 828 """
830 829 Add a spectrum to __buffer_spc and increase __profIndex by one
831 830
832 831 """
833 832
834 833 self.__buffer_spc += data_spc
835 834
836 835 if data_cspc is None:
837 836 self.__buffer_cspc = None
838 837 else:
839 838 self.__buffer_cspc += data_cspc
840 839
841 840 if data_dc is None:
842 841 self.__buffer_dc = None
843 842 else:
844 843 self.__buffer_dc += data_dc
845 844
846 845 self.__profIndex += 1
847 846
848 847 return
849 848
850 849 def pushData(self):
851 850 """
852 851 Return the accumulated spectra and the number of profiles used in the sum.
853 852 
854 853 Affected:
855 854 
856 855 self.__profIndex
857 856
858 857 """
859 858
860 859 data_spc = self.__buffer_spc
861 860 data_cspc = self.__buffer_cspc
862 861 data_dc = self.__buffer_dc
863 862 n = self.__profIndex
864 863
865 864 self.__buffer_spc = 0
866 865 self.__buffer_cspc = 0
867 866 self.__buffer_dc = 0
868 867 self.__profIndex = 0
869 868
870 869 return data_spc, data_cspc, data_dc, n
871 870
872 871 def byProfiles(self, *args):
873 872
874 873 self.__dataReady = False
875 874 avgdata_spc = None
876 875 avgdata_cspc = None
877 876 avgdata_dc = None
878 877
879 878 self.putData(*args)
880 879
881 880 if self.__profIndex == self.n:
882 881
883 882 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
884 883 self.n = n
885 884 self.__dataReady = True
886 885
887 886 return avgdata_spc, avgdata_cspc, avgdata_dc
888 887
889 888 def byTime(self, datatime, *args):
890 889
891 890 self.__dataReady = False
892 891 avgdata_spc = None
893 892 avgdata_cspc = None
894 893 avgdata_dc = None
895 894
896 895 self.putData(*args)
897 896
898 897 if (datatime - self.__initime) >= self.__integrationtime:
899 898 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
900 899 self.n = n
901 900 self.__dataReady = True
902 901
903 902 return avgdata_spc, avgdata_cspc, avgdata_dc
904 903
905 904 def integrate(self, datatime, *args):
906 905
907 906 if self.__profIndex == 0:
908 907 self.__initime = datatime
909 908
910 909 if self.__byTime:
911 910 avgdata_spc, avgdata_cspc, avgdata_dc = self.byTime(
912 911 datatime, *args)
913 912 else:
914 913 avgdata_spc, avgdata_cspc, avgdata_dc = self.byProfiles(*args)
915 914
916 915 if not self.__dataReady:
917 916 return None, None, None, None
918 917
919 918 return self.__initime, avgdata_spc, avgdata_cspc, avgdata_dc
920 919
921 920 def run(self, dataOut, n=None, timeInterval=None, overlapping=False):
922 921 if n == 1:
923 922 return dataOut
924 923
925 924 dataOut.flagNoData = True
926 925
927 926 if not self.isConfig:
928 927 self.setup(n, timeInterval, overlapping)
929 928 self.isConfig = True
930 929
931 930 avgdatatime, avgdata_spc, avgdata_cspc, avgdata_dc = self.integrate(dataOut.utctime,
932 931 dataOut.data_spc,
933 932 dataOut.data_cspc,
934 933 dataOut.data_dc)
935 934
936 935 if self.__dataReady:
937 936
938 937 dataOut.data_spc = avgdata_spc
939 938 dataOut.data_cspc = avgdata_cspc
940 939 dataOut.data_dc = avgdata_dc
941 940 dataOut.nIncohInt *= self.n
942 941 dataOut.utctime = avgdatatime
943 942 dataOut.flagNoData = False
944 943
945 944 return dataOut
946 945
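IncohInt accumulates (it does not average) the incoming auto-, cross- and DC spectra until either n profiles have been pushed or the configured time interval has elapsed, then releases the sums and scales dataOut.nIncohInt by the number of profiles used. A bare-bones sketch of the by-profiles path (the class name and driver below are illustrative, not part of schainpy):

import numpy

class TinyIncohInt(object):
    # minimal sketch of "integrate by number of profiles", assuming n is given
    def __init__(self, n):
        self.n = n
        self.buffer = 0
        self.count = 0

    def push(self, spc):
        self.buffer = self.buffer + spc
        self.count += 1
        if self.count == self.n:
            out, self.buffer, self.count = self.buffer, 0, 0
            return out          # accumulated (not averaged) spectra, as in IncohInt
        return None             # not enough profiles yet

# usage
acc = TinyIncohInt(n=4)
for _ in range(4):
    result = acc.push(numpy.random.rand(2, 8, 5))
# result now holds the sum of 4 spectra; nIncohInt would be scaled by 4 downstream
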
947 946 class dopplerFlip(Operation):
948 947
949 948 def run(self, dataOut):
950 949 # array 1: (num_chan, num_profiles, num_heights)
951 950 self.dataOut = dataOut
952 951 # oblique JULIA channel, index 2
953 952 # array 2: (num_profiles, num_heights)
954 953 jspectra = self.dataOut.data_spc[2]
955 954 jspectra_tmp = numpy.zeros(jspectra.shape)
956 955 num_profiles = jspectra.shape[0]
957 956 freq_dc = int(num_profiles / 2)
958 957 # Flip using a for loop
959 958 for j in range(num_profiles):
960 959 jspectra_tmp[num_profiles-j-1]= jspectra[j]
961 960 # keep the DC profile and the immediately preceding profile unchanged
962 961 jspectra_tmp[freq_dc-1]= jspectra[freq_dc-1]
963 962 jspectra_tmp[freq_dc]= jspectra[freq_dc]
964 963 # the modified channel is written back into the channel array
965 964 self.dataOut.data_spc[2] = jspectra_tmp
966 965
967 966 return self.dataOut No newline at end of file
@@ -1,291 +1,291
1 1 #!/usr/bin/env python
2 2 '''
3 3 Created on Jul 7, 2014
4 4
5 5 @author: roj-idl71
6 6 '''
7 7 import os, sys, json
8 8
9 9 #path = os.path.dirname(os.getcwd())
10 10 #path = os.path.join(path, 'source')
11 11 #sys.path.insert(0, path)
12 12
13 13 from schainpy.controller import Project
14 14
15 15 if __name__ == '__main__':
16 16
17 17 desc = "JULIA raw experiment "
18 18 filename = "schain.xml"
19 19
20 20 dpath = '/home/roberto/puma/JULIA_NEW/JULIA_EW/D2022/'
21 21 dpath = '/home/roberto/puma/JULIA_NEW/JULIA_EW/D2021/'
22 22 dpath = '/home/roberto/puma/JULIA_NEW/JULIA_EW/D2017/'
23 23 dpath = '/home/roberto/puma/JULIA_NEW/JULIA_EW/D2021/'
24 24 dpath = '/home/roberto/puma/JULIA_NEW/JULIA_EW/D2018/'
25 25 #dpath = '/home/roberto/puma/JULIA_NEW/JULIA_EW/D2017/'
26 26 #dpath = '/home/roberto/Folder_aux/D2019/'
27 27
28 28 ##dpath = '/home/roberto/puma/JULIA_EW_IMAGING/JULIA_EW/D2017/'
29 29 dpath = '/home/soporte/PUMA/JULIA_EW_IMAGING/JULIA_EW/D2017/'
30 30
31 31 figpath = '/home/soporte/DATA/Pictures/JULIA/EEJ/Skew_but_dop_is_shift'+'/'+dpath[-5:-1]
32 32 ppath = "/home/soporte/DATA/MLT/Oblique/2022_03/data_reshape"
33 33 spcdata = '/home/soporte/DATA/JULIA/EEJ/SPC'
34 34 path_parameters = '/home/soporte/DATA/JULIA/EEJ/Params'
35 35 db_range=['25','35']
36 36 db_range=['10','20']
37 37 #db_range=['14','20']
38 38 db_range=['10','23']
39 39 db_range=['13','20']
40 40 db_range=['21','30']
41 41 db_range=['15','30']
42 42 db_range=['13','28']
43 43 tiempo=['7','18']
44 44 altura1=[2,20]
45 45 altura1=[90,220]
46 46 velocity=['-80','80']
47 47 period=60
48 48 # PROJECT 1
49 49
50 50 show_spc = 0
51 51 save_spc = 0
52 52 fitting = 1
53 53 save_params = 1
54 54 plot_params = 0
55 55
56 56 controllerObj = Project()
57 57 controllerObj.setup(id = '191', name='altura1', description=desc)
58 58
59 59 readUnitConfObj1 = controllerObj.addReadUnit(datatype='SpectraReader',
60 60 path=dpath,
61 61 startDate='2017/09/01', #Check 21-29 Jun 2021, 18 May 2022
62 62 endDate='2017/09/30', #05,06,07-01-18
63 63 #startTime='06:00:00',
64 64 #endTime='18:00:00',
65 65 startTime='07:00:00',
66 66 #startTime='10:00:00',
67 67 #startTime='08:38:01',
68 68 #startTime='11:16:00',
69 69 #startTime='08:13:00',
70 70 endTime='17:59:59',
71 71 #startTime='16:30:00',
72 72 #endTime='17:30:59',
73 73 online=0,
74 74 walk=1,
75 75 expLabel='150EEJ',
76 76 getByBlock=1,
77 delay=20)
77 delay=20)
78 78
79 79 # opObj00 = readUnitConfObj.addOperation(name='printInfo')
80 80 # opObj00 = readUnitConfObj.addOperation(name='printNumberOfBlock')
81 81
82 82 procUnitConfObj1 = controllerObj.addProcUnit(datatype='SpectraProc', inputId=readUnitConfObj1.getId())
83 83
84 84 opObj11 = procUnitConfObj1.addOperation(name='selectChannels')
85 85 opObj11.addParameter(name='channelList', value='2,', format='intlist')
86 86
87 87 '''
88 88 opObj11 = procUnitConfObj1SPC.addOperation(name='removeDC')
89 89 opObj11.addParameter(name='mode', value='2', format='int')
90 90 '''
91 91
92 92 #opObj11 = procUnitConfObj1.addOperation(name='dopplerFlip') #It fixes the Doppler
93 93 #opObj11.addParameter(name='chann', value='0', format='int')
94 94
95 95 opObj11 = procUnitConfObj1.addOperation(name='removeInterference')
96 96
97 97 opObj11 = procUnitConfObj1.addOperation(name='IncohInt', optype='other')
98 98 #opObj11.addParameter(name='n', value='20', format='int')
99 99 opObj11.addParameter(name='n', value='1', format='int')
100 100
101 101 opObj11 = procUnitConfObj1.addOperation(name='GetSNR', optype='other')
102 102
103 103 if save_spc:
104 104 dataList=['data_spc',
105 105 'utctime']
106 106 metadataList=['nFFTPoints','VelRange','normFactor',
107 107 'heightList','timeZone']
108 108
109 109 op221 = procUnitConfObj1.addOperation(name='HDFWriter', optype='external')
110 110 op221.addParameter(name='path', value=spcdata)
111 111 #op221.addParameter(name='mode', value=1, format='int')
112 112 op221.addParameter(name='dataList', value=dataList)
113 113 op221.addParameter(name='metadataList', value=metadataList)
114 114 #op221.addParameter(name='blocksPerFile', value=500)
115 115 op221.addParameter(name='blocksPerFile', value=2000)
116 116
117 117 if show_spc:
118 118 #'''
119 119 # opObj11 = procUnitConfObj1SPC.addOperation(name='removeInterference')
120 120
121 121 opObj11 = procUnitConfObj1.addOperation(name='SpectraPlot', optype='other')
122 122 opObj11.addParameter(name='id', value='1', format='int')
123 123 opObj11.addParameter(name='wintitle', value='Oblique', format='str')
124 124 opObj11.addParameter(name='zmin', value=db_range[0], format='int')
125 125 opObj11.addParameter(name='zmax', value=db_range[1], format='int')
126 126 opObj11.addParameter(name='xaxis', value='velocity', format='str')
127 127 opObj11.addParameter(name='ymin', value=altura1[0], format='int')
128 128 opObj11.addParameter(name='ymax', value=altura1[1], format='int')
129 129 # opObj11.addParameter(name='xmin', value=velocity[0], format='int')
130 130 # opObj11.addParameter(name='xmax', value=velocity[1], format='int')
131 131 opObj11.addParameter(name='showprofile', value='1', format='int')
132 132 opObj11.addParameter(name='save', value=figpath, format='str')
133 133 #'''
134 134 #'''
135 135 opObj11 = procUnitConfObj1.addOperation(name='RTIPlot', optype='other')
136 136 opObj11.addParameter(name='id', value='10', format='int')
137 137 opObj11.addParameter(name='wintitle', value='JULIA EEJ RTI', format='str')
138 138 opObj11.addParameter(name='xmin', value=tiempo[0], format='float')
139 139 opObj11.addParameter(name='xmax', value=tiempo[1], format='float')
140 140 opObj11.addParameter(name='ymin', value=altura1[0], format='int')
141 141 opObj11.addParameter(name='ymax', value=altura1[1], format='int')
142 142 opObj11.addParameter(name='zmin', value=db_range[0], format='int')
143 143 opObj11.addParameter(name='zmax', value=db_range[1], format='int')
144 144 opObj11.addParameter(name='showprofile', value='1', format='int')
145 145 #opObj11.addParameter(name='save_period', value=40, format='str')
146 146 opObj11.addParameter(name='save', value=figpath, format='str')
147 147 #opObj11.addParameter(name='throttle', value=1, format='str')
148 148 #'''
149 149 '''
150 150 opObj11 = procUnitConfObj1.addOperation(name='SnrPlot', optype='other')
151 151 opObj11.addParameter(name='id', value='10', format='int')
152 152 opObj11.addParameter(name='wintitle', value='JULIA EEJ SNR', format='str')
153 153 opObj11.addParameter(name='xmin', value=tiempo[0], format='float')
154 154 opObj11.addParameter(name='xmax', value=tiempo[1], format='float')
155 155 opObj11.addParameter(name='ymin', value=altura1[0], format='int')
156 156 opObj11.addParameter(name='ymax', value=altura1[1], format='int')
157 157 opObj11.addParameter(name='zmin', value=0.1, format='int')
158 158 opObj11.addParameter(name='zmax', value=50, format='int')
159 159 #opObj11.addParameter(name='showprofile', value='1', format='int')
160 160 opObj11.addParameter(name='save', value=figpath, format='str')
161 161 '''
162 162 if fitting:
163 163 Dop = 'Max' #Plot and Save the Pos[Max_val] as the Doppler Shift
164 164 Dop = 'Shift' #Plot and Save the Skew Gaussian Shift as the Doppler Shift
165 165 opObj11 = procUnitConfObj1.addOperation(name='Oblique_Gauss_Fit', optype='other')
166 166 opObj11.addParameter(name='mode', value=9, format='int') #Skew
167 167 opObj11.addParameter(name='Dop', value=Dop)
168 168 #opObj11.addParameter(name='mode', value=11, format='int')
169 169
170 170 if save_params:
171 171 '''
172 172 dataList=['powerdB', 'Oblique_params', 'Oblique_param_errors', 'dplr_2_u', 'data_snr',
173 173 'utctime']
174 174 metadataList=['VelRange',
175 175 'heightList','timeZone']
176 176
177 177 op221 = procUnitConfObj1.addOperation(name='HDFWriter', optype='external')
178 178 op221.addParameter(name='path', value=path_parameters)
179 179 #op221.addParameter(name='mode', value=1, format='int')
180 180 op221.addParameter(name='dataList', value=dataList)
181 181 op221.addParameter(name='metadataList', value=metadataList)
182 182 #op221.addParameter(name='blocksPerFile', value=500)
183 183 op221.addParameter(name='blocksPerFile', value=2000)
184 184 '''
185 185
186 186 one = {'gdlatr': 'lat', 'gdlonr': 'lon', 'inttms': 'paramInterval'} #reader gdlatr-->lat only 1D
187 187
188 188 two = {
189 189 'snl': 'snl', # should come out like the original, but cleaner
190 190 'RANGE': 'heightList', #<----- mnemonics
191 191 'DOPP_T1_EEJ': ('Dop_EEJ_T1', (0)),
192 192 'DDOPP_T1_EEJ': ('Err_Dop_EEJ_T1', (0)),
193 193 'SPEC_W_T1_EEJ': ('Spec_W_T1', (0)),
194 194 'DSPEC_W_T1_EEJ': ('Err_Spec_W_T1', (0)),
195 195 'DOPP_T2_EEJ': ('Dop_EEJ_T2', (0)),
196 196 'DDOPP_T2_EEJ': ('Err_Dop_EEJ_T2', (0)),
197 197 'SPEC_W_T2_EEJ': ('Spec_W_T2', (0)),
198 198 'DSPEC_W_T2_EEJ': ('Err_Spec_W_T2', (0)),
199 199 } #writer
200 200
201 201
202 202 ind = ['range']
203 203
204 204 meta = {
205 205 'kinst': 840, # instrument code, 840 for JULIA, 14 for JULIA MP CSR
206 206 'kindat': 1962, # type of data; this new code is the same for JULIA and JULIA CSR
207 207 'catalog': {
208 208 'principleInvestigator': 'Danny Scipión',
209 209 'expPurpose': 'Equatorial Electrojet Parameters',
210 210 },
211 211 'header': {
212 212 'analyst': 'D. Hysell'
213 213 }
214 214 }
215 215
216 216
217 217 op_writer = procUnitConfObj1.addOperation(name='MADWriter', optype='external')
218 218 #op_writer.addParameter(name='path', value='/home/roberto/DATA/hdf5_outputs/Madrigal/EEJ')
219 219 #op_writer.addParameter(name='path', value='/home/roberto/DATA/hdf5_outputs/Madrigal/EEJ/Dop_Max_Val')
220 220 #op_writer.addParameter(name='path', value='/home/roberto/DATA/hdf5_outputs/Madrigal/EEJ/'+Dop+'/'+dpath[-5:-1])
221 221 #op_writer.addParameter(name='path', value='/home/roberto/DATA/hdf5_outputs/Madrigal/EEJ/CorrectFiles/no_snl_Test/01/'+Dop+'/'+dpath[-5:-1])
222 222 op_writer.addParameter(name='path', value='/home/soporte/DATA/hdf5_outputs/Madrigal/EEJ/FinalFiles/'+Dop+'/'+dpath[-5:-1])
223 223 op_writer.addParameter(name='format', value='hdf5', format='str')
224 224 op_writer.addParameter(name='oneDDict', value=json.dumps(one), format='str')
225 225 op_writer.addParameter(name='twoDDict', value=json.dumps(two), format='str')
226 226 op_writer.addParameter(name='ind2DList', value=json.dumps(ind), format='str')
227 227 op_writer.addParameter(name='metadata', value=json.dumps(meta), format='str')
228 228
229 229
230 230 if plot_params:
231 231 '''
232 232 opObj11 = procUnitConfObj1.addOperation(name='DopplerEEJPlot', optype='other')
233 233 opObj11.addParameter(name='id', value='10', format='int')
234 234 opObj11.addParameter(name='wintitle', value='Doppler EEJ', format='str')
235 235 opObj11.addParameter(name='xmin', value=tiempo[0], format='float')
236 236 opObj11.addParameter(name='xmax', value=tiempo[1], format='float')
237 237 opObj11.addParameter(name='ymin', value=altura1[0], format='int')
238 238 opObj11.addParameter(name='ymax', value=altura1[1], format='int')
239 239 #opObj11.addParameter(name='zmin', value=-250, format='int')
240 240 #opObj11.addParameter(name='zmax', value=250, format='int')
241 241 opObj11.addParameter(name='zlimits', value='(-400,400),(-250,250)')
242 242 #opObj11.addParameter(name='showprofile', value='1', format='int')
243 243 opObj11.addParameter(name='save', value=figpath, format='str')
244 244 #opObj11.addParameter(name='EEJtype', value=1, format='int')
245 245
246 246 opObj11 = procUnitConfObj1.addOperation(name='SpcWidthEEJPlot', optype='other')
247 247 opObj11.addParameter(name='id', value='10', format='int')
248 248 opObj11.addParameter(name='wintitle', value='Spectral Width EEJ', format='str')
249 249 opObj11.addParameter(name='xmin', value=tiempo[0], format='float')
250 250 opObj11.addParameter(name='xmax', value=tiempo[1], format='float')
251 251 opObj11.addParameter(name='ymin', value=altura1[0], format='int')
252 252 opObj11.addParameter(name='ymax', value=altura1[1], format='int')
253 253 #opObj11.addParameter(name='zmin', value=0., format='int')
254 254 #opObj11.addParameter(name='zmax', value=250, format='int')
255 255 opObj11.addParameter(name='zlimits', value='(0.1,100),(0.1,250)')
256 256 #opObj11.addParameter(name='showprofile', value='1', format='int')
257 257 opObj11.addParameter(name='save', value=figpath, format='str')
258 258 #opObj11.addParameter(name='EEJtype', value=1, format='int')
259 259 '''
260 260 opObj11 = procUnitConfObj1.addOperation(name='SpectraObliquePlot', optype='other')
261 261 opObj11.addParameter(name='id', value='1', format='int')
262 262 opObj11.addParameter(name='wintitle', value='Oblique', format='str')
263 263 opObj11.addParameter(name='zmin', value=db_range[0], format='int')
264 264 opObj11.addParameter(name='zmax', value=db_range[1], format='int')
265 265 opObj11.addParameter(name='xaxis', value='velocity', format='str')
266 266 opObj11.addParameter(name='ymin', value=altura1[0], format='int')
267 267 opObj11.addParameter(name='ymax', value=altura1[1], format='int')
268 268 # opObj11.addParameter(name='xmin', value=velocity[0], format='int')
269 269 # opObj11.addParameter(name='xmax', value=velocity[1], format='int')
270 270 opObj11.addParameter(name='showprofile', value='1', format='int')
271 271 opObj11.addParameter(name='save', value=figpath+'/400', format='str')
272 272
273 273 '''
274 274 opObj11 = procUnitConfObj1.addOperation(name='DopplerEEJPlot', optype='other')
275 275 opObj11.addParameter(name='id', value='10', format='int')
276 276 opObj11.addParameter(name='wintitle', value='Doppler EEJ Type II', format='str')
277 277 opObj11.addParameter(name='xmin', value=tiempo[0], format='float')
278 278 opObj11.addParameter(name='xmax', value=tiempo[1], format='float')
279 279 # opObj11.addParameter(name='ymin', value=altura1[0], format='int')
280 280 # opObj11.addParameter(name='ymax', value=altura1[1], format='int')
281 281 opObj11.addParameter(name='zmin', value=-250, format='int')
282 282 opObj11.addParameter(name='zmax', value=250, format='int')
283 283 #opObj11.addParameter(name='showprofile', value='1', format='int')
284 284 opObj11.addParameter(name='save', value=figpath, format='str')
285 285 opObj11.addParameter(name='EEJtype', value=2, format='int')
286 286 '''
287 287
288 288
289 289
290 290
291 291 controllerObj.start() No newline at end of file