joabAM -
r1403:ce3c4cf39b22
@@ -1,674 +1,677
1 1 import os
2 2 import time
3 3 import datetime
4 4
5 5 import numpy
6 6 import h5py
7 7
8 8 import schainpy.admin
9 9 from schainpy.model.data.jrodata import *
10 10 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
11 11 from schainpy.model.io.jroIO_base import *
12 12 from schainpy.utils import log
13 13
14 14
15 15 class HDFReader(Reader, ProcessingUnit):
16 16 """Processing unit to read HDF5 format files
17 17
18 18 This unit reads HDF5 files created with the `HDFWriter` operation. By default the
19 19 file contains two groups, Data and Metadata, and every variable is loaded as a
20 20 `dataOut` attribute.
21 21 It is also possible to read any other HDF5 file by giving its structure in the `description`
22 22 parameter; extra values can be added to the metadata with the `extras` parameter.
23 23
24 24 Parameters:
25 25 -----------
26 26 path : str
27 27 Path where files are located.
28 28 startDate : date
29 29 Start date of the files
30 30 endDate : date
31 31 End date of the files
32 32 startTime : time
33 33 Start time of the files
34 34 endTime : time
35 35 End time of the files
36 36 description : dict, optional
37 37 Dictionary with the description of the HDF5 file
38 38 extras : dict, optional
39 39 Dictionary with extra metadata to be added to `dataOut`
40 40
41 41 Examples
42 42 --------
43 43
44 44 desc = {
45 45 'Data': {
46 46 'data_output': ['u', 'v', 'w'],
47 47 'utctime': 'timestamps',
48 48 } ,
49 49 'Metadata': {
50 50 'heightList': 'heights'
51 51 }
52 52 }
53 53
54 54 desc = {
55 55 'Data': {
56 56 'data_output': 'winds',
57 57 'utctime': 'timestamps'
58 58 },
59 59 'Metadata': {
60 60 'heightList': 'heights'
61 61 }
62 62 }
63 63
64 64 extras = {
65 65 'timeZone': 300
66 66 }
67 67
68 68 reader = project.addReadUnit(
69 69 name='HDFReader',
70 70 path='/path/to/files',
71 71 startDate='2019/01/01',
72 72 endDate='2019/01/31',
73 73 startTime='00:00:00',
74 74 endTime='23:59:59',
75 75 # description=json.dumps(desc),
76 76 # extras=json.dumps(extras),
77 77 )
78 78
79 79 """
80 80
81 81 __attrs__ = ['path', 'startDate', 'endDate', 'startTime', 'endTime', 'description', 'extras']
82 82
83 83 def __init__(self):
84 84 ProcessingUnit.__init__(self)
85 85 self.dataOut = Parameters()
86 86 self.ext = ".hdf5"
87 87 self.optchar = "D"
88 88 self.meta = {}
89 89 self.data = {}
90 90 self.open_file = h5py.File
91 91 self.open_mode = 'r'
92 92 self.description = {}
93 93 self.extras = {}
94 94 self.filefmt = "*%Y%j***"
95 95 self.folderfmt = "*%Y%j"
96 96 self.utcoffset = 0
97 97
98 98 def setup(self, **kwargs):
99 99
100 100 self.set_kwargs(**kwargs)
101 101 if not self.ext.startswith('.'):
102 102 self.ext = '.{}'.format(self.ext)
103 103
104 104 if self.online:
105 105 log.log("Searching files in online mode...", self.name)
106 106
107 107 for nTries in range(self.nTries):
108 108 fullpath = self.searchFilesOnLine(self.path, self.startDate,
109 109 self.endDate, self.expLabel, self.ext, self.walk,
110 110 self.filefmt, self.folderfmt)
111 111 pathname, filename = os.path.split(fullpath)
112 112 #print(pathname,filename)
113 113 try:
114 114 fullpath = next(fullpath)
115 115
116 116 except:
117 117 fullpath = None
118 118
119 119 if fullpath:
120 120 break
121 121
122 122 log.warning(
123 123 'Waiting {} sec for a valid file in {}: try {} ...'.format(
124 124 self.delay, self.path, nTries + 1),
125 125 self.name)
126 126 time.sleep(self.delay)
127 127
128 128 if not(fullpath):
129 129 raise schainpy.admin.SchainError(
130 130 'There isn\'t any valid file in {}'.format(self.path))
131 131
132 132 pathname, filename = os.path.split(fullpath)
133 133 self.year = int(filename[1:5])
134 134 self.doy = int(filename[5:8])
135 135 self.set = int(filename[8:11]) - 1
136 136 else:
137 137 log.log("Searching files in {}".format(self.path), self.name)
138 138 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
139 139 self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt)
140 140
141 141 self.setNextFile()
142 142
143 143 return
144 144
145 145
146 146 def readFirstHeader(self):
147 147 '''Read metadata and data'''
148 148
149 149 self.__readMetadata()
150 150 self.__readData()
151 151 self.__setBlockList()
152 152
153 153 if 'type' in self.meta:
154 154 ##print("Creting dataOut...")
155 155 self.dataOut = eval(self.meta['type'])()
156 156 ##print(vars(self.dataOut))
157 157
158 158 for attr in self.meta:
159 159 ##print("attr: ", attr)
160 160 ##print(type(self.dataOut).__name__)
161 161 setattr(self.dataOut, attr, self.meta[attr])
162 162
163 163 self.blockIndex = 0
164 164
165 165 return
166 166
167 167 def __setBlockList(self):
168 168 '''
169 169 Selects the data within the times defined
170 170
171 171 self.fp
172 172 self.startTime
173 173 self.endTime
174 174 self.blockList
175 175 self.blocksPerFile
176 176
177 177 '''
178 178
179 179 startTime = self.startTime
180 180 endTime = self.endTime
181 181 thisUtcTime = self.data['utctime'] + self.utcoffset
182 182 self.interval = numpy.min(thisUtcTime[1:] - thisUtcTime[:-1])
183 183 thisDatetime = datetime.datetime.utcfromtimestamp(thisUtcTime[0])
184 184 self.startFileDatetime = thisDatetime
185 185 thisDate = thisDatetime.date()
186 186 thisTime = thisDatetime.time()
187 187
188 188 startUtcTime = (datetime.datetime.combine(thisDate, startTime) - datetime.datetime(1970, 1, 1)).total_seconds()
189 189 endUtcTime = (datetime.datetime.combine(thisDate, endTime) - datetime.datetime(1970, 1, 1)).total_seconds()
190 190
191 191 ind = numpy.where(numpy.logical_and(thisUtcTime >= startUtcTime, thisUtcTime < endUtcTime))[0]
192 192
193 193 self.blockList = ind
194 194 self.blocksPerFile = len(ind)
195 195 self.blocksPerFile = len(thisUtcTime)
196 196 return
197 197
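For reference, `__setBlockList` keeps only the blocks whose UTC timestamps fall between `startTime` and `endTime` on the date of the first block. A minimal standalone sketch of that selection, using illustrative timestamps (one block per minute starting at 2019-01-01 00:00 UTC):

import datetime
import numpy

utc = 1546300800 + 60 * numpy.arange(10, dtype=float)    # one block per minute
startTime, endTime = datetime.time(0, 2, 0), datetime.time(0, 7, 0)

day = datetime.datetime.utcfromtimestamp(utc[0]).date()
epoch = datetime.datetime(1970, 1, 1)
startUtc = (datetime.datetime.combine(day, startTime) - epoch).total_seconds()
endUtc = (datetime.datetime.combine(day, endTime) - epoch).total_seconds()

ind = numpy.where(numpy.logical_and(utc >= startUtc, utc < endUtc))[0]
print(ind)    # [2 3 4 5 6] -> the blockList; its length would be the usable blocksPerFile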
198 198 def __readMetadata(self):
199 199 '''
200 200 Reads Metadata
201 201 '''
202 202
203 203 meta = {}
204 204
205 205 if self.description:
206 206 for key, value in self.description['Metadata'].items():
207 207 meta[key] = self.fp[value][()]
208 208 else:
209 209 grp = self.fp['Metadata']
210 210 for name in grp:
211 211 meta[name] = grp[name][()]
212 212
213 213 if self.extras:
214 214 for key, value in self.extras.items():
215 215 meta[key] = value
216 216 self.meta = meta
217 217
218 218 return
219 219
220 220
221 221
222 222 def checkForRealPath(self, nextFile, nextDay):
223 223
224 224 # print("check FRP")
225 225 # dt = self.startFileDatetime + datetime.timedelta(1)
226 226 # filename = '{}.{}{}'.format(self.path, dt.strftime('%Y%m%d'), self.ext)
227 227 # fullfilename = os.path.join(self.path, filename)
228 228 # print("check Path ",fullfilename,filename)
229 229 # if os.path.exists(fullfilename):
230 230 # return fullfilename, filename
231 231 # return None, filename
232 232 return None,None
233 233
234 234 def __readData(self):
235 235
236 236 data = {}
237 237
238 238 if self.description:
239 239 for key, value in self.description['Data'].items():
240 240 if isinstance(value, str):
241 241 if isinstance(self.fp[value], h5py.Dataset):
242 242 data[key] = self.fp[value][()]
243 243 elif isinstance(self.fp[value], h5py.Group):
244 244 array = []
245 245 for ch in self.fp[value]:
246 246 array.append(self.fp[value][ch][()])
247 247 data[key] = numpy.array(array)
248 248 elif isinstance(value, list):
249 249 array = []
250 250 for ch in value:
251 251 array.append(self.fp[ch][()])
252 252 data[key] = numpy.array(array)
253 253 else:
254 254 grp = self.fp['Data']
255 255 for name in grp:
256 256 if isinstance(grp[name], h5py.Dataset):
257 257 array = grp[name][()]
258 258 elif isinstance(grp[name], h5py.Group):
259 259 array = []
260 260 for ch in grp[name]:
261 261 array.append(grp[name][ch][()])
262 262 array = numpy.array(array)
263 263 else:
264 264 log.warning('Unknown type: {}'.format(name))
265 265
266 266 if name in self.description:
267 267 key = self.description[name]
268 268 else:
269 269 key = name
270 270 data[key] = array
271 271
272 272 self.data = data
273 273 return
274 274
275 275 def getData(self):
276 276 if not self.isDateTimeInRange(self.startFileDatetime, self.startDate, self.endDate, self.startTime, self.endTime):
277 277 self.dataOut.flagNoData = True
278 278 self.blockIndex = self.blocksPerFile
279 279 #self.dataOut.error = True terminates the program, removed
280 280 return
281 281 for attr in self.data:
282 282 #print("attr ",attr)
283 283 if self.data[attr].ndim == 1:
284 284 setattr(self.dataOut, attr, self.data[attr][self.blockIndex])
285 285 else:
286 286 setattr(self.dataOut, attr, self.data[attr][:, self.blockIndex])
287 287
288 288
289 289 self.blockIndex += 1
290 290
291 291 if self.blockIndex == 1:
292 292 log.log("Block No. {}/{} -> {}".format(
293 293 self.blockIndex,
294 294 self.blocksPerFile,
295 295 self.dataOut.datatime.ctime()), self.name)
296 296 else:
297 297 log.log("Block No. {}/{} ".format(
298 298 self.blockIndex,
299 299 self.blocksPerFile),self.name)
300 300
301 301 self.dataOut.flagNoData = False
302 302 self.dataOut.error = False
303 303 return
304 304
305 305 def run(self, **kwargs):
306 306
307 307 if not(self.isConfig):
308 308 self.setup(**kwargs)
309 309 self.isConfig = True
310 310
311 311 if self.blockIndex == self.blocksPerFile:
312 312 self.setNextFile()
313 313
314 314 self.getData()
315 315
316 316 return
317 317
318 318 @MPDecorator
319 319 class HDFWriter(Operation):
320 320 """Operation to write HDF5 files.
321 321
322 322 By default the HDF5 file contains two groups, Data and Metadata, where
323 323 any `dataOut` attribute specified by the `dataList` and `metadataList`
324 324 parameters can be saved; data attributes are normally time dependent,
325 325 whereas metadata attributes are not.
326 326 The structure of the HDF5 file can be customized with the optional
327 327 `description` parameter; see the examples.
328 328
329 329 Parameters:
330 330 -----------
331 331 path : str
332 332 Path where files will be saved.
333 333 blocksPerFile : int
334 334 Number of blocks per file
335 335 metadataList : list
336 336 List of the dataOut attributes that will be saved as metadata
337 337 dataList : list
338 338 List of the dataOut attributes that will be saved as data
339 339 setType : bool
340 340 If True the name of the files corresponds to the timestamp of the data
341 341 description : dict, optional
342 342 Dictionary with the desired description of the HDF5 file
343 343
344 344 Examples
345 345 --------
346 346
347 347 desc = {
348 348 'data_output': {'winds': ['z', 'w', 'v']},
349 349 'utctime': 'timestamps',
350 350 'heightList': 'heights'
351 351 }
352 352 desc = {
353 353 'data_output': ['z', 'w', 'v'],
354 354 'utctime': 'timestamps',
355 355 'heightList': 'heights'
356 356 }
357 357 desc = {
358 358 'Data': {
359 359 'data_output': 'winds',
360 360 'utctime': 'timestamps'
361 361 },
362 362 'Metadata': {
363 363 'heightList': 'heights'
364 364 }
365 365 }
366 366
367 367 writer = proc_unit.addOperation(name='HDFWriter')
368 368 writer.addParameter(name='path', value='/path/to/file')
369 369 writer.addParameter(name='blocksPerFile', value='32')
370 370 writer.addParameter(name='metadataList', value='heightList,timeZone')
371 371 writer.addParameter(name='dataList',value='data_output,utctime')
372 372 # writer.addParameter(name='description',value=json.dumps(desc))
373 373
374 374 """
375 375
376 376 ext = ".hdf5"
377 377 optchar = "D"
378 378 filename = None
379 379 path = None
380 380 setFile = None
381 381 fp = None
382 382 firsttime = True
383 383 #Configurations
384 384 blocksPerFile = None
385 385 blockIndex = None
386 386 dataOut = None
387 387 #Data Arrays
388 388 dataList = None
389 389 metadataList = None
390 390 currentDay = None
391 391 lastTime = None
392 392 timeZone = "ut"
393 393 hourLimit = 3
394 394 breakDays = True
395 395
396 396 def __init__(self):
397 397
398 398 Operation.__init__(self)
399 399 return
400 400
401 401 def setup(self, path=None, blocksPerFile=10, metadataList=None, dataList=None, setType=None,
402 description=None,timeZone = "ut",hourLimit = 3, breakDays=True):
402 description={},timeZone = "ut",hourLimit = 3, breakDays=True):
403 403 self.path = path
404 404 self.blocksPerFile = blocksPerFile
405 405 self.metadataList = metadataList
406 406 self.dataList = [s.strip() for s in dataList]
407 407 self.setType = setType
408 408 self.description = description
409 409 self.timeZone = timeZone
410 410 self.hourLimit = hourLimit
411 411 self.breakDays = breakDays
412 412
413 413 if self.metadataList is None:
414 414 self.metadataList = self.dataOut.metadata_list
415 415
416 416 tableList = []
417 417 dsList = []
418 418
419 419 for i in range(len(self.dataList)):
420 420 dsDict = {}
421 421 if hasattr(self.dataOut, self.dataList[i]):
422 422 dataAux = getattr(self.dataOut, self.dataList[i])
423 423 dsDict['variable'] = self.dataList[i]
424 424 else:
425 425 log.warning('Attribute {} not found in dataOut'.format(self.dataList[i]), self.name)
426 426 continue
427 427
428 428 if dataAux is None:
429 429 continue
430 430 elif isinstance(dataAux, (int, float, numpy.integer, numpy.float)):
431 431 dsDict['nDim'] = 0
432 432 else:
433 433 dsDict['nDim'] = len(dataAux.shape)
434 434 dsDict['shape'] = dataAux.shape
435 435 dsDict['dsNumber'] = dataAux.shape[0]
436 436 dsDict['dtype'] = dataAux.dtype
437 437
438 438 dsList.append(dsDict)
439 439
440 440 self.dsList = dsList
441 441 self.currentDay = self.dataOut.datatime.date()
442 442
443 443 def timeFlag(self):
444 444 currentTime = self.dataOut.utctime
445 timeTuple = None
445 446 if self.timeZone == "lt":
446 447 timeTuple = time.localtime(currentTime)
447 elif self.timeZone == "ut":
448 else :
448 449 timeTuple = time.gmtime(currentTime)
449 450
450 451 dataDay = timeTuple.tm_yday
451 452 #print("time UTC: ",currentTime, self.dataOut.datatime)
452 453 if self.lastTime is None:
453 454 self.lastTime = currentTime
454 455 self.currentDay = dataDay
455 456 return False
456 457
457 458 timeDiff = currentTime - self.lastTime
458 459
459 460 #If the day changes, or the gap between consecutive blocks exceeds self.hourLimit
460 461 if (dataDay != self.currentDay) and self.breakDays:
461 462 self.currentDay = dataDay
462 463 return True
463 464 elif timeDiff > self.hourLimit*60*60:
464 465 self.lastTime = currentTime
465 466 return True
466 467 else:
467 468 self.lastTime = currentTime
468 469 return False
469 470
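The file-break rule in `timeFlag` starts a new output file when the day of year changes (if `breakDays` is enabled) or when the gap since the previous block exceeds `hourLimit` hours. A minimal standalone sketch of the same rule, assuming UTC timestamps; the helper name and the sample epochs are illustrative only:

import time

def should_break(current_utc, last_utc, current_day, hour_limit=3, break_days=True):
    """Mirror of the timeFlag decision: returns (break_file, day_of_year)."""
    data_day = time.gmtime(current_utc).tm_yday
    if last_utc is None:
        return False, data_day
    if break_days and data_day != current_day:
        return True, data_day
    if current_utc - last_utc > hour_limit * 3600:
        return True, current_day
    return False, current_day

# a 4-hour gap within the same day triggers a break with the default hourLimit of 3
print(should_break(1546322400, 1546308000, 1))    # (True, 1)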
470 471 def run(self, dataOut,**kwargs):
471 472
472 473 self.dataOut = dataOut
473 474 if not(self.isConfig):
474 475 self.setup(**kwargs)
475 476
476 477 self.isConfig = True
477 478 self.setNextFile()
478 479
479 480 self.putData()
480 481 return
481 482
482 483 def setNextFile(self):
483 484
484 485 ext = self.ext
485 486 path = self.path
486 487 setFile = self.setFile
488 timeTuple = None
487 489 if self.timeZone == "lt":
488 490 timeTuple = time.localtime(self.dataOut.utctime)
489 491 elif self.timeZone == "ut":
490 492 timeTuple = time.gmtime(self.dataOut.utctime)
491 493 #print("path: ",timeTuple)
492 494 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
493 495 fullpath = os.path.join(path, subfolder)
494 496
495 497 if os.path.exists(fullpath):
496 498 filesList = os.listdir(fullpath)
497 499 filesList = [k for k in filesList if k.startswith(self.optchar)]
498 500 if len( filesList ) > 0:
499 501 filesList = sorted(filesList, key=str.lower)
500 502 filen = filesList[-1]
501 503 # the filename should have the following format
502 504 # 0 1234 567 89A BCDE (hex)
503 505 # x YYYY DDD SSS .ext
504 506 if isNumber(filen[8:11]):
505 507 setFile = int(filen[8:11]) #initialize the set counter to the last file's set number
506 508 else:
507 509 setFile = -1
508 510 else:
509 511 setFile = -1 #initialize the set counter
510 512 else:
511 513 os.makedirs(fullpath)
512 514 setFile = -1 #initialize the set counter
513 515
514 516 if self.setType is None:
515 517 setFile += 1
516 518 file = '%s%4.4d%3.3d%03d%s' % (self.optchar,
517 519 timeTuple.tm_year,
518 520 timeTuple.tm_yday,
519 521 setFile,
520 522 ext )
521 523 else:
522 524 setFile = timeTuple.tm_hour*60+timeTuple.tm_min
523 525 file = '%s%4.4d%3.3d%04d%s' % (self.optchar,
524 526 timeTuple.tm_year,
525 527 timeTuple.tm_yday,
526 528 setFile,
527 529 ext )
528 530
529 531 self.filename = os.path.join( path, subfolder, file )
530 532
531 533 #Setting HDF5 File
532 534 self.fp = h5py.File(self.filename, 'w')
533 535 #write metadata
534 536 self.writeMetadata(self.fp)
535 537 #Write data
536 538 self.writeData(self.fp)
537 539
538 540 def getLabel(self, name, x=None):
539 541
540 542 if x is None:
541 543 if 'Data' in self.description:
542 544 data = self.description['Data']
543 545 if 'Metadata' in self.description:
544 546 data.update(self.description['Metadata'])
545 547 else:
546 548 data = self.description
547 549 if name in data:
548 550 if isinstance(data[name], str):
549 551 return data[name]
550 552 elif isinstance(data[name], list):
551 553 return None
552 554 elif isinstance(data[name], dict):
553 555 for key, value in data[name].items():
554 556 return key
555 557 return name
556 558 else:
557 559 if 'Metadata' in self.description:
558 560 meta = self.description['Metadata']
559 561 else:
560 562 meta = self.description
561 563 if name in meta:
562 564 if isinstance(meta[name], list):
563 565 return meta[name][x]
564 566 elif isinstance(meta[name], dict):
565 567 for key, value in meta[name].items():
566 568 return value[x]
567 569 if 'cspc' in name:
568 570 return 'pair{:02d}'.format(x)
569 571 else:
570 572 return 'channel{:02d}'.format(x)
571 573
572 574 def writeMetadata(self, fp):
573 575
574 576 if self.description:
575 577 if 'Metadata' in self.description:
576 578 grp = fp.create_group('Metadata')
577 579 else:
578 580 grp = fp
579 581 else:
580 582 grp = fp.create_group('Metadata')
581 583
582 584 for i in range(len(self.metadataList)):
583 585 if not hasattr(self.dataOut, self.metadataList[i]):
584 586 log.warning('Metadata: `{}` not found'.format(self.metadataList[i]), self.name)
585 587 continue
586 588 value = getattr(self.dataOut, self.metadataList[i])
587 589 if isinstance(value, bool):
588 590 if value is True:
589 591 value = 1
590 592 else:
591 593 value = 0
592 594 grp.create_dataset(self.getLabel(self.metadataList[i]), data=value)
593 595 return
594 596
595 597 def writeData(self, fp):
596 598
597 599 if self.description:
598 600 if 'Data' in self.description:
599 601 grp = fp.create_group('Data')
600 602 else:
601 603 grp = fp
602 604 else:
603 605 grp = fp.create_group('Data')
604 606
605 607 dtsets = []
606 608 data = []
607 609
608 610 for dsInfo in self.dsList:
609 611 if dsInfo['nDim'] == 0:
610 612 ds = grp.create_dataset(
611 613 self.getLabel(dsInfo['variable']),
612 614 (self.blocksPerFile, ),
613 615 chunks=True,
614 616 dtype=numpy.float64)
615 617 dtsets.append(ds)
616 618 data.append((dsInfo['variable'], -1))
617 619 else:
618 620 label = self.getLabel(dsInfo['variable'])
619 621 if label is not None:
620 622 sgrp = grp.create_group(label)
621 623 else:
622 624 sgrp = grp
623 625 for i in range(dsInfo['dsNumber']):
624 626 ds = sgrp.create_dataset(
625 627 self.getLabel(dsInfo['variable'], i),
626 628 (self.blocksPerFile, ) + dsInfo['shape'][1:],
627 629 chunks=True,
628 630 dtype=dsInfo['dtype'])
629 631 dtsets.append(ds)
630 632 data.append((dsInfo['variable'], i))
631 633 fp.flush()
632 634
633 635 log.log('Creating file: {}'.format(fp.filename), self.name)
634 636
635 637 self.ds = dtsets
636 638 self.data = data
637 639 self.firsttime = True
638 640 self.blockIndex = 0
639 641 return
640 642
641 643 def putData(self):
642 644
643 645 if (self.blockIndex == self.blocksPerFile) or self.timeFlag():
644 646 self.closeFile()
645 647 self.setNextFile()
648 print("breaking file")
646 649
647 650 for i, ds in enumerate(self.ds):
648 651 attr, ch = self.data[i]
649 652 if ch == -1:
650 653 ds[self.blockIndex] = getattr(self.dataOut, attr)
651 654 else:
652 655 ds[self.blockIndex] = getattr(self.dataOut, attr)[ch]
653 656
654 657 self.fp.flush()
655 658 self.blockIndex += 1
656 659 if self.blockIndex == 1:
657 660 log.log('Block No. {}/{} --> {}'.format(self.blockIndex, self.blocksPerFile,self.dataOut.datatime.ctime()), self.name)
658 661 else:
659 662 log.log('Block No. {}/{}'.format(self.blockIndex, self.blocksPerFile), self.name)
660 663 return
661 664
662 665 def closeFile(self):
663 666
664 667 if self.blockIndex != self.blocksPerFile:
665 668 for ds in self.ds:
666 669 ds.resize(self.blockIndex, axis=0)
667 670
668 671 if self.fp:
669 672 self.fp.flush()
670 673 self.fp.close()
671 674
672 675 def close(self):
673 676
674 677 self.closeFile()
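For quick inspection outside the schainpy pipeline, the default layout produced by `HDFWriter` (a `Data` group with one dataset, or one sub-group of per-channel datasets, per `dataList` entry, each with a first axis of length `blocksPerFile`, plus a flat `Metadata` group) can be opened directly with h5py. A minimal sketch, assuming a file written with `dataList='data_output,utctime'` and `metadataList='heightList,timeZone'`; the path and file name below are illustrative only:

import h5py
import numpy

# hypothetical name following the writer's d<YYYY><DDD>/D<YYYY><DDD><SSS>.hdf5 convention
filename = '/path/to/file/d2019001/D2019001000.hdf5'

with h5py.File(filename, 'r') as fp:
    meta = {name: fp['Metadata'][name][()] for name in fp['Metadata']}
    utctime = fp['Data']['utctime'][()]                  # shape: (blocksPerFile,)
    # multidimensional variables are stored as one dataset per channel inside a sub-group
    winds = numpy.array([fp['Data']['data_output'][ch][()]
                         for ch in fp['Data']['data_output']])
    print(meta.get('heightList'), utctime.shape, winds.shape)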
@@ -1,1683 +1,1683
1 1 # Copyright (c) 2012-2020 Jicamarca Radio Observatory
2 2 # All rights reserved.
3 3 #
4 4 # Distributed under the terms of the BSD 3-clause license.
5 5 """Spectra processing Unit and operations
6 6
7 7 Here you will find the processing unit `SpectraProc` and several operations
8 8 to work with Spectra data type
9 9 """
10 10
11 11 import time
12 12 import itertools
13 13
14 14 import numpy
15 15 import math
16 16
17 17 from schainpy.model.proc.jroproc_base import ProcessingUnit, MPDecorator, Operation
18 18 from schainpy.model.data.jrodata import Spectra
19 19 from schainpy.model.data.jrodata import hildebrand_sekhon
20 20 from schainpy.utils import log
21 21
22 22 from scipy.optimize import curve_fit
23 23
24 24
25 25 class SpectraProc(ProcessingUnit):
26 26
27 27 def __init__(self):
28 28
29 29 ProcessingUnit.__init__(self)
30 30
31 31 self.buffer = None
32 32 self.firstdatatime = None
33 33 self.profIndex = 0
34 34 self.dataOut = Spectra()
35 35 self.id_min = None
36 36 self.id_max = None
37 37 self.setupReq = False #Add to all processing units
38 38
39 39 def __updateSpecFromVoltage(self):
40 40
41 41 self.dataOut.timeZone = self.dataIn.timeZone
42 42 self.dataOut.dstFlag = self.dataIn.dstFlag
43 43 self.dataOut.errorCount = self.dataIn.errorCount
44 44 self.dataOut.useLocalTime = self.dataIn.useLocalTime
45 45 try:
46 46 self.dataOut.processingHeaderObj = self.dataIn.processingHeaderObj.copy()
47 47 except:
48 48 pass
49 49 self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()
50 50 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()
51 51 self.dataOut.channelList = self.dataIn.channelList
52 52 self.dataOut.heightList = self.dataIn.heightList
53 53 self.dataOut.dtype = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
54 54 self.dataOut.nProfiles = self.dataOut.nFFTPoints
55 55 self.dataOut.flagDiscontinuousBlock = self.dataIn.flagDiscontinuousBlock
56 56 self.dataOut.utctime = self.firstdatatime
57 57 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData
58 58 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData
59 59 self.dataOut.flagShiftFFT = False
60 60 self.dataOut.nCohInt = self.dataIn.nCohInt
61 61 self.dataOut.nIncohInt = 1
62 62 self.dataOut.windowOfFilter = self.dataIn.windowOfFilter
63 63 self.dataOut.frequency = self.dataIn.frequency
64 64 self.dataOut.realtime = self.dataIn.realtime
65 65 self.dataOut.azimuth = self.dataIn.azimuth
66 66 self.dataOut.zenith = self.dataIn.zenith
67 67 self.dataOut.codeList = self.dataIn.codeList
68 68 self.dataOut.azimuthList = self.dataIn.azimuthList
69 69 self.dataOut.elevationList = self.dataIn.elevationList
70 70
71 71 def __getFft(self):
72 72 """
73 73 Converts Voltage values into Spectra
74 74
75 75 Affected:
76 76 self.dataOut.data_spc
77 77 self.dataOut.data_cspc
78 78 self.dataOut.data_dc
79 79 self.dataOut.heightList
80 80 self.profIndex
81 81 self.buffer
82 82 self.dataOut.flagNoData
83 83 """
84 84 fft_volt = numpy.fft.fft(
85 85 self.buffer, n=self.dataOut.nFFTPoints, axis=1)
86 86 fft_volt = fft_volt.astype(numpy.dtype('complex'))
87 87 dc = fft_volt[:, 0, :]
88 88
89 89 # self-spectra computation
90 90 fft_volt = numpy.fft.fftshift(fft_volt, axes=(1,))
91 91 spc = fft_volt * numpy.conjugate(fft_volt)
92 92 spc = spc.real
93 93
94 94 blocksize = 0
95 95 blocksize += dc.size
96 96 blocksize += spc.size
97 97
98 98 cspc = None
99 99 pairIndex = 0
100 100 if self.dataOut.pairsList != None:
101 101 # cross-spectra computation
102 102 cspc = numpy.zeros(
103 103 (self.dataOut.nPairs, self.dataOut.nFFTPoints, self.dataOut.nHeights), dtype='complex')
104 104 for pair in self.dataOut.pairsList:
105 105 if pair[0] not in self.dataOut.channelList:
106 106 raise ValueError("Error getting CrossSpectra: pair 0 of %s is not in channelList = %s" % (
107 107 str(pair), str(self.dataOut.channelList)))
108 108 if pair[1] not in self.dataOut.channelList:
109 109 raise ValueError("Error getting CrossSpectra: pair 1 of %s is not in channelList = %s" % (
110 110 str(pair), str(self.dataOut.channelList)))
111 111
112 112 cspc[pairIndex, :, :] = fft_volt[pair[0], :, :] * \
113 113 numpy.conjugate(fft_volt[pair[1], :, :])
114 114 pairIndex += 1
115 115 blocksize += cspc.size
116 116
117 117 self.dataOut.data_spc = spc
118 118 self.dataOut.data_cspc = cspc
119 119 self.dataOut.data_dc = dc
120 120 self.dataOut.blockSize = blocksize
121 121 self.dataOut.flagShiftFFT = False
122 122
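The conversion in `__getFft` is a per-channel FFT along the profile axis, with the self-spectra taken as `fft_volt * conj(fft_volt)` and one cross-spectrum `fft_volt[ch0] * conj(fft_volt[ch1])` per entry of `pairsList`. A minimal numpy sketch of the same operations on random data, assuming 2 channels, 16 FFT points and 10 heights:

import numpy

nChannels, nFFTPoints, nHeights = 2, 16, 10
rng = numpy.random.default_rng(0)
volts = rng.standard_normal((nChannels, nFFTPoints, nHeights)) \
      + 1j * rng.standard_normal((nChannels, nFFTPoints, nHeights))

fft_raw = numpy.fft.fft(volts, n=nFFTPoints, axis=1)
dc = fft_raw[:, 0, :]                                  # DC component, taken before the shift
fft_volt = numpy.fft.fftshift(fft_raw, axes=(1,))
spc = (fft_volt * numpy.conjugate(fft_volt)).real      # self-spectra, shape (2, 16, 10)
cspc = fft_volt[0] * numpy.conjugate(fft_volt[1])      # cross-spectrum for pair (0, 1)

print(spc.shape, cspc.shape, dc.shape)                 # (2, 16, 10) (16, 10) (2, 10)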
123 123 def run(self, nProfiles=None, nFFTPoints=None, pairsList=None, ippFactor=None, shift_fft=False):
124
124 #print("spectra run")
125 125 if self.dataIn.type == "Spectra":
126 126 self.dataOut.copy(self.dataIn)
127 127 if shift_fft:
128 128 #shifts the spectrum a given number of positions to the right along the FFT axis
129 129 shift = int(self.dataOut.nFFTPoints/2)
130 130 self.dataOut.data_spc = numpy.roll(self.dataOut.data_spc, shift , axis=1)
131 131
132 132 if self.dataOut.data_cspc is not None:
133 133 #shifts the cross-spectrum a given number of positions to the right along the FFT axis
134 134 self.dataOut.data_cspc = numpy.roll(self.dataOut.data_cspc, shift, axis=1)
135 135 if pairsList:
136 136 self.__selectPairs(pairsList)
137 137
138 138 elif self.dataIn.type == "Voltage":
139 139
140 140 self.dataOut.flagNoData = True
141 141
142 142 if nFFTPoints == None:
143 143 raise ValueError("This SpectraProc.run() need nFFTPoints input variable")
144 144
145 145 if nProfiles == None:
146 146 nProfiles = nFFTPoints
147 147
148 148 if ippFactor == None:
149 149 self.dataOut.ippFactor = 1
150 150
151 151 self.dataOut.nFFTPoints = nFFTPoints
152 152
153 153 if self.buffer is None:
154 154 self.buffer = numpy.zeros((self.dataIn.nChannels,
155 155 nProfiles,
156 156 self.dataIn.nHeights),
157 157 dtype='complex')
158 158
159 159 if self.dataIn.flagDataAsBlock:
160 160 nVoltProfiles = self.dataIn.data.shape[1]
161 161
162 162 if nVoltProfiles == nProfiles:
163 163 self.buffer = self.dataIn.data.copy()
164 164 self.profIndex = nVoltProfiles
165 165
166 166 elif nVoltProfiles < nProfiles:
167 167
168 168 if self.profIndex == 0:
169 169 self.id_min = 0
170 170 self.id_max = nVoltProfiles
171 171
172 172 self.buffer[:, self.id_min:self.id_max,
173 173 :] = self.dataIn.data
174 174 self.profIndex += nVoltProfiles
175 175 self.id_min += nVoltProfiles
176 176 self.id_max += nVoltProfiles
177 177 else:
178 178 raise ValueError("The type object %s has %d profiles, it should just has %d profiles" % (
179 179 self.dataIn.type, self.dataIn.data.shape[1], nProfiles))
180 180 self.dataOut.flagNoData = True
181 181 else:
182 182 self.buffer[:, self.profIndex, :] = self.dataIn.data.copy()
183 183 self.profIndex += 1
184 184
185 185 if self.firstdatatime == None:
186 186 self.firstdatatime = self.dataIn.utctime
187 187
188 188 if self.profIndex == nProfiles:
189 189 self.__updateSpecFromVoltage()
190 190 if pairsList == None:
191 191 self.dataOut.pairsList = [pair for pair in itertools.combinations(self.dataOut.channelList, 2)]
192 192 else:
193 193 self.dataOut.pairsList = pairsList
194 194 self.__getFft()
195 195 self.dataOut.flagNoData = False
196 196 self.firstdatatime = None
197 197 self.profIndex = 0
198 198 else:
199 199 raise ValueError("The type of input object '{}' is not valid".format(
200 200 self.dataIn.type))
201 201
202 202 def __selectPairs(self, pairsList):
203 203
204 204 if not pairsList:
205 205 return
206 206
207 207 pairs = []
208 208 pairsIndex = []
209 209
210 210 for pair in pairsList:
211 211 if pair[0] not in self.dataOut.channelList or pair[1] not in self.dataOut.channelList:
212 212 continue
213 213 pairs.append(pair)
214 214 pairsIndex.append(pairs.index(pair))
215 215
216 216 self.dataOut.data_cspc = self.dataOut.data_cspc[pairsIndex]
217 217 self.dataOut.pairsList = pairs
218 218
219 219 return
220 220
221 221 def selectFFTs(self, minFFT, maxFFT ):
222 222 """
223 223 Selects a block of data based on a range of FFT-point values such that
224 224 minFFT <= FFT <= maxFFT
225 225 """
226 226
227 227 if (minFFT > maxFFT):
228 228 raise ValueError("Error selecting heights: Height range (%d,%d) is not valid" % (minFFT, maxFFT))
229 229
230 230 if (minFFT < self.dataOut.getFreqRange()[0]):
231 231 minFFT = self.dataOut.getFreqRange()[0]
232 232
233 233 if (maxFFT > self.dataOut.getFreqRange()[-1]):
234 234 maxFFT = self.dataOut.getFreqRange()[-1]
235 235
236 236 minIndex = 0
237 237 maxIndex = 0
238 238 FFTs = self.dataOut.getFreqRange()
239 239
240 240 inda = numpy.where(FFTs >= minFFT)
241 241 indb = numpy.where(FFTs <= maxFFT)
242 242
243 243 try:
244 244 minIndex = inda[0][0]
245 245 except:
246 246 minIndex = 0
247 247
248 248 try:
249 249 maxIndex = indb[0][-1]
250 250 except:
251 251 maxIndex = len(FFTs)
252 252
253 253 self.selectFFTsByIndex(minIndex, maxIndex)
254 254
255 255 return 1
256 256
257 257 def getBeaconSignal(self, tauindex=0, channelindex=0, hei_ref=None):
258 258 newheis = numpy.where(
259 259 self.dataOut.heightList > self.dataOut.radarControllerHeaderObj.Taus[tauindex])
260 260
261 261 if hei_ref != None:
262 262 newheis = numpy.where(self.dataOut.heightList > hei_ref)
263 263
264 264 minIndex = min(newheis[0])
265 265 maxIndex = max(newheis[0])
266 266 data_spc = self.dataOut.data_spc[:, :, minIndex:maxIndex + 1]
267 267 heightList = self.dataOut.heightList[minIndex:maxIndex + 1]
268 268
269 269 # determine indices
270 270 nheis = int(self.dataOut.radarControllerHeaderObj.txB /
271 271 (self.dataOut.heightList[1] - self.dataOut.heightList[0]))
272 272 avg_dB = 10 * \
273 273 numpy.log10(numpy.sum(data_spc[channelindex, :, :], axis=0))
274 274 beacon_dB = numpy.sort(avg_dB)[-nheis:]
275 275 beacon_heiIndexList = []
276 276 for val in avg_dB.tolist():
277 277 if val >= beacon_dB[0]:
278 278 beacon_heiIndexList.append(avg_dB.tolist().index(val))
279 279
280 280 #data_spc = data_spc[:,:,beacon_heiIndexList]
281 281 data_cspc = None
282 282 if self.dataOut.data_cspc is not None:
283 283 data_cspc = self.dataOut.data_cspc[:, :, minIndex:maxIndex + 1]
284 284 #data_cspc = data_cspc[:,:,beacon_heiIndexList]
285 285
286 286 data_dc = None
287 287 if self.dataOut.data_dc is not None:
288 288 data_dc = self.dataOut.data_dc[:, minIndex:maxIndex + 1]
289 289 #data_dc = data_dc[:,beacon_heiIndexList]
290 290
291 291 self.dataOut.data_spc = data_spc
292 292 self.dataOut.data_cspc = data_cspc
293 293 self.dataOut.data_dc = data_dc
294 294 self.dataOut.heightList = heightList
295 295 self.dataOut.beacon_heiIndexList = beacon_heiIndexList
296 296
297 297 return 1
298 298
299 299 def selectFFTsByIndex(self, minIndex, maxIndex):
300 300 """
301 301
302 302 """
303 303
304 304 if (minIndex < 0) or (minIndex > maxIndex):
305 305 raise ValueError("Error selecting heights: Index range (%d,%d) is not valid" % (minIndex, maxIndex))
306 306
307 307 if (maxIndex >= self.dataOut.nProfiles):
308 308 maxIndex = self.dataOut.nProfiles-1
309 309
310 310 #Spectra
311 311 data_spc = self.dataOut.data_spc[:,minIndex:maxIndex+1,:]
312 312
313 313 data_cspc = None
314 314 if self.dataOut.data_cspc is not None:
315 315 data_cspc = self.dataOut.data_cspc[:,minIndex:maxIndex+1,:]
316 316
317 317 data_dc = None
318 318 if self.dataOut.data_dc is not None:
319 319 data_dc = self.dataOut.data_dc[minIndex:maxIndex+1,:]
320 320
321 321 self.dataOut.data_spc = data_spc
322 322 self.dataOut.data_cspc = data_cspc
323 323 self.dataOut.data_dc = data_dc
324 324
325 325 self.dataOut.ippSeconds = self.dataOut.ippSeconds*(self.dataOut.nFFTPoints / numpy.shape(data_cspc)[1])
326 326 self.dataOut.nFFTPoints = numpy.shape(data_cspc)[1]
327 327 self.dataOut.profilesPerBlock = numpy.shape(data_cspc)[1]
328 328
329 329 return 1
330 330
331 331 def getNoise(self, minHei=None, maxHei=None, minVel=None, maxVel=None):
332 332 # height range validation
333 333 if minHei == None:
334 334 minHei = self.dataOut.heightList[0]
335 335
336 336 if maxHei == None:
337 337 maxHei = self.dataOut.heightList[-1]
338 338
339 339 if (minHei < self.dataOut.heightList[0]) or (minHei > maxHei):
340 340 print('minHei: %.2f is out of the heights range' % (minHei))
341 341 print('minHei is set to %.2f' % (self.dataOut.heightList[0]))
342 342 minHei = self.dataOut.heightList[0]
343 343
344 344 if (maxHei > self.dataOut.heightList[-1]) or (maxHei < minHei):
345 345 print('maxHei: %.2f is out of the heights range' % (maxHei))
346 346 print('maxHei is set to %.2f' % (self.dataOut.heightList[-1]))
347 347 maxHei = self.dataOut.heightList[-1]
348 348
349 349 # velocity range validation
350 350 velrange = self.dataOut.getVelRange(1)
351 351
352 352 if minVel == None:
353 353 minVel = velrange[0]
354 354
355 355 if maxVel == None:
356 356 maxVel = velrange[-1]
357 357
358 358 if (minVel < velrange[0]) or (minVel > maxVel):
359 359 print('minVel: %.2f is out of the velocity range' % (minVel))
360 360 print('minVel is set to %.2f' % (velrange[0]))
361 361 minVel = velrange[0]
362 362
363 363 if (maxVel > velrange[-1]) or (maxVel < minVel):
364 364 print('maxVel: %.2f is out of the velocity range' % (maxVel))
365 365 print('maxVel is set to %.2f' % (velrange[-1]))
366 366 maxVel = velrange[-1]
367 367
368 368 # index selection for the height range
369 369 minIndex = 0
370 370 maxIndex = 0
371 371 heights = self.dataOut.heightList
372 372
373 373 inda = numpy.where(heights >= minHei)
374 374 indb = numpy.where(heights <= maxHei)
375 375
376 376 try:
377 377 minIndex = inda[0][0]
378 378 except:
379 379 minIndex = 0
380 380
381 381 try:
382 382 maxIndex = indb[0][-1]
383 383 except:
384 384 maxIndex = len(heights)
385 385
386 386 if (minIndex < 0) or (minIndex > maxIndex):
387 387 raise ValueError("some value in (%d,%d) is not valid" % (
388 388 minIndex, maxIndex))
389 389
390 390 if (maxIndex >= self.dataOut.nHeights):
391 391 maxIndex = self.dataOut.nHeights - 1
392 392
393 393 # index selection for the velocity range
394 394 indminvel = numpy.where(velrange >= minVel)
395 395 indmaxvel = numpy.where(velrange <= maxVel)
396 396 try:
397 397 minIndexVel = indminvel[0][0]
398 398 except:
399 399 minIndexVel = 0
400 400
401 401 try:
402 402 maxIndexVel = indmaxvel[0][-1]
403 403 except:
404 404 maxIndexVel = len(velrange)
405 405
406 406 # spectrum selection
407 407 data_spc = self.dataOut.data_spc[:,
408 408 minIndexVel:maxIndexVel + 1, minIndex:maxIndex + 1]
409 409 # noise estimation
410 410 noise = numpy.zeros(self.dataOut.nChannels)
411 411
412 412 for channel in range(self.dataOut.nChannels):
413 413 daux = data_spc[channel, :, :]
414 414 sortdata = numpy.sort(daux, axis=None)
415 415 noise[channel] = hildebrand_sekhon(sortdata, self.dataOut.nIncohInt)
416 416
417 417 self.dataOut.noise_estimation = noise.copy()
418 418
419 419 return 1
420 420
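`getNoise` sorts the selected spectral samples and hands them to `hildebrand_sekhon`, which keeps the largest subset of the smallest values whose spread is still consistent with white noise for the given number of incoherent integrations. A minimal standalone sketch of that criterion (the same running test used in `hildebrand_sekhon_Integration` further below, without its 75% floor), returning the mean of the retained samples as the noise level; the helper name and test data are illustrative only:

import numpy

def hs_noise(data, navg):
    """Hildebrand-Sekhon style noise estimate: mean of the retained sorted samples."""
    sortdata = numpy.sort(data, axis=None)
    sump, sumq, j = 0.0, 0.0, 0
    for value in sortdata:
        sump += value
        sumq += value ** 2
        j += 1
        if j > 1:
            rtest = float(j) / (j - 1) + 1.0 / navg
            if sumq * j > rtest * sump ** 2:    # spread exceeds the white-noise bound
                sump -= value
                j -= 1
                break
    return sump / j

rng = numpy.random.default_rng(1)
noise = rng.exponential(1.0, 1000)                  # noise-like samples
signal = noise.copy()
signal[:20] += 50.0                                 # a few strong echoes
print(hs_noise(noise, navg=10), hs_noise(signal, navg=10))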
421 421 class removeDC(Operation):
422 422
423 423 def run(self, dataOut, mode=2):
424 424 self.dataOut = dataOut
425 425 jspectra = self.dataOut.data_spc
426 426 jcspectra = self.dataOut.data_cspc
427 427
428 428 num_chan = jspectra.shape[0]
429 429 num_hei = jspectra.shape[2]
430 430
431 431 if jcspectra is not None:
432 432 jcspectraExist = True
433 433 num_pairs = jcspectra.shape[0]
434 434 else:
435 435 jcspectraExist = False
436 436
437 437 freq_dc = int(jspectra.shape[1] / 2)
438 438 ind_vel = numpy.array([-2, -1, 1, 2]) + freq_dc
439 439 ind_vel = ind_vel.astype(int)
440 440
441 441 if ind_vel[0] < 0:
442 442 ind_vel[list(range(0, 1))] = ind_vel[list(range(0, 1))] + self.num_prof
443 443
444 444 if mode == 1:
445 445 jspectra[:, freq_dc, :] = (
446 446 jspectra[:, ind_vel[1], :] + jspectra[:, ind_vel[2], :]) / 2 # CORRECCION
447 447
448 448 if jcspectraExist:
449 449 jcspectra[:, freq_dc, :] = (
450 450 jcspectra[:, ind_vel[1], :] + jcspectra[:, ind_vel[2], :]) / 2
451 451
452 452 if mode == 2:
453 453
454 454 vel = numpy.array([-2, -1, 1, 2])
455 455 xx = numpy.zeros([4, 4])
456 456
457 457 for fil in range(4):
458 458 xx[fil, :] = vel[fil]**numpy.asarray(list(range(4)))
459 459
460 460 xx_inv = numpy.linalg.inv(xx)
461 461 xx_aux = xx_inv[0, :]
462 462
463 463 for ich in range(num_chan):
464 464 yy = jspectra[ich, ind_vel, :]
465 465 jspectra[ich, freq_dc, :] = numpy.dot(xx_aux, yy)
466 466
467 467 junkid = jspectra[ich, freq_dc, :] <= 0
468 468 cjunkid = sum(junkid)
469 469
470 470 if cjunkid.any():
471 471 jspectra[ich, freq_dc, junkid.nonzero()] = (
472 472 jspectra[ich, ind_vel[1], junkid] + jspectra[ich, ind_vel[2], junkid]) / 2
473 473
474 474 if jcspectraExist:
475 475 for ip in range(num_pairs):
476 476 yy = jcspectra[ip, ind_vel, :]
477 477 jcspectra[ip, freq_dc, :] = numpy.dot(xx_aux, yy)
478 478
479 479 self.dataOut.data_spc = jspectra
480 480 self.dataOut.data_cspc = jcspectra
481 481
482 482 return self.dataOut
483 483
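In mode 2, the DC bin of each channel is replaced by the value at velocity 0 of a cubic fitted through the four neighbouring bins (offsets -2, -1, 1, 2): each row of `xx` is `vel**[0, 1, 2, 3]`, so `xx_inv[0, :]` dotted with the four samples evaluates that cubic at x = 0. A small numpy check of this interpolation step, with an illustrative polynomial:

import numpy

vel = numpy.array([-2, -1, 1, 2])
xx = numpy.array([v ** numpy.arange(4) for v in vel], dtype=float)   # Vandermonde rows
xx_aux = numpy.linalg.inv(xx)[0, :]                                  # weights that evaluate the cubic at 0

def poly(x):
    # known cubic used to generate the neighbouring bins
    return 3.0 - 0.5 * x + 0.25 * x ** 2 + 0.1 * x ** 3

yy = poly(vel.astype(float))
print(numpy.dot(xx_aux, yy), poly(0.0))    # both 3.0 (up to rounding), the recovered DC value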
484 484 # import matplotlib.pyplot as plt
485 485
486 486 def fit_func( x, a0, a1, a2): #, a3, a4, a5):
487 487 z = (x - a1) / a2
488 488 y = a0 * numpy.exp(-z**2 / a2) #+ a3 + a4 * x + a5 * x**2
489 489 return y
490 490
491 491
492 492 class CleanRayleigh(Operation):
493 493
494 494 def __init__(self):
495 495
496 496 Operation.__init__(self)
497 497 self.i=0
498 498 self.isConfig = False
499 499 self.__dataReady = False
500 500 self.__profIndex = 0
501 501 self.byTime = False
502 502 self.byProfiles = False
503 503
504 504 self.bloques = None
505 505 self.bloque0 = None
506 506
507 507 self.index = 0
508 508
509 509 self.buffer = 0
510 510 self.buffer2 = 0
511 511 self.buffer3 = 0
512 512
513 513
514 514 def setup(self,dataOut,min_hei,max_hei,n, timeInterval,factor_stdv):
515 515
516 516 self.nChannels = dataOut.nChannels
517 517 self.nProf = dataOut.nProfiles
518 518 self.nPairs = dataOut.data_cspc.shape[0]
519 519 self.pairsArray = numpy.array(dataOut.pairsList)
520 520 self.spectra = dataOut.data_spc
521 521 self.cspectra = dataOut.data_cspc
522 522 self.heights = dataOut.heightList #all heights
523 523 self.nHeights = len(self.heights)
524 524 self.min_hei = min_hei
525 525 self.max_hei = max_hei
526 526 if (self.min_hei == None):
527 527 self.min_hei = 0
528 528 if (self.max_hei == None):
529 529 self.max_hei = dataOut.heightList[-1]
530 530 self.hval = ((self.max_hei>=self.heights) & (self.heights >= self.min_hei)).nonzero()
531 531 self.heightsClean = self.heights[self.hval] #filtered heights
532 532 self.hval = self.hval[0] # shape (N,), just N elements -> height indices
533 533 self.nHeightsClean = len(self.heightsClean)
534 534 self.channels = dataOut.channelList
535 535 self.nChan = len(self.channels)
536 536 self.nIncohInt = dataOut.nIncohInt
537 537 self.__initime = dataOut.utctime
538 538 self.maxAltInd = self.hval[-1]+1
539 539 self.minAltInd = self.hval[0]
540 540
541 541 self.crosspairs = dataOut.pairsList
542 542 self.nPairs = len(self.crosspairs)
543 543 self.normFactor = dataOut.normFactor
544 544 self.nFFTPoints = dataOut.nFFTPoints
545 545 self.ippSeconds = dataOut.ippSeconds
546 546 self.currentTime = self.__initime
547 547 self.pairsArray = numpy.array(dataOut.pairsList)
548 548 self.factor_stdv = factor_stdv
549 549 #print("CHANNELS: ",[x for x in self.channels])
550 550
551 551 if n != None :
552 552 self.byProfiles = True
553 553 self.nIntProfiles = n
554 554 else:
555 555 self.__integrationtime = timeInterval
556 556
557 557 self.__dataReady = False
558 558 self.isConfig = True
559 559
560 560
561 561
562 562 def run(self, dataOut,min_hei=None,max_hei=None, n=None, timeInterval=10,factor_stdv=2.5):
563 563 #print (dataOut.utctime)
564 564 if not self.isConfig :
565 565 #print("Setting config")
566 566 self.setup(dataOut, min_hei,max_hei,n,timeInterval,factor_stdv)
567 567 #print("Config Done")
568 568 tini=dataOut.utctime
569 569
570 570 if self.byProfiles:
571 571 if self.__profIndex == self.nIntProfiles:
572 572 self.__dataReady = True
573 573 else:
574 574 if (tini - self.__initime) >= self.__integrationtime:
575 575 #print(tini - self.__initime,self.__profIndex)
576 576 self.__dataReady = True
577 577 self.__initime = tini
578 578
579 579 #if (tini.tm_min % 2) == 0 and (tini.tm_sec < 5 and self.fint==0):
580 580
581 581 if self.__dataReady:
582 582 #print("Data ready",self.__profIndex)
583 583 self.__profIndex = 0
584 584 jspc = self.buffer
585 585 jcspc = self.buffer2
586 586 #jnoise = self.buffer3
587 587 self.buffer = dataOut.data_spc
588 588 self.buffer2 = dataOut.data_cspc
589 589 #self.buffer3 = dataOut.noise
590 590 self.currentTime = dataOut.utctime
591 591 if numpy.any(jspc) :
592 592 #print( jspc.shape, jcspc.shape)
593 593 jspc = numpy.reshape(jspc,(int(len(jspc)/self.nChannels),self.nChannels,self.nFFTPoints,self.nHeights))
594 594 jcspc= numpy.reshape(jcspc,(int(len(jcspc)/self.nPairs),self.nPairs,self.nFFTPoints,self.nHeights))
595 595 self.__dataReady = False
596 596 #print( jspc.shape, jcspc.shape)
597 597 dataOut.flagNoData = False
598 598 else:
599 599 dataOut.flagNoData = True
600 600 self.__dataReady = False
601 601 return dataOut
602 602 else:
603 603 #print( len(self.buffer))
604 604 if numpy.any(self.buffer):
605 605 self.buffer = numpy.concatenate((self.buffer,dataOut.data_spc), axis=0)
606 606 self.buffer2 = numpy.concatenate((self.buffer2,dataOut.data_cspc), axis=0)
607 607 self.buffer3 += dataOut.data_dc
608 608 else:
609 609 self.buffer = dataOut.data_spc
610 610 self.buffer2 = dataOut.data_cspc
611 611 self.buffer3 = dataOut.data_dc
612 612 #print self.index, self.fint
613 613 #print self.buffer2.shape
614 614 dataOut.flagNoData = True ## NOTE: ?? review LATER
615 615 self.__profIndex += 1
616 616 return dataOut ## NOTE: REV
617 617
618 618
619 619 #index = tini.tm_hour*12+tini.tm_min/5
620 620 '''TO REVIEW'''
621 621 # jspc = jspc/self.nFFTPoints/self.normFactor
622 622 # jcspc = jcspc/self.nFFTPoints/self.normFactor
623 623
624 624
625 625
626 626 tmp_spectra,tmp_cspectra = self.cleanRayleigh(dataOut,jspc,jcspc,self.factor_stdv)
627 627 dataOut.data_spc = tmp_spectra
628 628 dataOut.data_cspc = tmp_cspectra
629 629
630 630 #dataOut.data_spc,dataOut.data_cspc = self.cleanRayleigh(dataOut,jspc,jcspc,self.factor_stdv)
631 631
632 632 dataOut.data_dc = self.buffer3
633 633 dataOut.nIncohInt *= self.nIntProfiles
634 634 dataOut.utctime = self.currentTime #averaged time
635 635 #print("Time: ",time.localtime(dataOut.utctime))
636 636 # dataOut.data_spc = sat_spectra
637 637 # dataOut.data_cspc = sat_cspectra
638 638 self.buffer = 0
639 639 self.buffer2 = 0
640 640 self.buffer3 = 0
641 641
642 642 return dataOut
643 643
644 644 def cleanRayleigh(self,dataOut,spectra,cspectra,factor_stdv):
645 645 #print("OP cleanRayleigh")
646 646 #import matplotlib.pyplot as plt
647 647 #for k in range(149):
648 648 #channelsProcssd = []
649 649 #channelA_ok = False
650 650 #rfunc = cspectra.copy() #self.bloques
651 651 rfunc = spectra.copy()
652 652 #rfunc = cspectra
653 653 #val_spc = spectra*0.0 #self.bloque0*0.0
654 654 #val_cspc = cspectra*0.0 #self.bloques*0.0
655 655 #in_sat_spectra = spectra.copy() #self.bloque0
656 656 #in_sat_cspectra = cspectra.copy() #self.bloques
657 657
658 658
659 659 ###ONLY FOR TEST:
660 660 raxs = math.ceil(math.sqrt(self.nPairs))
661 661 caxs = math.ceil(self.nPairs/raxs)
662 662 if self.nPairs <4:
663 663 raxs = 2
664 664 caxs = 2
665 665 #print(raxs, caxs)
666 666 fft_rev = 14 #nFFT to plot
667 667 hei_rev = ((self.heights >= 550) & (self.heights <= 551)).nonzero() #hei to plot
668 668 hei_rev = hei_rev[0]
669 669 #print(hei_rev)
670 670
671 671 #print numpy.absolute(rfunc[:,0,0,14])
672 672
673 673 gauss_fit, covariance = None, None
674 674 for ih in range(self.minAltInd,self.maxAltInd):
675 675 for ifreq in range(self.nFFTPoints):
676 676 '''
677 677 ###ONLY FOR TEST:
678 678 if ifreq ==fft_rev and ih==hei_rev: #TO VIEW A SIGNLE FREQUENCY
679 679 fig, axs = plt.subplots(raxs, caxs)
680 680 fig2, axs2 = plt.subplots(raxs, caxs)
681 681 col_ax = 0
682 682 row_ax = 0
683 683 '''
684 684 #print(self.nPairs)
685 685 for ii in range(self.nChan): #SELF and CROSS channel pairs
686 686 # if self.crosspairs[ii][1]-self.crosspairs[ii][0] > 1: # APPLY ONLY TO CONTIGUOUS PAIRS
687 687 # continue
688 688 # if not self.crosspairs[ii][0] in channelsProcssd:
689 689 # channelA_ok = True
690 690 #print("pair: ",self.crosspairs[ii])
691 691 '''
692 692 ###ONLY FOR TEST:
693 693 if (col_ax%caxs==0 and col_ax!=0 and self.nPairs !=1):
694 694 col_ax = 0
695 695 row_ax += 1
696 696 '''
697 697 func2clean = 10*numpy.log10(numpy.absolute(rfunc[:,ii,ifreq,ih])) #power?
698 698 #print(func2clean.shape)
699 699 val = (numpy.isfinite(func2clean)==True).nonzero()
700 700
701 701 if len(val)>0: #limiter
702 702 min_val = numpy.around(numpy.amin(func2clean)-2) #> (-40)
703 703 if min_val <= -40 :
704 704 min_val = -40
705 705 max_val = numpy.around(numpy.amax(func2clean)+2) #< 200
706 706 if max_val >= 200 :
707 707 max_val = 200
708 708 #print min_val, max_val
709 709 step = 1
710 710 #print("Getting bins and the histogram")
711 711 x_dist = min_val + numpy.arange(1 + ((max_val-(min_val))/step))*step
712 712 y_dist,binstep = numpy.histogram(func2clean,bins=range(int(min_val),int(max_val+2),step))
713 713 #print(len(y_dist),len(binstep[:-1]))
714 714 #print(row_ax,col_ax, " ..")
715 715 #print(self.pairsArray[ii][0],self.pairsArray[ii][1])
716 716 mean = numpy.sum(x_dist * y_dist) / numpy.sum(y_dist)
717 717 sigma = numpy.sqrt(numpy.sum(y_dist * (x_dist - mean)**2) / numpy.sum(y_dist))
718 718 parg = [numpy.amax(y_dist),mean,sigma]
719 719
720 720 newY = None
721 721
722 722 try :
723 723 gauss_fit, covariance = curve_fit(fit_func, x_dist, y_dist,p0=parg)
724 724 mode = gauss_fit[1]
725 725 stdv = gauss_fit[2]
726 726 #print(" FIT OK",gauss_fit)
727 727 '''
728 728 ###ONLY FOR TEST:
729 729 if ifreq ==fft_rev and ih==hei_rev: #TO VIEW A SIGNLE FREQUENCY
730 730 newY = fit_func(x_dist,gauss_fit[0],gauss_fit[1],gauss_fit[2])
731 731 axs[row_ax,col_ax].plot(binstep[:-1],y_dist,color='green')
732 732 axs[row_ax,col_ax].plot(binstep[:-1],newY,color='red')
733 733 axs[row_ax,col_ax].set_title("CH "+str(self.channels[ii]))
734 734 '''
735 735 except:
736 736 mode = mean
737 737 stdv = sigma
738 738 #print("FIT FAIL")
739 739 #continue
740 740
741 741
742 742 #print(mode,stdv)
743 743 #Removing echoes greater than mode + std_factor*stdv
744 744 noval = (abs(func2clean - mode)>=(factor_stdv*stdv)).nonzero()
745 745 #noval holds the indices that will be removed
746 746 #print("Chan ",ii," novals: ",len(noval[0]))
747 747 if len(noval[0]) > 0: #an array of shape (N,) has length N
748 748 novall = ((func2clean - mode) >= (factor_stdv*stdv)).nonzero()
749 749 #print(novall)
750 750 #print(" ",self.pairsArray[ii])
751 751 #cross_pairs = self.pairsArray[ii]
752 752 #Getting coherent echoes which are removed.
753 753 # if len(novall[0]) > 0:
754 754 #
755 755 # val_spc[novall[0],cross_pairs[0],ifreq,ih] = 1
756 756 # val_spc[novall[0],cross_pairs[1],ifreq,ih] = 1
757 757 # val_cspc[novall[0],ii,ifreq,ih] = 1
758 758 #print("OUT NOVALL 1")
759 759 try:
760 760 pair = (self.channels[ii],self.channels[ii + 1])
761 761 except:
762 762 pair = (99,99)
763 763 #print("par ", pair)
764 764 if ( pair in self.crosspairs):
765 765 q = self.crosspairs.index(pair)
766 766 #print("estΓ‘ aqui: ", q, (ii,ii + 1))
767 767 new_a = numpy.delete(cspectra[:,q,ifreq,ih], noval[0])
768 768 cspectra[noval,q,ifreq,ih] = numpy.mean(new_a) #mean CrossSpectra
769 769
770 770 #if channelA_ok:
771 771 #chA = self.channels.index(cross_pairs[0])
772 772 new_b = numpy.delete(spectra[:,ii,ifreq,ih], noval[0])
773 773 spectra[noval,ii,ifreq,ih] = numpy.mean(new_b) #mean Spectra Pair A
774 774 #channelA_ok = False
775 775
776 776 # chB = self.channels.index(cross_pairs[1])
777 777 # new_c = numpy.delete(spectra[:,chB,ifreq,ih], noval[0])
778 778 # spectra[noval,chB,ifreq,ih] = numpy.mean(new_c) #mean Spectra Pair B
779 779 #
780 780 # channelsProcssd.append(self.crosspairs[ii][0]) # save channel A
781 781 # channelsProcssd.append(self.crosspairs[ii][1]) # save channel B
782 782 '''
783 783 ###ONLY FOR TEST:
784 784 if ifreq ==fft_rev and ih==hei_rev: #TO VIEW A SIGNLE FREQUENCY
785 785 func2clean = 10*numpy.log10(numpy.absolute(spectra[:,ii,ifreq,ih]))
786 786 y_dist,binstep = numpy.histogram(func2clean,bins=range(int(min_val),int(max_val+2),step))
787 787 axs2[row_ax,col_ax].plot(binstep[:-1],newY,color='red')
788 788 axs2[row_ax,col_ax].plot(binstep[:-1],y_dist,color='green')
789 789 axs2[row_ax,col_ax].set_title("CH "+str(self.channels[ii]))
790 790 '''
791 791 '''
792 792 ###ONLY FOR TEST:
793 793 col_ax += 1 #plot column counter
794 794 ##print(col_ax)
795 795 ###ONLY FOR TEST:
796 796 if ifreq ==fft_rev and ih==hei_rev: #TO VIEW A SIGNLE FREQUENCY
797 797 title = str(dataOut.datatime)+" nFFT: "+str(ifreq)+" Alt: "+str(self.heights[ih])+ " km"
798 798 title2 = str(dataOut.datatime)+" nFFT: "+str(ifreq)+" Alt: "+str(self.heights[ih])+ " km CLEANED"
799 799 fig.suptitle(title)
800 800 fig2.suptitle(title2)
801 801 plt.show()
802 802 '''
803 803 ##################################################################################################
804 804
805 805 #print("Getting average of the spectra and cross-spectra from incoherent echoes.")
806 806 out_spectra = numpy.zeros([self.nChan,self.nFFTPoints,self.nHeights], dtype=float) #+numpy.nan
807 807 out_cspectra = numpy.zeros([self.nPairs,self.nFFTPoints,self.nHeights], dtype=complex) #+numpy.nan
808 808 for ih in range(self.nHeights):
809 809 for ifreq in range(self.nFFTPoints):
810 810 for ich in range(self.nChan):
811 811 tmp = spectra[:,ich,ifreq,ih]
812 812 valid = (numpy.isfinite(tmp[:])==True).nonzero()
813 813
814 814 if len(valid[0]) >0 :
815 815 out_spectra[ich,ifreq,ih] = numpy.nansum(tmp)#/len(valid[0])
816 816
817 817 for icr in range(self.nPairs):
818 818 tmp = numpy.squeeze(cspectra[:,icr,ifreq,ih])
819 819 valid = (numpy.isfinite(tmp)==True).nonzero()
820 820 if len(valid[0]) > 0:
821 821 out_cspectra[icr,ifreq,ih] = numpy.nansum(tmp)#/len(valid[0])
822 822
823 823 return out_spectra, out_cspectra
824 824
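Per (channel, FFT bin, height) cell, `cleanRayleigh` histograms the log-power of the buffered time samples, fits a Gaussian to that histogram with `curve_fit` (falling back to the histogram moments if the fit fails), and replaces samples farther than `factor_stdv` standard deviations from the fitted mode with the mean of the surviving samples. A minimal sketch of that per-cell step on synthetic data, using the standard Gaussian model rather than the module's `fit_func`; the names and numbers here are illustrative only:

import numpy
from scipy.optimize import curve_fit

def gauss(x, a0, a1, a2):
    return a0 * numpy.exp(-((x - a1) / a2) ** 2 / 2)

rng = numpy.random.default_rng(2)
power = rng.exponential(100.0, 200)
power[:5] *= 50.0                                    # a few satellite-like outliers
db = 10 * numpy.log10(power)

y_dist, edges = numpy.histogram(db, bins=range(int(db.min()) - 1, int(db.max()) + 2))
x_dist = edges[:-1].astype(float)
mean = numpy.sum(x_dist * y_dist) / numpy.sum(y_dist)
sigma = numpy.sqrt(numpy.sum(y_dist * (x_dist - mean) ** 2) / numpy.sum(y_dist))

try:
    popt, _ = curve_fit(gauss, x_dist, y_dist, p0=[y_dist.max(), mean, sigma])
    mode, stdv = popt[1], abs(popt[2])
except RuntimeError:                                 # fall back to the moments, as the operation does
    mode, stdv = mean, sigma

noval = numpy.abs(db - mode) >= 2.5 * stdv           # factor_stdv = 2.5, the run() default
cleaned = db.copy()
cleaned[noval] = numpy.mean(db[~noval])
print(noval.sum(), 'samples replaced')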
825 825 def REM_ISOLATED_POINTS(self,array,rth):
826 826 # import matplotlib.pyplot as plt
827 827 if rth == None :
828 828 rth = 4
829 829 #print("REM ISO")
830 830 num_prof = len(array[0,:,0])
831 831 num_hei = len(array[0,0,:])
832 832 n2d = len(array[:,0,0])
833 833
834 834 for ii in range(n2d) :
835 835 #print ii,n2d
836 836 tmp = array[ii,:,:]
837 837 #print tmp.shape, array[ii,101,:],array[ii,102,:]
838 838
839 839 # fig = plt.figure(figsize=(6,5))
840 840 # left, bottom, width, height = 0.1, 0.1, 0.8, 0.8
841 841 # ax = fig.add_axes([left, bottom, width, height])
842 842 # x = range(num_prof)
843 843 # y = range(num_hei)
844 844 # cp = ax.contour(y,x,tmp)
845 845 # ax.clabel(cp, inline=True,fontsize=10)
846 846 # plt.show()
847 847
848 848 #indxs = WHERE(FINITE(tmp) AND tmp GT 0,cindxs)
849 849 tmp = numpy.reshape(tmp,num_prof*num_hei)
850 850 indxs1 = (numpy.isfinite(tmp)==True).nonzero()
851 851 indxs2 = (tmp > 0).nonzero()
852 852
853 853 indxs1 = (indxs1[0])
854 854 indxs2 = indxs2[0]
855 855 #indxs1 = numpy.array(indxs1[0])
856 856 #indxs2 = numpy.array(indxs2[0])
857 857 indxs = None
858 858 #print indxs1 , indxs2
859 859 for iv in range(len(indxs2)):
860 860 indv = numpy.array((indxs1 == indxs2[iv]).nonzero())
861 861 #print len(indxs2), indv
862 862 if len(indv[0]) > 0 :
863 863 indxs = numpy.concatenate((indxs,indxs2[iv]), axis=None)
864 864 # print indxs
865 865 indxs = indxs[1:]
866 866 #print(indxs, len(indxs))
867 867 if len(indxs) < 4 :
868 868 array[ii,:,:] = 0.
869 869 return
870 870
871 871 xpos = numpy.mod(indxs ,num_hei)
872 872 ypos = (indxs / num_hei)
873 873 sx = numpy.argsort(xpos) # Ordering with respect to "x" (time)
874 874 #print sx
875 875 xpos = xpos[sx]
876 876 ypos = ypos[sx]
877 877
878 878 # *********************************** Cleaning isolated points **********************************
879 879 ic = 0
880 880 while True :
881 881 r = numpy.sqrt(list(numpy.power((xpos[ic]-xpos),2)+ numpy.power((ypos[ic]-ypos),2)))
882 882 #no_coh = WHERE(FINITE(r) AND (r LE rth),cno_coh)
883 883 #plt.plot(r)
884 884 #plt.show()
885 885 no_coh1 = (numpy.isfinite(r)==True).nonzero()
886 886 no_coh2 = (r <= rth).nonzero()
887 887 #print r, no_coh1, no_coh2
888 888 no_coh1 = numpy.array(no_coh1[0])
889 889 no_coh2 = numpy.array(no_coh2[0])
890 890 no_coh = None
891 891 #print valid1 , valid2
892 892 for iv in range(len(no_coh2)):
893 893 indv = numpy.array((no_coh1 == no_coh2[iv]).nonzero())
894 894 if len(indv[0]) > 0 :
895 895 no_coh = numpy.concatenate((no_coh,no_coh2[iv]), axis=None)
896 896 no_coh = no_coh[1:]
897 897 #print len(no_coh), no_coh
898 898 if len(no_coh) < 4 :
899 899 #print xpos[ic], ypos[ic], ic
900 900 # plt.plot(r)
901 901 # plt.show()
902 902 xpos[ic] = numpy.nan
903 903 ypos[ic] = numpy.nan
904 904
905 905 ic = ic + 1
906 906 if (ic == len(indxs)) :
907 907 break
908 908 #print( xpos, ypos)
909 909
910 910 indxs = (numpy.isfinite(list(xpos))==True).nonzero()
911 911 #print indxs[0]
912 912 if len(indxs[0]) < 4 :
913 913 array[ii,:,:] = 0.
914 914 return
915 915
916 916 xpos = xpos[indxs[0]]
917 917 ypos = ypos[indxs[0]]
918 918 for i in range(0,len(ypos)):
919 919 ypos[i]=int(ypos[i])
920 920 junk = tmp
921 921 tmp = junk*0.0
922 922
923 923 tmp[list(xpos + (ypos*num_hei))] = junk[list(xpos + (ypos*num_hei))]
924 924 array[ii,:,:] = numpy.reshape(tmp,(num_prof,num_hei))
925 925
926 926 #print array.shape
927 927 #tmp = numpy.reshape(tmp,(num_prof,num_hei))
928 928 #print tmp.shape
929 929
930 930 # fig = plt.figure(figsize=(6,5))
931 931 # left, bottom, width, height = 0.1, 0.1, 0.8, 0.8
932 932 # ax = fig.add_axes([left, bottom, width, height])
933 933 # x = range(num_prof)
934 934 # y = range(num_hei)
935 935 # cp = ax.contour(y,x,array[ii,:,:])
936 936 # ax.clabel(cp, inline=True,fontsize=10)
937 937 # plt.show()
938 938 return array
939 939
940 940
941 941 class IntegrationFaradaySpectra(Operation):
942 942
943 943 __profIndex = 0
944 944 __withOverapping = False
945 945
946 946 __byTime = False
947 947 __initime = None
948 948 __lastdatatime = None
949 949 __integrationtime = None
950 950
951 951 __buffer_spc = None
952 952 __buffer_cspc = None
953 953 __buffer_dc = None
954 954
955 955 __dataReady = False
956 956
957 957 __timeInterval = None
958 958
959 959 n = None
960 960
961 961 def __init__(self):
962 962
963 963 Operation.__init__(self)
964 964
965 965 def setup(self, dataOut,n=None, timeInterval=None, overlapping=False, DPL=None):
966 966 """
967 967 Set the parameters of the integration class.
968 968
969 969 Inputs:
970 970
971 971 n : Number of spectra to integrate (incoherent integrations)
972 972 timeInterval : Integration time; ignored if the parameter "n" is given
973 973 overlapping :
974 974
975 975 """
976 976
977 977 self.__initime = None
978 978 self.__lastdatatime = 0
979 979
980 980 self.__buffer_spc = []
981 981 self.__buffer_cspc = []
982 982 self.__buffer_dc = 0
983 983
984 984 self.__profIndex = 0
985 985 self.__dataReady = False
986 986 self.__byTime = False
987 987
988 988 #self.ByLags = dataOut.ByLags ###REDEFINIR
989 989 self.ByLags = False
990 990
991 991 if DPL != None:
992 992 self.DPL=DPL
993 993 else:
994 994 #self.DPL=dataOut.DPL ###REDEFINIR
995 995 self.DPL=0
996 996
997 997 if n is None and timeInterval is None:
998 998 raise ValueError("n or timeInterval should be specified ...")
999 999
1000 1000 if n is not None:
1001 1001 self.n = int(n)
1002 1002 else:
1003 1003
1004 1004 self.__integrationtime = int(timeInterval)
1005 1005 self.n = None
1006 1006 self.__byTime = True
1007 1007
1008 1008 def putData(self, data_spc, data_cspc, data_dc):
1009 1009 """
1010 1010 Add a profile to __buffer_spc and increase __profIndex by one
1011 1011
1012 1012 """
1013 1013
1014 1014 self.__buffer_spc.append(data_spc)
1015 1015
1016 1016 if data_cspc is None:
1017 1017 self.__buffer_cspc = None
1018 1018 else:
1019 1019 self.__buffer_cspc.append(data_cspc)
1020 1020
1021 1021 if data_dc is None:
1022 1022 self.__buffer_dc = None
1023 1023 else:
1024 1024 self.__buffer_dc += data_dc
1025 1025
1026 1026 self.__profIndex += 1
1027 1027
1028 1028 return
1029 1029
1030 1030 def hildebrand_sekhon_Integration(self,data,navg):
1031 1031
1032 1032 sortdata = numpy.sort(data, axis=None)
1033 1033 sortID=data.argsort()
1034 1034 lenOfData = len(sortdata)
1035 1035 nums_min = lenOfData*0.75
1036 1036 if nums_min <= 5:
1037 1037 nums_min = 5
1038 1038 sump = 0.
1039 1039 sumq = 0.
1040 1040 j = 0
1041 1041 cont = 1
1042 1042 while((cont == 1)and(j < lenOfData)):
1043 1043 sump += sortdata[j]
1044 1044 sumq += sortdata[j]**2
1045 1045 if j > nums_min:
1046 1046 rtest = float(j)/(j-1) + 1.0/navg
1047 1047 if ((sumq*j) > (rtest*sump**2)):
1048 1048 j = j - 1
1049 1049 sump = sump - sortdata[j]
1050 1050 sumq = sumq - sortdata[j]**2
1051 1051 cont = 0
1052 1052 j += 1
1053 1053 #lnoise = sump / j
1054 1054
1055 1055 return j,sortID
1056 1056
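# Illustrative sketch: hildebrand_sekhon_Integration() returns the index of the last
# sample accepted as noise together with the sort order, so pushData() below can treat
# sortID[index:] as outlier realizations. A simplified, self-contained version of the
# same criterion (it omits the nums_min guard used above; names are examples only):

import numpy

def hs_noise_cut(power, navg=1):
    """Return (cut_index, sort_order) for a 1-D array of power samples."""
    order = numpy.argsort(power, axis=None)
    s = numpy.ravel(power)[order]
    sump = 0.0
    sumq = 0.0
    j = 0
    while j < s.size:
        sump += s[j]
        sumq += s[j] ** 2
        if j > 1:
            rtest = float(j) / (j - 1) + 1.0 / navg
            if sumq * j > rtest * sump ** 2:  # samples no longer consistent with white noise
                break
        j += 1
    return j, order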
1057 1057 def pushData(self):
1058 1058 """
1059 1059 Return the sum of the buffered spectra and the number of profiles used in the sum.
1060 1060
1061 1061 Affected:
1062 1062
1063 1063 self.__profIndex
1064 1064
1065 1065 """
1066 1066 bufferH=None
1067 1067 buffer=None
1068 1068 buffer1=None
1069 1069 buffer_cspc=None
1070 1070 self.__buffer_spc=numpy.array(self.__buffer_spc)
1071 1071 self.__buffer_cspc=numpy.array(self.__buffer_cspc)
1072 1072 freq_dc = int(self.__buffer_spc.shape[2] / 2)
1073 1073 #print("FREQ_DC",freq_dc,self.__buffer_spc.shape,self.nHeights)
1074 1074 for k in range(7,self.nHeights):
1075 1075 buffer_cspc=numpy.copy(self.__buffer_cspc[:,:,:,k])
1076 1076 outliers_IDs_cspc=[]
1077 1077 cspc_outliers_exist=False
1078 1078 #print("AQUIII")
1079 1079 for i in range(self.nChannels):#dataOut.nChannels):
1080 1080
1081 1081 buffer1=numpy.copy(self.__buffer_spc[:,i,:,k])
1082 1082 indexes=[]
1083 1083 #sortIDs=[]
1084 1084 outliers_IDs=[]
1085 1085
1086 1086 for j in range(self.nProfiles):
1087 1087 # if i==0 and j==freq_dc: #NOT CONSIDERING DC PROFILE AT CHANNEL 0
1088 1088 # continue
1089 1089 # if i==1 and j==0: #NOT CONSIDERING DC PROFILE AT CHANNEL 1
1090 1090 # continue
1091 1091 buffer=buffer1[:,j]
1092 1092 index,sortID=self.hildebrand_sekhon_Integration(buffer,1)
1093 1093
1094 1094 indexes.append(index)
1095 1095 #sortIDs.append(sortID)
1096 1096 outliers_IDs=numpy.append(outliers_IDs,sortID[index:])
1097 1097
1098 1098 outliers_IDs=numpy.array(outliers_IDs)
1099 1099 outliers_IDs=outliers_IDs.ravel()
1100 1100 outliers_IDs=numpy.unique(outliers_IDs)
1101 1101 outliers_IDs=outliers_IDs.astype(numpy.dtype('int64'))
1102 1102 indexes=numpy.array(indexes)
1103 1103 indexmin=numpy.min(indexes)
1104 1104
1105 1105 if indexmin != buffer1.shape[0]:
1106 1106 cspc_outliers_exist=True
1107 1107 ###sortdata=numpy.sort(buffer1,axis=0)
1108 1108 ###avg2=numpy.mean(sortdata[:indexmin,:],axis=0)
1109 1109 lt=outliers_IDs
1110 1110 avg=numpy.mean(buffer1[[t for t in range(buffer1.shape[0]) if t not in lt],:],axis=0)
1111 1111
1112 1112 for p in list(outliers_IDs):
1113 1113 buffer1[p,:]=avg
1114 1114
1115 1115 self.__buffer_spc[:,i,:,k]=numpy.copy(buffer1)
1116 1116 ###cspc IDs
1117 1117 #indexmin_cspc+=indexmin_cspc
1118 1118 outliers_IDs_cspc=numpy.append(outliers_IDs_cspc,outliers_IDs)
1119 1119
1120 1120 #if not breakFlag:
1121 1121 outliers_IDs_cspc=outliers_IDs_cspc.astype(numpy.dtype('int64'))
1122 1122 if cspc_outliers_exist:
1123 1123 #sortdata=numpy.sort(buffer_cspc,axis=0)
1124 1124 #avg=numpy.mean(sortdata[:indexmin_cpsc,:],axis=0)
1125 1125 lt=outliers_IDs_cspc
1126 1126
1127 1127 avg=numpy.mean(buffer_cspc[[t for t in range(buffer_cspc.shape[0]) if t not in lt],:],axis=0)
1128 1128 for p in list(outliers_IDs_cspc):
1129 1129 buffer_cspc[p,:]=avg
1130 1130
1131 1131 self.__buffer_cspc[:,:,:,k]=numpy.copy(buffer_cspc)
1132 1132 #else:
1133 1133 #break
1134 1134
1135 1135
1136 1136
1137 1137
1138 1138 buffer=None
1139 1139 bufferH=None
1140 1140 buffer1=None
1141 1141 buffer_cspc=None
1142 1142
1143 1143 #print("cpsc",self.__buffer_cspc[:,0,0,0,0])
1144 1144 #print(self.__profIndex)
1145 1145 #exit()
1146 1146
1147 1147 buffer=None
1148 1148 #print(self.__buffer_spc[:,1,3,20,0])
1149 1149 #print(self.__buffer_spc[:,1,5,37,0])
1150 1150 data_spc = numpy.sum(self.__buffer_spc,axis=0)
1151 1151 data_cspc = numpy.sum(self.__buffer_cspc,axis=0)
1152 1152
1153 1153 #print(numpy.shape(data_spc))
1154 1154 #data_spc[1,4,20,0]=numpy.nan
1155 1155
1156 1156 #data_cspc = self.__buffer_cspc
1157 1157 data_dc = self.__buffer_dc
1158 1158 n = self.__profIndex
1159 1159
1160 1160 self.__buffer_spc = []
1161 1161 self.__buffer_cspc = []
1162 1162 self.__buffer_dc = 0
1163 1163 self.__profIndex = 0
1164 1164
1165 1165 return data_spc, data_cspc, data_dc, n
1166 1166
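# Illustrative sketch: the per-channel, per-height loop in pushData() reduces to the
# pattern below -- realizations flagged by the Hildebrand-Sekhon cut are overwritten
# with the mean of the remaining ones before the buffers are summed. The helper name
# is an example only, not part of the class.

import numpy

def replace_outlier_rows(buf, outlier_ids):
    """buf: (n_realizations, n_profiles); outlier_ids: 1-D integer indices."""
    keep = numpy.setdiff1d(numpy.arange(buf.shape[0]), outlier_ids)
    if keep.size and len(outlier_ids):
        buf[outlier_ids, :] = numpy.mean(buf[keep, :], axis=0)
    return buf

# Equivalent to the list-comprehension form used above:
# avg = numpy.mean(buffer1[[t for t in range(buffer1.shape[0]) if t not in lt], :], axis=0)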
1167 1167 def byProfiles(self, *args):
1168 1168
1169 1169 self.__dataReady = False
1170 1170 avgdata_spc = None
1171 1171 avgdata_cspc = None
1172 1172 avgdata_dc = None
1173 1173
1174 1174 self.putData(*args)
1175 1175
1176 1176 if self.__profIndex == self.n:
1177 1177
1178 1178 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
1179 1179 self.n = n
1180 1180 self.__dataReady = True
1181 1181
1182 1182 return avgdata_spc, avgdata_cspc, avgdata_dc
1183 1183
1184 1184 def byTime(self, datatime, *args):
1185 1185
1186 1186 self.__dataReady = False
1187 1187 avgdata_spc = None
1188 1188 avgdata_cspc = None
1189 1189 avgdata_dc = None
1190 1190
1191 1191 self.putData(*args)
1192 1192
1193 1193 if (datatime - self.__initime) >= self.__integrationtime:
1194 1194 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
1195 1195 self.n = n
1196 1196 self.__dataReady = True
1197 1197
1198 1198 return avgdata_spc, avgdata_cspc, avgdata_dc
1199 1199
1200 1200 def integrate(self, datatime, *args):
1201 1201
1202 1202 if self.__profIndex == 0:
1203 1203 self.__initime = datatime
1204 1204
1205 1205 if self.__byTime:
1206 1206 avgdata_spc, avgdata_cspc, avgdata_dc = self.byTime(
1207 1207 datatime, *args)
1208 1208 else:
1209 1209 avgdata_spc, avgdata_cspc, avgdata_dc = self.byProfiles(*args)
1210 1210
1211 1211 if not self.__dataReady:
1212 1212 return None, None, None, None
1213 1213
1214 1214 return self.__initime, avgdata_spc, avgdata_cspc, avgdata_dc
1215 1215
1216 1216 def run(self, dataOut, n=None, DPL = None,timeInterval=None, overlapping=False):
1217 1217 if n == 1:
1218 1218 return dataOut
1219 1219
1220 1220 dataOut.flagNoData = True
1221 1221
1222 1222 if not self.isConfig:
1223 1223 self.setup(dataOut, n, timeInterval, overlapping,DPL )
1224 1224 self.isConfig = True
1225 1225
1226 1226 if not self.ByLags:
1227 1227 self.nProfiles=dataOut.nProfiles
1228 1228 self.nChannels=dataOut.nChannels
1229 1229 self.nHeights=dataOut.nHeights
1230 1230 avgdatatime, avgdata_spc, avgdata_cspc, avgdata_dc = self.integrate(dataOut.utctime,
1231 1231 dataOut.data_spc,
1232 1232 dataOut.data_cspc,
1233 1233 dataOut.data_dc)
1234 1234 else:
1235 1235 self.nProfiles=dataOut.nProfiles
1236 1236 self.nChannels=dataOut.nChannels
1237 1237 self.nHeights=dataOut.nHeights
1238 1238 avgdatatime, avgdata_spc, avgdata_cspc, avgdata_dc = self.integrate(dataOut.utctime,
1239 1239 dataOut.dataLag_spc,
1240 1240 dataOut.dataLag_cspc,
1241 1241 dataOut.dataLag_dc)
1242 1242
1243 1243 if self.__dataReady:
1244 1244
1245 1245 if not self.ByLags:
1246 1246
1247 1247 dataOut.data_spc = numpy.squeeze(avgdata_spc)
1248 1248 dataOut.data_cspc = numpy.squeeze(avgdata_cspc)
1249 1249 dataOut.data_dc = avgdata_dc
1250 1250 else:
1251 1251 dataOut.dataLag_spc = avgdata_spc
1252 1252 dataOut.dataLag_cspc = avgdata_cspc
1253 1253 dataOut.dataLag_dc = avgdata_dc
1254 1254
1255 1255 dataOut.data_spc=dataOut.dataLag_spc[:,:,:,dataOut.LagPlot]
1256 1256 dataOut.data_cspc=dataOut.dataLag_cspc[:,:,:,dataOut.LagPlot]
1257 1257 dataOut.data_dc=dataOut.dataLag_dc[:,:,dataOut.LagPlot]
1258 1258
1259 1259
1260 1260 dataOut.nIncohInt *= self.n
1261 1261 dataOut.utctime = avgdatatime
1262 1262 dataOut.flagNoData = False
1263 1263
1264 1264 return dataOut
1265 1265
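# Usage sketch: how this operation is typically attached in a schainpy experiment
# script. The addProcUnit / addOperation / addParameter calls are assumed to follow
# the usual schainpy Project conventions; adapt unit names and values to your setup.
#
# proc = project.addProcUnit(datatype='SpectraProc', inputId=reader.getId())
# op = proc.addOperation(name='IntegrationFaradaySpectra', optype='other')
# op.addParameter(name='n', value='10', format='int')   # sum 10 spectra incoherently
# # or, alternatively, integrate by time instead of by count:
# # op.addParameter(name='timeInterval', value='60', format='int')   # seconds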
1266 1266 class removeInterference(Operation):
1267 1267
1268 1268 def removeInterference2(self):
1269 1269
1270 1270 cspc = self.dataOut.data_cspc
1271 1271 spc = self.dataOut.data_spc
1272 1272 Heights = numpy.arange(cspc.shape[2])
1273 1273 realCspc = numpy.abs(cspc)
1274 1274
1275 1275 for i in range(cspc.shape[0]):
1276 1276 LinePower= numpy.sum(realCspc[i], axis=0)
1277 1277 Threshold = numpy.amax(LinePower)-numpy.sort(LinePower)[len(Heights)-int(len(Heights)*0.1)]
1278 1278 SelectedHeights = Heights[ numpy.where( LinePower < Threshold ) ]
1279 1279 InterferenceSum = numpy.sum( realCspc[i,:,SelectedHeights], axis=0 )
1280 1280 InterferenceThresholdMin = numpy.sort(InterferenceSum)[int(len(InterferenceSum)*0.98)]
1281 1281 InterferenceThresholdMax = numpy.sort(InterferenceSum)[int(len(InterferenceSum)*0.99)]
1282 1282
1283 1283
1284 1284 InterferenceRange = numpy.where( ([InterferenceSum > InterferenceThresholdMin]))# , InterferenceSum < InterferenceThresholdMax]) )
1285 1285 #InterferenceRange = numpy.where( ([InterferenceRange < InterferenceThresholdMax]))
1286 1286 if len(InterferenceRange)<int(cspc.shape[1]*0.3):
1287 1287 cspc[i,InterferenceRange,:] = numpy.NaN
1288 1288
1289 1289 self.dataOut.data_cspc = cspc
1290 1290
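# Note: the two thresholds computed above are nearest-rank estimates of the 98th and
# 99th percentiles of InterferenceSum; numpy.percentile expresses the same idea
# directly, e.g.:
#
# InterferenceThresholdMin = numpy.percentile(InterferenceSum, 98)
# InterferenceThresholdMax = numpy.percentile(InterferenceSum, 99)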
1291 1291 def removeInterference(self, interf = 2, hei_interf = None, nhei_interf = None, offhei_interf = None):
1292 1292
1293 1293 jspectra = self.dataOut.data_spc
1294 1294 jcspectra = self.dataOut.data_cspc
1295 1295 jnoise = self.dataOut.getNoise()
1296 1296 num_incoh = self.dataOut.nIncohInt
1297 1297
1298 1298 num_channel = jspectra.shape[0]
1299 1299 num_prof = jspectra.shape[1]
1300 1300 num_hei = jspectra.shape[2]
1301 1301
1302 1302 # hei_interf
1303 1303 if hei_interf is None:
1304 1304 count_hei = int(num_hei / 2)
1305 1305 hei_interf = numpy.asmatrix(list(range(count_hei))) + num_hei - count_hei
1306 1306 hei_interf = numpy.asarray(hei_interf)[0]
1307 1307 # nhei_interf
1308 1308 if (nhei_interf is None):
1309 1309 nhei_interf = 5
1310 1310 if (nhei_interf < 1):
1311 1311 nhei_interf = 1
1312 1312 if (nhei_interf > count_hei):
1313 1313 nhei_interf = count_hei
1314 1314 if (offhei_interf is None):
1315 1315 offhei_interf = 0
1316 1316
1317 1317 ind_hei = list(range(num_hei))
1318 1318 # mask_prof = numpy.asarray(range(num_prof - 2)) + 1
1319 1319 # mask_prof[range(num_prof/2 - 1,len(mask_prof))] += 1
1320 1320 mask_prof = numpy.asarray(list(range(num_prof)))
1321 1321 num_mask_prof = mask_prof.size
1322 1322 comp_mask_prof = [0, num_prof / 2]
1323 1323
1324 1324 # noise_exist: determines whether jnoise has been defined and holds the noise level of each channel
1325 1325 if (jnoise.size < num_channel or numpy.isnan(jnoise).any()):
1326 1326 jnoise = numpy.full(num_channel, numpy.nan) # keep it indexable so the check below evaluates to False
1327 1327 noise_exist = jnoise[0] < numpy.Inf
1328 1328
1329 1329 # Interference removal subroutine
1330 1330 for ich in range(num_channel):
1331 1331 # Sort the spectra by power (lowest to highest)
1332 1332 power = jspectra[ich, mask_prof, :]
1333 1333 power = power[:, hei_interf]
1334 1334 power = power.sum(axis=0)
1335 1335 psort = power.ravel().argsort()
1336 1336
1337 1337 # Estimate the average interference in the power spectra using the lowest-power heights
1338 1338 junkspc_interf = jspectra[ich, :, hei_interf[psort[list(range(
1339 1339 offhei_interf, nhei_interf + offhei_interf))]]]
1340 1340
1341 1341 if noise_exist:
1342 1342 # tmp_noise = jnoise[ich] / num_prof
1343 1343 tmp_noise = jnoise[ich]
1344 1344 junkspc_interf = junkspc_interf - tmp_noise
1345 1345 #junkspc_interf[:,comp_mask_prof] = 0
1346 1346
1347 1347 jspc_interf = junkspc_interf.sum(axis=0) / nhei_interf
1348 1348 jspc_interf = jspc_interf.transpose()
1349 1349 # Compute the average interference spectrum
1350 1350 noiseid = numpy.where(
1351 1351 jspc_interf <= tmp_noise / numpy.sqrt(num_incoh))
1352 1352 noiseid = noiseid[0]
1353 1353 cnoiseid = noiseid.size
1354 1354 interfid = numpy.where(
1355 1355 jspc_interf > tmp_noise / numpy.sqrt(num_incoh))
1356 1356 interfid = interfid[0]
1357 1357 cinterfid = interfid.size
1358 1358
1359 1359 if (cnoiseid > 0):
1360 1360 jspc_interf[noiseid] = 0
1361 1361
1362 1362 # Expand the set of profiles to be cleaned
1363 1363 if (cinterfid > 0):
1364 1364 new_interfid = (
1365 1365 numpy.r_[interfid - 1, interfid, interfid + 1] + num_prof) % num_prof
1366 1366 new_interfid = numpy.asarray(new_interfid)
1367 1367 new_interfid = {x for x in new_interfid}
1368 1368 new_interfid = numpy.array(list(new_interfid))
1369 1369 new_cinterfid = new_interfid.size
1370 1370 else:
1371 1371 new_cinterfid = 0
1372 1372
1373 1373 for ip in range(new_cinterfid):
1374 1374 ind = junkspc_interf[:, new_interfid[ip]].ravel().argsort()
1375 1375 jspc_interf[new_interfid[ip]
1376 1376 ] = junkspc_interf[ind[nhei_interf // 2], new_interfid[ip]]
1377 1377
1378 1378 jspectra[ich, :, ind_hei] = jspectra[ich, :,
1379 1379 ind_hei] - jspc_interf # Fix indices
1380 1380
1381 1381 # Remove the interference at the profile with the strongest interference
1382 1382 ListAux = jspc_interf[mask_prof].tolist()
1383 1383 maxid = ListAux.index(max(ListAux))
1384 1384
1385 1385 if cinterfid > 0:
1386 1386 for ip in range(cinterfid * (interf == 2) - 1):
1387 1387 ind = (jspectra[ich, interfid[ip], :] < tmp_noise *
1388 1388 (1 + 1 / numpy.sqrt(num_incoh))).nonzero()
1389 1389 cind = len(ind)
1390 1390
1391 1391 if (cind > 0):
1392 1392 jspectra[ich, interfid[ip], ind] = tmp_noise * \
1393 1393 (1 + (numpy.random.uniform(cind) - 0.5) /
1394 1394 numpy.sqrt(num_incoh))
1395 1395
1396 1396 ind = numpy.array([-2, -1, 1, 2])
1397 1397 xx = numpy.zeros([4, 4])
1398 1398
1399 1399 for id1 in range(4):
1400 1400 xx[:, id1] = ind[id1]**numpy.asarray(list(range(4)))
1401 1401
1402 1402 xx_inv = numpy.linalg.inv(xx)
1403 1403 xx = xx_inv[:, 0]
1404 1404 ind = (ind + maxid + num_mask_prof) % num_mask_prof
1405 1405 yy = jspectra[ich, mask_prof[ind], :]
1406 1406 jspectra[ich, mask_prof[maxid], :] = numpy.dot(
1407 1407 yy.transpose(), xx)
1408 1408
1409 1409 indAux = (jspectra[ich, :, :] < tmp_noise *
1410 1410 (1 - 1 / numpy.sqrt(num_incoh))).nonzero()
1411 1411 jspectra[ich, indAux[0], indAux[1]] = tmp_noise * \
1412 1412 (1 - 1 / numpy.sqrt(num_incoh))
1413 1413
1414 1414 # Interference removal in the cross-spectra
1415 1415 if jcspectra is None:
1416 1416 return jspectra, jcspectra
1417 1417 num_pairs = int(jcspectra.size / (num_prof * num_hei))
1418 1418 jcspectra = jcspectra.reshape(num_pairs, num_prof, num_hei)
1419 1419
1420 1420 for ip in range(num_pairs):
1421 1421
1422 1422 #-------------------------------------------
1423 1423
1424 1424 cspower = numpy.abs(jcspectra[ip, mask_prof, :])
1425 1425 cspower = cspower[:, hei_interf]
1426 1426 cspower = cspower.sum(axis=0)
1427 1427
1428 1428 cspsort = cspower.ravel().argsort()
1429 1429 junkcspc_interf = jcspectra[ip, :, hei_interf[cspsort[list(range(
1430 1430 offhei_interf, nhei_interf + offhei_interf))]]]
1431 1431 junkcspc_interf = junkcspc_interf.transpose()
1432 1432 jcspc_interf = junkcspc_interf.sum(axis=1) / nhei_interf
1433 1433
1434 1434 ind = numpy.abs(jcspc_interf[mask_prof]).ravel().argsort()
1435 1435
1436 1436 median_real = int(numpy.median(numpy.real(
1437 1437 junkcspc_interf[mask_prof[ind[list(range(3 * num_prof // 4))]], :])))
1438 1438 median_imag = int(numpy.median(numpy.imag(
1439 1439 junkcspc_interf[mask_prof[ind[list(range(3 * num_prof // 4))]], :])))
1440 1440 comp_mask_prof = [int(e) for e in comp_mask_prof]
1441 1441 junkcspc_interf[comp_mask_prof, :] = complex(
1442 1442 median_real, median_imag)
1443 1443
1444 1444 for iprof in range(num_prof):
1445 1445 ind = numpy.abs(junkcspc_interf[iprof, :]).ravel().argsort()
1446 1446 jcspc_interf[iprof] = junkcspc_interf[iprof, ind[nhei_interf // 2]]
1447 1447
1448 1448 # Remove the interference
1449 1449 jcspectra[ip, :, ind_hei] = jcspectra[ip,
1450 1450 :, ind_hei] - jcspc_interf
1451 1451
1452 1452 ListAux = numpy.abs(jcspc_interf[mask_prof]).tolist()
1453 1453 maxid = ListAux.index(max(ListAux))
1454 1454
1455 1455 ind = numpy.array([-2, -1, 1, 2])
1456 1456 xx = numpy.zeros([4, 4])
1457 1457
1458 1458 for id1 in range(4):
1459 1459 xx[:, id1] = ind[id1]**numpy.asarray(list(range(4)))
1460 1460
1461 1461 xx_inv = numpy.linalg.inv(xx)
1462 1462 xx = xx_inv[:, 0]
1463 1463
1464 1464 ind = (ind + maxid + num_mask_prof) % num_mask_prof
1465 1465 yy = jcspectra[ip, mask_prof[ind], :]
1466 1466 jcspectra[ip, mask_prof[maxid], :] = numpy.dot(yy.transpose(), xx)
1467 1467
1468 1468 # Save results
1469 1469 self.dataOut.data_spc = jspectra
1470 1470 self.dataOut.data_cspc = jcspectra
1471 1471
1472 1472 return 1
1473 1473
1474 1474 def run(self, dataOut, interf = 2,hei_interf = None, nhei_interf = None, offhei_interf = None, mode=1):
1475 1475
1476 1476 self.dataOut = dataOut
1477 1477
1478 1478 if mode == 1:
1479 1479 self.removeInterference(interf=interf, hei_interf=hei_interf, nhei_interf=nhei_interf, offhei_interf=offhei_interf)
1480 1480 elif mode == 2:
1481 1481 self.removeInterference2()
1482 1482
1483 1483 return self.dataOut
1484 1484
1485 1485
1486 1486 class IncohInt(Operation):
1487 1487
1488 1488 __profIndex = 0
1489 1489 __withOverlapping = False
1490 1490
1491 1491 __byTime = False
1492 1492 __initime = None
1493 1493 __lastdatatime = None
1494 1494 __integrationtime = None
1495 1495
1496 1496 __buffer_spc = None
1497 1497 __buffer_cspc = None
1498 1498 __buffer_dc = None
1499 1499
1500 1500 __dataReady = False
1501 1501
1502 1502 __timeInterval = None
1503 1503
1504 1504 n = None
1505 1505
1506 1506 def __init__(self):
1507 1507
1508 1508 Operation.__init__(self)
1509 1509
1510 1510 def setup(self, n=None, timeInterval=None, overlapping=False):
1511 1511 """
1512 1512 Set the parameters of the integration class.
1513 1513
1514 1514 Inputs:
1515 1515
1516 1516 n : Number of incoherent integrations
1517 1517 timeInterval : Integration time in seconds; ignored when "n" is given
1518 1518 overlapping : Reserved for overlapped integration (currently unused)
1519 1519
1520 1520 """
1521 1521
1522 1522 self.__initime = None
1523 1523 self.__lastdatatime = 0
1524 1524
1525 1525 self.__buffer_spc = 0
1526 1526 self.__buffer_cspc = 0
1527 1527 self.__buffer_dc = 0
1528 1528
1529 1529 self.__profIndex = 0
1530 1530 self.__dataReady = False
1531 1531 self.__byTime = False
1532 1532
1533 1533 if n is None and timeInterval is None:
1534 1534 raise ValueError("n or timeInterval should be specified ...")
1535 1535
1536 1536 if n is not None:
1537 1537 self.n = int(n)
1538 1538 else:
1539 1539
1540 1540 self.__integrationtime = int(timeInterval)
1541 1541 self.n = None
1542 1542 self.__byTime = True
1543 1543
1544 1544 def putData(self, data_spc, data_cspc, data_dc):
1545 1545 """
1546 1546 Add a spectrum to __buffer_spc and increase __profIndex by one
1547 1547
1548 1548 """
1549 1549
1550 1550 self.__buffer_spc += data_spc
1551 1551
1552 1552 if data_cspc is None:
1553 1553 self.__buffer_cspc = None
1554 1554 else:
1555 1555 self.__buffer_cspc += data_cspc
1556 1556
1557 1557 if data_dc is None:
1558 1558 self.__buffer_dc = None
1559 1559 else:
1560 1560 self.__buffer_dc += data_dc
1561 1561
1562 1562 self.__profIndex += 1
1563 1563
1564 1564 return
1565 1565
1566 1566 def pushData(self):
1567 1567 """
1568 1568 Return the sum of the buffered spectra and the number of profiles used in the sum.
1569 1569
1570 1570 Affected:
1571 1571
1572 1572 self.__profIndex
1573 1573
1574 1574 """
1575 1575
1576 1576 data_spc = self.__buffer_spc
1577 1577 data_cspc = self.__buffer_cspc
1578 1578 data_dc = self.__buffer_dc
1579 1579 n = self.__profIndex
1580 1580
1581 1581 self.__buffer_spc = 0
1582 1582 self.__buffer_cspc = 0
1583 1583 self.__buffer_dc = 0
1584 1584 self.__profIndex = 0
1585 1585
1586 1586 return data_spc, data_cspc, data_dc, n
1587 1587
1588 1588 def byProfiles(self, *args):
1589 1589
1590 1590 self.__dataReady = False
1591 1591 avgdata_spc = None
1592 1592 avgdata_cspc = None
1593 1593 avgdata_dc = None
1594 1594
1595 1595 self.putData(*args)
1596 1596
1597 1597 if self.__profIndex == self.n:
1598 1598
1599 1599 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
1600 1600 self.n = n
1601 1601 self.__dataReady = True
1602 1602
1603 1603 return avgdata_spc, avgdata_cspc, avgdata_dc
1604 1604
1605 1605 def byTime(self, datatime, *args):
1606 1606
1607 1607 self.__dataReady = False
1608 1608 avgdata_spc = None
1609 1609 avgdata_cspc = None
1610 1610 avgdata_dc = None
1611 1611
1612 1612 self.putData(*args)
1613 1613
1614 1614 if (datatime - self.__initime) >= self.__integrationtime:
1615 1615 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
1616 1616 self.n = n
1617 1617 self.__dataReady = True
1618 1618
1619 1619 return avgdata_spc, avgdata_cspc, avgdata_dc
1620 1620
1621 1621 def integrate(self, datatime, *args):
1622 1622
1623 1623 if self.__profIndex == 0:
1624 1624 self.__initime = datatime
1625 1625
1626 1626 if self.__byTime:
1627 1627 avgdata_spc, avgdata_cspc, avgdata_dc = self.byTime(
1628 1628 datatime, *args)
1629 1629 else:
1630 1630 avgdata_spc, avgdata_cspc, avgdata_dc = self.byProfiles(*args)
1631 1631
1632 1632 if not self.__dataReady:
1633 1633 return None, None, None, None
1634 1634
1635 1635 return self.__initime, avgdata_spc, avgdata_cspc, avgdata_dc
1636 1636
1637 1637 def run(self, dataOut, n=None, timeInterval=None, overlapping=False):
1638 1638 if n == 1:
1639 1639 return dataOut
1640 1640
1641 1641 dataOut.flagNoData = True
1642 1642
1643 1643 if not self.isConfig:
1644 1644 self.setup(n, timeInterval, overlapping)
1645 1645 self.isConfig = True
1646 1646
1647 1647 avgdatatime, avgdata_spc, avgdata_cspc, avgdata_dc = self.integrate(dataOut.utctime,
1648 1648 dataOut.data_spc,
1649 1649 dataOut.data_cspc,
1650 1650 dataOut.data_dc)
1651 1651
1652 1652 if self.__dataReady:
1653 1653
1654 1654 dataOut.data_spc = avgdata_spc
1655 1655 dataOut.data_cspc = avgdata_cspc
1656 1656 dataOut.data_dc = avgdata_dc
1657 1657 dataOut.nIncohInt *= self.n
1658 1658 dataOut.utctime = avgdatatime
1659 1659 dataOut.flagNoData = False
1660 1660
1661 1661 return dataOut
1662 1662
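# Illustrative sketch: unlike IntegrationFaradaySpectra, IncohInt only accumulates
# spectra (no outlier rejection). The accumulate/flush cycle implemented by
# byProfiles()/pushData() can be exercised directly with dummy arrays, assuming the
# Operation base class can be instantiated standalone:

import numpy

op = IncohInt()
op.setup(n=3)                              # flush after every 3 spectra
for k in range(6):
    spc = numpy.ones((2, 8, 4))            # dummy (channels, profiles, heights) spectrum
    avg_spc, avg_cspc, avg_dc = op.byProfiles(spc, None, None)
    if avg_spc is not None:                # ready on every 3rd call
        print('integrated block sums to', avg_spc.sum())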
1663 1663 class dopplerFlip(Operation):
1664 1664
1665 1665 def run(self, dataOut):
1666 1666 # array 1: (num_chan, num_profiles, num_heights)
1667 1667 self.dataOut = dataOut
1668 1668 # JULIA oblique mode, index 2
1669 1669 # array 2: (num_profiles, num_heights)
1670 1670 jspectra = self.dataOut.data_spc[2]
1671 1671 jspectra_tmp = numpy.zeros(jspectra.shape)
1672 1672 num_profiles = jspectra.shape[0]
1673 1673 freq_dc = int(num_profiles / 2)
1674 1674 # Flip using a for loop
1675 1675 for j in range(num_profiles):
1676 1676 jspectra_tmp[num_profiles-j-1]= jspectra[j]
1677 1677 # Put the DC profile and the one just before it back in place (the flip swapped them)
1678 1678 jspectra_tmp[freq_dc-1]= jspectra[freq_dc-1]
1679 1679 jspectra_tmp[freq_dc]= jspectra[freq_dc]
1680 1680 # the modified channel is written back into the channel array
1681 1681 self.dataOut.data_spc[2] = jspectra_tmp
1682 1682
1683 1683 return self.dataOut
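# Note: the loop above reverses the profile axis of channel 2 and then restores the DC
# bin and the bin just below it. A vectorized equivalent with numpy slicing would be:
#
# flipped = jspectra[::-1, :].copy()
# flipped[freq_dc - 1] = jspectra[freq_dc - 1]
# flipped[freq_dc] = jspectra[freq_dc]
# self.dataOut.data_spc[2] = flipped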