for tests on the AMISR PC
joabAM -
r1541:77e2d8d2b2ef
@@ -1,666 +1,667
1 1 '''
2 2 Created on Sep 9, 2015
3 3
4 4 @author: roj-idl71 Karim Kuyeng
5 5
6 6 @update: 2021, Joab Apaza
7 7 '''
8 8
9 9 import os
10 10 import sys
11 11 import glob
12 12 import fnmatch
13 13 import datetime
14 14 import time
15 15 import re
16 16 import h5py
17 17 import numpy
18 18
19 19 try:
20 20 from gevent import sleep
21 21 except:
22 22 from time import sleep
23 23
24 24 from schainpy.model.data.jroheaderIO import RadarControllerHeader, SystemHeader
25 25 from schainpy.model.data.jrodata import Voltage
26 26 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
27 27 from numpy import imag
28 28 from schainpy.utils import log
import schainpy.admin  # needed for the SchainWarning/SchainError raised below
29 29
30 30
31 31 class AMISRReader(ProcessingUnit):
32 32 '''
33 33 classdocs
34 34 '''
35 35
36 36 def __init__(self):
37 37 '''
38 38 Constructor
39 39 '''
40 40
41 41 ProcessingUnit.__init__(self)
42 42
43 43 self.set = None
44 44 self.subset = None
45 45 self.extension_file = '.h5'
46 46 self.dtc_str = 'dtc'
47 47 self.dtc_id = 0
48 48 self.status = True
49 49 self.isConfig = False
50 50 self.dirnameList = []
51 51 self.filenameList = []
52 52 self.fileIndex = None
53 53 self.flagNoMoreFiles = False
54 54 self.flagIsNewFile = 0
55 55 self.filename = ''
56 56 self.amisrFilePointer = None
57 57 self.realBeamCode = []
58 58 self.beamCodeMap = None
59 59 self.azimuthList = []
60 60 self.elevationList = []
61 61 self.dataShape = None
62 62 self.flag_old_beams = False
63 63
64 64
65 65 self.profileIndex = 0
66 66
67 67
68 68 self.beamCodeByFrame = None
69 69 self.radacTimeByFrame = None
70 70
71 71 self.dataset = None
72 72
73 73 self.__firstFile = True
74 74
75 75 self.buffer = None
76 76
77 77 self.timezone = 'ut'
78 78
79 79 self.__waitForNewFile = 20
80 80 self.__filename_online = None
81 81 #Is it really necessary to create the output object in the initializer?
82 82 self.dataOut = Voltage()
83 83 self.dataOut.error=False
84 84 self.margin_days = 1
85 85
86 86 def setup(self,path=None,
87 87 startDate=None,
88 88 endDate=None,
89 89 startTime=None,
90 90 endTime=None,
91 91 walk=True,
92 92 timezone='ut',
93 93 all=0,
94 94 code = None,
95 95 nCode = 0,
96 96 nBaud = 0,
97 97 online=False,
98 98 old_beams=False,
99 99 margin_days=1):
100 100
101 101
102 102
103 103 self.timezone = timezone
104 104 self.all = all
105 105 self.online = online
106 106 self.flag_old_beams = old_beams
107 107 self.code = code
108 108 self.nCode = int(nCode)
109 109 self.nBaud = int(nBaud)
110 110 self.margin_days = margin_days
111 111
112 112
113 113 #self.findFiles()
114 114 if not(online):
115 115 #Offline file search
116 116 self.searchFilesOffLine(path, startDate, endDate, startTime, endTime, walk)
117 117 else:
118 118 self.searchFilesOnLine(path, startDate, endDate, startTime,endTime,walk)
119 119
120 120 if not(self.filenameList):
121 121 raise schainpy.admin.SchainWarning("There are no files in the folder: %s"%(path))
122 122 sys.exit()
123 123
124 124 self.fileIndex = 0
125 125
126 126 self.readNextFile(online)
127 127
128 128 '''
129 129 Add code
130 130 '''
131 131 self.isConfig = True
132 132 # print("Setup Done")
133 133 pass
134 134
135 135
136 136 def readAMISRHeader(self,fp):
137 137
138 138 if self.isConfig and (not self.flagNoMoreFiles):
139 139 newShape = fp.get('Raw11/Data/Samples/Data').shape[1:]
140 140 if self.dataShape != newShape and newShape != None:
141 141 print(self.dataShape,newShape,"\n")
142 142 raise schainpy.admin.SchainError("NEW FILE HAS A DIFFERENT SHAPE")
143 143 return 0
144 144 else:
145 145 self.dataShape = fp.get('Raw11/Data/Samples/Data').shape[1:]
146 146
147 147
148 148 header = 'Raw11/Data/RadacHeader'
149 149 self.beamCodeByPulse = fp.get(header+'/BeamCode') # LIST OF BEAMS PER PROFILE, TO BE USED ON REARRANGE
150 150 if (self.startDate> datetime.date(2021, 7, 15)) or self.flag_old_beams: #The beam-pointing extraction changed on the 17th; it can also be forced with the old-beams reorganization flag
151 151 self.beamcodeFile = fp['Setup/Beamcodefile'][()].decode()
152 152 self.trueBeams = self.beamcodeFile.split("\n")
153 153 self.trueBeams.pop()#remove last
154 154 [self.realBeamCode.append(x) for x in self.trueBeams if x not in self.realBeamCode]
155 155 self.beamCode = [int(x, 16) for x in self.realBeamCode]
156 156 else:
157 157 _beamCode= fp.get('Raw11/Data/Beamcodes') #use the method prior to the beam-pointing change
158 158 self.beamCode = _beamCode[0,:]
159 159
160 160 if self.beamCodeMap == None:
161 161 self.beamCodeMap = fp['Setup/BeamcodeMap']
162 162 for beam in self.beamCode:
163 163 beamAziElev = numpy.where(self.beamCodeMap[:,0]==beam)
164 164 beamAziElev = beamAziElev[0].squeeze()
165 165 self.azimuthList.append(self.beamCodeMap[beamAziElev,1])
166 166 self.elevationList.append(self.beamCodeMap[beamAziElev,2])
167 167 #print("Beamssss: ",self.beamCodeMap[beamAziElev,1],self.beamCodeMap[beamAziElev,2])
168 168 #print(self.beamCode)
169 169 #self.code = fp.get(header+'/Code') # NOT USED HERE
170 170 self.frameCount = fp.get(header+'/FrameCount')# NOT USED HERE
171 171 self.modeGroup = fp.get(header+'/ModeGroup')# NOT USED HERE
172 172 self.nsamplesPulse = fp.get(header+'/NSamplesPulse')# TO GET NSA, OR USE THE DATA FOR THAT
173 173 self.pulseCount = fp.get(header+'/PulseCount')# NOT USED HERE
174 174 self.radacTime = fp.get(header+'/RadacTime')# 1st TIME IN FILE, AND CALCULATE THE REST WITH IPP*nindexprofile
175 175 self.timeCount = fp.get(header+'/TimeCount')# NOT USED HERE
176 176 self.timeStatus = fp.get(header+'/TimeStatus')# NOT USED HERE
177 177 self.rangeFromFile = fp.get('Raw11/Data/Samples/Range')
178 178 self.frequency = fp.get('Rx/Frequency')
179 179 txAus = fp.get('Raw11/Data/Pulsewidth')
180 180
181 181
182 182 self.nblocks = self.pulseCount.shape[0] #nblocks
183 183
184 184 self.nprofiles = self.pulseCount.shape[1] #nprofile
185 185 self.nsa = self.nsamplesPulse[0,0] #ngates
186 186 self.nchannels = len(self.beamCode)
187 187 self.ippSeconds = (self.radacTime[0][1] -self.radacTime[0][0]) #Ipp in seconds
188 188 #print("IPPS secs: ",self.ippSeconds)
189 189 #self.__waitForNewFile = self.nblocks # wait depending on the number of blocks since each block is 1 sec
190 190 self.__waitForNewFile = self.nblocks * self.nprofiles * self.ippSeconds # wait until new file is created
191 191
192 192 #filling radar controller header parameters
193 193 self.__ippKm = self.ippSeconds *.15*1e6 # in km
194 194 self.__txA = (txAus[()])*.15 #(ipp[us]*.15km/1us) in km
195 195 self.__txB = 0
196 196 nWindows=1
197 197 self.__nSamples = self.nsa
198 198 self.__firstHeight = self.rangeFromFile[0][0]/1000 #in km
199 199 self.__deltaHeight = (self.rangeFromFile[0][1] - self.rangeFromFile[0][0])/1000
200 200 #print("amisr-ipp:",self.ippSeconds, self.__ippKm)
201 201 #for now until understand why the code saved is different (code included even though code not in tuf file)
202 202 #self.__codeType = 0
203 203 # self.__nCode = None
204 204 # self.__nBaud = None
205 205 self.__code = self.code
206 206 self.__codeType = 0
207 207 if self.code != None:
208 208 self.__codeType = 1
209 209 self.__nCode = self.nCode
210 210 self.__nBaud = self.nBaud
211 211 #self.__code = 0
212 212
213 213 #filling system header parameters
214 214 self.__nSamples = self.nsa
215 215 self.newProfiles = self.nprofiles/self.nchannels
216 216 self.__channelList = list(range(self.nchannels))
217 217
218 218 self.__frequency = self.frequency[0][0]
219 219
220 220
221 221 return 1
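# --- Illustration (hedged sketch, hypothetical values): the conversions above use the radar
# convention c/2 = 0.15 km/us, so times in the file map to one-way ranges in km. For example,
# assuming ippSeconds = 2e-3 s and a 400 us pulse width:
#   ippKm  = 2e-3 * 0.15 * 1e6 = 300.0 km    (same formula as self.__ippKm)
#   txA_km = 400 * 0.15        = 60.0 km     (same formula as self.__txA, txAus given in us)
#   firstHeight, deltaHeight come from Raw11/Data/Samples/Range divided by 1000 (m -> km)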
222 222
223 223
224 224 def createBuffers(self):
225 225
226 226 pass
227 227
228 228 def __setParameters(self,path='', startDate='',endDate='',startTime='', endTime='', walk=''):
229 229 self.path = path
230 230 self.startDate = startDate
231 231 self.endDate = endDate
232 232 self.startTime = startTime
233 233 self.endTime = endTime
234 234 self.walk = walk
235 235
236 236 def __checkPath(self):
237 237 if os.path.exists(self.path):
238 238 self.status = 1
239 239 else:
240 240 self.status = 0
241 241 print('Path:%s does not exist'%self.path)
242 242
243 243 return
244 244
245 245
246 246 def __selDates(self, amisr_dirname_format):
247 247 try:
248 248 year = int(amisr_dirname_format[0:4])
249 249 month = int(amisr_dirname_format[4:6])
250 250 dom = int(amisr_dirname_format[6:8])
251 251 thisDate = datetime.date(year,month,dom)
252 252 #one extra day of margin; the data is filtered later by date and time anyway
253 253 if (thisDate>=(self.startDate - datetime.timedelta(days=self.margin_days)) and thisDate <= (self.endDate)+ datetime.timedelta(days=1)):
254 254 return amisr_dirname_format
255 255 except:
256 256 return None
257 257
258 258
259 259 def __findDataForDates(self,online=False):
260 260
261 261 if not(self.status):
262 262 return None
263 263
264 264 pat = r'\d+.\d+'
265 265 dirnameList = [re.search(pat,x) for x in os.listdir(self.path)]
266 266 dirnameList = [x for x in dirnameList if x!=None]
267 267 dirnameList = [x.string for x in dirnameList]
268 268 if not(online):
269 269 dirnameList = [self.__selDates(x) for x in dirnameList]
270 270 dirnameList = [x for x in dirnameList if x!=None]
271 271 if len(dirnameList)>0:
272 272 self.status = 1
273 273 self.dirnameList = dirnameList
274 274 self.dirnameList.sort()
275 275 else:
276 276 self.status = 0
277 277 return None
278 278
279 279 def __getTimeFromData(self):
280 280 startDateTime_Reader = datetime.datetime.combine(self.startDate,self.startTime)
281 281 endDateTime_Reader = datetime.datetime.combine(self.endDate,self.endTime)
282 282
283 283 print('Filtering Files from %s to %s'%(startDateTime_Reader, endDateTime_Reader))
284 284 print('........................................')
285 285 filter_filenameList = []
286 286 self.filenameList.sort()
287 287 total_files = len(self.filenameList)
288 288 #for i in range(len(self.filenameList)-1):
289 289 for i in range(total_files):
290 290 filename = self.filenameList[i]
291 291 #print("file-> ",filename)
292 292 try:
293 293 fp = h5py.File(filename,'r')
294 294 time_str = fp.get('Time/RadacTimeString')
295 295
296 296 startDateTimeStr_File = time_str[0][0].decode('UTF-8').split('.')[0]
297 297 #startDateTimeStr_File = "2019-12-16 09:21:11"
298 298 junk = time.strptime(startDateTimeStr_File, '%Y-%m-%d %H:%M:%S')
299 299 startDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec)
300 300
301 301 #endDateTimeStr_File = "2019-12-16 11:10:11"
302 302 endDateTimeStr_File = time_str[-1][-1].decode('UTF-8').split('.')[0]
303 303 junk = time.strptime(endDateTimeStr_File, '%Y-%m-%d %H:%M:%S')
304 304 endDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec)
305 305
306 306 fp.close()
307 307
308 308 #print("check time", startDateTime_File)
309 309 if self.timezone == 'lt':
310 310 startDateTime_File = startDateTime_File - datetime.timedelta(minutes = 300)
311 311 endDateTime_File = endDateTime_File - datetime.timedelta(minutes = 300)
312 312 if (startDateTime_File >=startDateTime_Reader and endDateTime_File<=endDateTime_Reader):
313 313 filter_filenameList.append(filename)
314 314
315 315 if (startDateTime_File>endDateTime_Reader):
316 316 break
317 317 except Exception as e:
318 318 log.warning("Error opening file {} -> {}".format(os.path.split(filename)[1],e))
319 319
320 320 filter_filenameList.sort()
321 321 self.filenameList = filter_filenameList
322 322
323 323 return 1
324 324
325 325 def __filterByGlob1(self, dirName):
326 326 filter_files = glob.glob1(dirName, '*.*%s'%self.extension_file)
327 327 filter_files.sort()
328 328 filterDict = {}
329 329 filterDict.setdefault(dirName)
330 330 filterDict[dirName] = filter_files
331 331 return filterDict
332 332
333 333 def __getFilenameList(self, fileListInKeys, dirList):
334 334 for value in fileListInKeys:
335 335 dirName = list(value.keys())[0]
336 336 for file in value[dirName]:
337 337 filename = os.path.join(dirName, file)
338 338 self.filenameList.append(filename)
339 339
340 340
341 341 def __selectDataForTimes(self, online=False):
342 342 #the time filter is not implemented here yet
343 343 if not(self.status):
344 344 return None
345 345
346 346 dirList = [os.path.join(self.path,x) for x in self.dirnameList]
347 347 fileListInKeys = [self.__filterByGlob1(x) for x in dirList]
348 348 self.__getFilenameList(fileListInKeys, dirList)
349 349 if not(online):
350 350 #filter by time
351 351 if not(self.all):
352 352 self.__getTimeFromData()
353 353
354 354 if len(self.filenameList)>0:
355 355 self.status = 1
356 356 self.filenameList.sort()
357 357 else:
358 358 self.status = 0
359 359 return None
360 360
361 361 else:
362 362 #take the next-to-last file
363 363 self.filenameList = [self.filenameList[-2]]
364 364 new_dirnameList = []
365 365 for dirname in self.dirnameList:
366 366 junk = numpy.array([dirname in x for x in self.filenameList])
367 367 junk_sum = junk.sum()
368 368 if junk_sum > 0:
369 369 new_dirnameList.append(dirname)
370 370 self.dirnameList = new_dirnameList
371 371 return 1
372 372
373 373 def searchFilesOnLine(self, path, startDate, endDate, startTime=datetime.time(0,0,0),
374 374 endTime=datetime.time(23,59,59),walk=True):
375 375
376 376 if endDate ==None:
377 377 startDate = datetime.datetime.utcnow().date()
378 378 endDate = datetime.datetime.utcnow().date()
379 379
380 380 self.__setParameters(path=path, startDate=startDate, endDate=endDate,startTime = startTime,endTime=endTime, walk=walk)
381 381
382 382 self.__checkPath()
383 383
384 384 self.__findDataForDates(online=True)
385 385
386 386 self.dirnameList = [self.dirnameList[-1]]
387 387
388 388 self.__selectDataForTimes(online=True)
389 389
390 390 return
391 391
392 392
393 393 def searchFilesOffLine(self,
394 394 path,
395 395 startDate,
396 396 endDate,
397 397 startTime=datetime.time(0,0,0),
398 398 endTime=datetime.time(23,59,59),
399 399 walk=True):
400 400
401 401 self.__setParameters(path, startDate, endDate, startTime, endTime, walk)
402 402
403 403 self.__checkPath()
404 404
405 405 self.__findDataForDates()
406 406
407 407 self.__selectDataForTimes()
408 408
409 409 for i in range(len(self.filenameList)):
410 410 print("%s" %(self.filenameList[i]))
411 411
412 412 return
413 413
414 414 def __setNextFileOffline(self):
415 415
416 416 try:
417 417 self.filename = self.filenameList[self.fileIndex]
418 418 self.amisrFilePointer = h5py.File(self.filename,'r')
419 419 self.fileIndex += 1
420 420 except:
421 421 self.flagNoMoreFiles = 1
422 422 raise schainpy.admin.SchainError('No more files to read')
423 423 return 0
424 424
425 425 self.flagIsNewFile = 1
426 426 print("Setting the file: %s"%self.filename)
427 427
428 428 return 1
429 429
430 430
431 431 def __setNextFileOnline(self):
432 432 filename = self.filenameList[0]
433 433 if self.__filename_online != None:
434 434 self.__selectDataForTimes(online=True)
435 435 filename = self.filenameList[0]
436 436 wait = 0
437 437 self.__waitForNewFile=300 ## DEBUG:
438 438 while self.__filename_online == filename:
439 439 print('waiting %d seconds to get a new file...'%(self.__waitForNewFile))
440 440 if wait == 5:
441 441 self.flagNoMoreFiles = 1
442 442 return 0
443 443 sleep(self.__waitForNewFile)
444 444 self.__selectDataForTimes(online=True)
445 445 filename = self.filenameList[0]
446 446 wait += 1
447 447
448 448 self.__filename_online = filename
449 449
450 450 self.amisrFilePointer = h5py.File(filename,'r')
451 451 self.flagIsNewFile = 1
452 452 self.filename = filename
453 453 print("Setting the file: %s"%self.filename)
454 454 return 1
455 455
456 456
457 457 def readData(self):
458 458 buffer = self.amisrFilePointer.get('Raw11/Data/Samples/Data')
459 459 re_part = buffer[:,:,:,0] # real part (avoid shadowing the re module)
460 460 im_part = buffer[:,:,:,1] # imaginary part
461 461 dataset = re_part + im_part*1j
462 462
463 463 self.radacTime = self.amisrFilePointer.get('Raw11/Data/RadacHeader/RadacTime')
464 464 timeset = self.radacTime[:,0]
465 465
466 466 return dataset,timeset
467 467
468 468 def reshapeData(self):
469 469 #self.beamCodeByPulse, self.beamCode, self.nblocks, self.nprofiles, self.nsa,
470 470 channels = self.beamCodeByPulse[0,:]
471 471 nchan = self.nchannels
472 472 #self.newProfiles = self.nprofiles/nchan #must be defined on filljroheader
473 473 nblocks = self.nblocks
474 474 nsamples = self.nsa
475 475
476 476 #Dimensions : nChannels, nProfiles, nSamples
477 477 new_block = numpy.empty((nblocks, nchan, numpy.int_(self.newProfiles), nsamples), dtype="complex64")
478 478 ############################################
479 479
480 480 for thisChannel in range(nchan):
481 481 new_block[:,thisChannel,:,:] = self.dataset[:,numpy.where(channels==self.beamCode[thisChannel])[0],:]
482 482
483 483
484 484 new_block = numpy.transpose(new_block, (1,0,2,3))
485 485 new_block = numpy.reshape(new_block, (nchan,-1, nsamples))
486 486
487 487 return new_block
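# --- Illustration (hedged sketch, made-up sizes): reshapeData() regroups the raw
# (nblocks, nprofiles, nsa) samples by beam. For each beam code it selects the profiles whose
# beamCodeByPulse entry matches, building (nblocks, nchan, newProfiles, nsa), then transposes
# and folds the block axis into the profile axis. The per-beam selection is essentially:
#   channels = self.beamCodeByPulse[0, :]                     # beam code of every profile
#   idx0     = numpy.where(channels == self.beamCode[0])[0]   # profiles belonging to beam 0
#   new_block[:, 0, :, :] = self.dataset[:, idx0, :]          # (nblocks, newProfiles, nsa)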
488 488
489 489 def updateIndexes(self):
490 490
491 491 pass
492 492
493 493 def fillJROHeader(self):
494 494
495 495 #fill radar controller header
496 496 self.dataOut.radarControllerHeaderObj = RadarControllerHeader(ipp=self.__ippKm,
497 497 txA=self.__txA,
498 498 txB=0,
499 499 nWindows=1,
500 500 nHeights=self.__nSamples,
501 501 firstHeight=self.__firstHeight,
502 502 deltaHeight=self.__deltaHeight,
503 503 codeType=self.__codeType,
504 504 nCode=self.__nCode, nBaud=self.__nBaud,
505 505 code = self.__code,
506 506 fClock=1)
507 507 #fill system header
508 508 self.dataOut.systemHeaderObj = SystemHeader(nSamples=self.__nSamples,
509 509 nProfiles=self.newProfiles,
510 510 nChannels=len(self.__channelList),
511 511 adcResolution=14,
512 512 pciDioBusWidth=32)
513 513
514 514 self.dataOut.type = "Voltage"
515 515 self.dataOut.data = None
516 516 self.dataOut.dtype = numpy.dtype([('real','<i8'),('imag','<i8')])
517 517 # self.dataOut.nChannels = 0
518 518
519 519 # self.dataOut.nHeights = 0
520 520
521 521 self.dataOut.nProfiles = self.newProfiles*self.nblocks
522 522 #self.dataOut.heightList = self.__firstHeigth + numpy.arange(self.__nSamples, dtype = numpy.float)*self.__deltaHeigth
523 523 ranges = numpy.reshape(self.rangeFromFile[()],(-1))
524 524 self.dataOut.heightList = ranges/1000.0 #km
525 525 self.dataOut.channelList = self.__channelList
526 526 self.dataOut.blocksize = self.dataOut.nChannels * self.dataOut.nHeights
527 527
528 528 # self.dataOut.channelIndexList = None
529 529
530 530
531 531 self.dataOut.azimuthList = numpy.array(self.azimuthList)
532 532 self.dataOut.elevationList = numpy.array(self.elevationList)
533 533 self.dataOut.codeList = numpy.array(self.beamCode)
534 534 #print(self.dataOut.elevationList)
535 535 self.dataOut.flagNoData = True
536 536
537 537 #Set to TRUE if the data is discontinuous
538 538 self.dataOut.flagDiscontinuousBlock = False
539 539
540 540 self.dataOut.utctime = None
541 541
542 542 #self.dataOut.timeZone = -5 #self.__timezone/60 #timezone like jroheader, difference in minutes between UTC and localtime
543 543 if self.timezone == 'lt':
544 544 self.dataOut.timeZone = time.timezone / 60. #get the timezone in minutes
545 545 else:
546 546 self.dataOut.timeZone = 0 #by default time is UTC
547 547
548 548 self.dataOut.dstFlag = 0
549 549 self.dataOut.errorCount = 0
550 550 self.dataOut.nCohInt = 1
551 551 self.dataOut.flagDecodeData = False #assume the data is already decoded
552 552 self.dataOut.flagDeflipData = False #assume the data has no flip applied
553 553 self.dataOut.flagShiftFFT = False
554 554 self.dataOut.ippSeconds = self.ippSeconds
555 555
556 556 #Time interval between profiles
557 557 #self.dataOut.timeInterval = self.dataOut.ippSeconds * self.dataOut.nCohInt
558 558
559 559 self.dataOut.frequency = self.__frequency
560 560 self.dataOut.realtime = self.online
561 561 pass
562 562
563 563 def readNextFile(self,online=False):
564 564
565 565 if not(online):
566 566 newFile = self.__setNextFileOffline()
567 567 else:
568 568 newFile = self.__setNextFileOnline()
569 569
570 570 if not(newFile):
571 571 self.dataOut.error = True
572 572 return 0
573 573
574 574 if not self.readAMISRHeader(self.amisrFilePointer):
575 575 self.dataOut.error = True
576 576 return 0
577 577
578 578 self.createBuffers()
579 579 self.fillJROHeader()
580 580
581 581 #self.__firstFile = False
582 582
583 583
584 584
585 585 self.dataset,self.timeset = self.readData()
586 586
587 587 if self.endDate!=None:
588 588 endDateTime_Reader = datetime.datetime.combine(self.endDate,self.endTime)
589 589 time_str = self.amisrFilePointer.get('Time/RadacTimeString')
590 590 startDateTimeStr_File = time_str[0][0].decode('UTF-8').split('.')[0]
591 591 junk = time.strptime(startDateTimeStr_File, '%Y-%m-%d %H:%M:%S')
592 592 startDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec)
593 593 if self.timezone == 'lt':
594 594 startDateTime_File = startDateTime_File - datetime.timedelta(minutes = 300)
595 595 if (startDateTime_File>endDateTime_Reader):
596 596 return 0
597 597
598 598 self.jrodataset = self.reshapeData()
599 599 #----self.updateIndexes()
600 600 self.profileIndex = 0
601 601
602 602 return 1
603 603
604 604
605 605 def __hasNotDataInBuffer(self):
606 606 if self.profileIndex >= (self.newProfiles*self.nblocks):
607 607 return 1
608 608 return 0
609 609
610 610
611 611 def getData(self):
612 612
613 613 if self.flagNoMoreFiles:
614 614 self.dataOut.flagNoData = True
615 615 return 0
616 616
617 if self.__hasNotDataInBuffer():
617 if self.profileIndex >= (self.newProfiles*self.nblocks): #
618 #if self.__hasNotDataInBuffer():
618 619 if not (self.readNextFile(self.online)):
619 620 return 0
620 621
621 622
622 623 if self.dataset is None: # set this condition when there is no data left to read
623 624 self.dataOut.flagNoData = True
624 625 return 0
625 626
626 627 #self.dataOut.data = numpy.reshape(self.jrodataset[self.profileIndex,:],(1,-1))
627 628
628 629 self.dataOut.data = self.jrodataset[:,self.profileIndex,:]
629 630
630 631 #print("R_t",self.timeset)
631 632
632 633 #self.dataOut.utctime = self.jrotimeset[self.profileIndex]
633 634 #check the JRO data basic header and verify it is compatible with this value
634 635 #self.dataOut.utctime = self.timeset + (self.profileIndex * self.ippSeconds * self.nchannels)
635 636 indexprof = numpy.mod(self.profileIndex, self.newProfiles)
636 637 indexblock = self.profileIndex/self.newProfiles
637 638 #print (indexblock, indexprof)
638 639 diffUTC = 0
639 640 t_comp = (indexprof * self.ippSeconds * self.nchannels) + diffUTC #
640 641
641 642 #print("utc :",indexblock," __ ",t_comp)
642 643 #print(numpy.shape(self.timeset))
643 644 self.dataOut.utctime = self.timeset[numpy.int_(indexblock)] + t_comp
644 645 #self.dataOut.utctime = self.timeset[self.profileIndex] + t_comp
645 646
646 647 self.dataOut.profileIndex = self.profileIndex
647 648 #print("N profile:",self.profileIndex,self.newProfiles,self.nblocks,self.dataOut.utctime)
648 649 self.dataOut.flagNoData = False
649 650 # if indexprof == 0:
650 651 # print("kamisr: ",self.dataOut.utctime)
651 652
652 653 self.profileIndex += 1
653 654
654 655 return self.dataOut.data #is this return necessary??
655 656
656 657
657 658 def run(self, **kwargs):
658 659 '''
659 660 This method will be called many times so here you should put all your code
660 661 '''
661 662 #print("running kamisr")
662 663 if not self.isConfig:
663 664 self.setup(**kwargs)
664 665 self.isConfig = True
665 666
666 667 self.getData()
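# --- Illustration (hedged sketch, placeholder path and dates): a minimal offline use of
# AMISRReader as defined above. run() configures itself on the first call and then delivers
# one profile per call through self.dataOut (a Voltage object).
#   reader = AMISRReader()
#   reader.run(path='/data/amisr', startDate=datetime.date(2021, 7, 20),
#              endDate=datetime.date(2021, 7, 20),
#              startTime=datetime.time(0, 0, 0), endTime=datetime.time(23, 59, 59),
#              walk=True, timezone='ut', online=False)
#   profile = reader.dataOut.data   # shape (nchannels, nsamples) for the current profile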
@@ -1,2076 +1,2086
1 1 # Copyright (c) 2012-2020 Jicamarca Radio Observatory
2 2 # All rights reserved.
3 3 #
4 4 # Distributed under the terms of the BSD 3-clause license.
5 5 """Spectra processing Unit and operations
6 6
7 7 Here you will find the processing unit `SpectraProc` and several operations
8 8 to work with Spectra data type
9 9 """
10 10
11 11 import time
12 12 import itertools
13 13
14 14 import numpy
15 15 import math
16 16
17 17 from schainpy.model.proc.jroproc_base import ProcessingUnit, MPDecorator, Operation
18 18 from schainpy.model.data.jrodata import Spectra
19 19 from schainpy.model.data.jrodata import hildebrand_sekhon
20 20 from schainpy.model.data import _noise
21 21
22 22 from schainpy.utils import log
23 23 import matplotlib.pyplot as plt
24 24 from scipy.optimize import curve_fit  # used by CleanRayleigh.cleanRayleigh below
25 25
26 26 class SpectraProc(ProcessingUnit):
27 27
28 28 def __init__(self):
29 29
30 30 ProcessingUnit.__init__(self)
31 31
32 32 self.buffer = None
33 33 self.firstdatatime = None
34 34 self.profIndex = 0
35 35 self.dataOut = Spectra()
36 36 self.id_min = None
37 37 self.id_max = None
38 38 self.setupReq = False #Add this to every processing unit
39 39
40 40 def __updateSpecFromVoltage(self):
41 41
42 42
43 43
44 44 self.dataOut.timeZone = self.dataIn.timeZone
45 45 self.dataOut.dstFlag = self.dataIn.dstFlag
46 46 self.dataOut.errorCount = self.dataIn.errorCount
47 47 self.dataOut.useLocalTime = self.dataIn.useLocalTime
48 48 try:
49 49 self.dataOut.processingHeaderObj = self.dataIn.processingHeaderObj.copy()
50 50 except:
51 51 pass
52 52 self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()
53 53 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()
54 54 self.dataOut.channelList = self.dataIn.channelList
55 55 self.dataOut.heightList = self.dataIn.heightList
56 56 self.dataOut.dtype = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
57 57 self.dataOut.nProfiles = self.dataOut.nFFTPoints
58 58 self.dataOut.flagDiscontinuousBlock = self.dataIn.flagDiscontinuousBlock
59 59 self.dataOut.utctime = self.firstdatatime
60 60 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData
61 61 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData
62 62 self.dataOut.flagShiftFFT = False
63 63 self.dataOut.nCohInt = self.dataIn.nCohInt
64 64 self.dataOut.nIncohInt = 1
65 65 self.dataOut.windowOfFilter = self.dataIn.windowOfFilter
66 66 self.dataOut.frequency = self.dataIn.frequency
67 67 self.dataOut.realtime = self.dataIn.realtime
68 68 self.dataOut.azimuth = self.dataIn.azimuth
69 69 self.dataOut.zenith = self.dataIn.zenith
70 70 self.dataOut.codeList = self.dataIn.codeList
71 71 self.dataOut.azimuthList = self.dataIn.azimuthList
72 72 self.dataOut.elevationList = self.dataIn.elevationList
73 73
74 74
75 75 def __getFft(self):
76 76 """
77 77 Converts Voltage values to Spectra
78 78
79 79 Affected:
80 80 self.dataOut.data_spc
81 81 self.dataOut.data_cspc
82 82 self.dataOut.data_dc
83 83 self.dataOut.heightList
84 84 self.profIndex
85 85 self.buffer
86 86 self.dataOut.flagNoData
87 87 """
88 88 fft_volt = numpy.fft.fft(
89 89 self.buffer, n=self.dataOut.nFFTPoints, axis=1)
90 90 fft_volt = fft_volt.astype(numpy.dtype('complex'))
91 91 dc = fft_volt[:, 0, :]
92 92
93 93 # self-spectra computation
94 94 fft_volt = numpy.fft.fftshift(fft_volt, axes=(1,))
95 95 spc = fft_volt * numpy.conjugate(fft_volt)
96 96 spc = spc.real
97 97
98 98 blocksize = 0
99 99 blocksize += dc.size
100 100 blocksize += spc.size
101 101
102 102 cspc = None
103 103 pairIndex = 0
104 104 if self.dataOut.pairsList != None:
105 105 # cross-spectra computation
106 106 cspc = numpy.zeros(
107 107 (self.dataOut.nPairs, self.dataOut.nFFTPoints, self.dataOut.nHeights), dtype='complex')
108 108 for pair in self.dataOut.pairsList:
109 109 if pair[0] not in self.dataOut.channelList:
110 110 raise ValueError("Error getting CrossSpectra: pair 0 of %s is not in channelList = %s" % (
111 111 str(pair), str(self.dataOut.channelList)))
112 112 if pair[1] not in self.dataOut.channelList:
113 113 raise ValueError("Error getting CrossSpectra: pair 1 of %s is not in channelList = %s" % (
114 114 str(pair), str(self.dataOut.channelList)))
115 115
116 116 cspc[pairIndex, :, :] = fft_volt[pair[0], :, :] * \
117 117 numpy.conjugate(fft_volt[pair[1], :, :])
118 118 pairIndex += 1
119 119 blocksize += cspc.size
120 120
121 121 self.dataOut.data_spc = spc
122 122 self.dataOut.data_cspc = cspc
123 123 self.dataOut.data_dc = dc
124 124 self.dataOut.blockSize = blocksize
125 125 self.dataOut.flagShiftFFT = False
126 126
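# --- Illustration (hedged sketch): __getFft() computes, per channel, the self-spectra as
# |FFT(v)|^2 and, per pair (i, j), the cross-spectra FFT(v_i)*conj(FFT(v_j)); the DC profile is
# taken from bin 0 before fftshift. A single-channel equivalent of the self-spectra step:
#   v   = self.buffer[0]                                      # (nProfiles, nHeights), complex
#   F   = numpy.fft.fftshift(numpy.fft.fft(v, n=self.dataOut.nFFTPoints, axis=0), axes=0)
#   spc = (F * numpy.conjugate(F)).real                       # matches data_spc[0]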
127 127 def run(self, nProfiles=None, nFFTPoints=None, pairsList=None, ippFactor=None, shift_fft=False):
128 128 #print("run spc proc")
129 129 try:
130 130 type = self.dataIn.type.decode("utf-8")
131 131 self.dataIn.type = type
132 132 except:
133 133 pass
134 134 if self.dataIn.type == "Spectra":
135 135
136 136 try:
137 137 self.dataOut.copy(self.dataIn)
138 138
139 139 except Exception as e:
140 140 print("Error dataIn ",e)
141 141
142 142 if shift_fft:
143 143 #shift the spectrum a fixed number of positions to the right along the FFT axis
144 144 shift = int(self.dataOut.nFFTPoints/2)
145 145 self.dataOut.data_spc = numpy.roll(self.dataOut.data_spc, shift , axis=1)
146 146
147 147 if self.dataOut.data_cspc is not None:
148 148 #shift the cross-spectra the same number of positions along the FFT axis
149 149 self.dataOut.data_cspc = numpy.roll(self.dataOut.data_cspc, shift, axis=1)
150 150 if pairsList:
151 151 self.__selectPairs(pairsList)
152 152
153 153
154 154 elif self.dataIn.type == "Voltage":
155 155
156 156 self.dataOut.flagNoData = True
157 157
158 158 if nFFTPoints == None:
159 159 raise ValueError("This SpectraProc.run() need nFFTPoints input variable")
160 160
161 161 if nProfiles == None:
162 162 nProfiles = nFFTPoints
163 163
164 164 if ippFactor == None:
165 165 self.dataOut.ippFactor = 1
166 166
167 167 self.dataOut.nFFTPoints = nFFTPoints
168 168 #print(" volts ch,prof, h: ", self.dataIn.data.shape)
169 169 if self.buffer is None:
170 170 self.buffer = numpy.zeros((self.dataIn.nChannels,
171 171 nProfiles,
172 172 self.dataIn.nHeights),
173 173 dtype='complex')
174 174
175 175 if self.dataIn.flagDataAsBlock:
176 176 nVoltProfiles = self.dataIn.data.shape[1]
177 177
178 178 if nVoltProfiles == nProfiles:
179 179 self.buffer = self.dataIn.data.copy()
180 180 self.profIndex = nVoltProfiles
181 181
182 182 elif nVoltProfiles < nProfiles:
183 183
184 184 if self.profIndex == 0:
185 185 self.id_min = 0
186 186 self.id_max = nVoltProfiles
187 187
188 188 self.buffer[:, self.id_min:self.id_max,
189 189 :] = self.dataIn.data
190 190 self.profIndex += nVoltProfiles
191 191 self.id_min += nVoltProfiles
192 192 self.id_max += nVoltProfiles
193 193 else:
194 194 raise ValueError("The type object %s has %d profiles, it should just has %d profiles" % (
195 195 self.dataIn.type, self.dataIn.data.shape[1], nProfiles))
196 196 self.dataOut.flagNoData = True
197 197 else:
198 198 self.buffer[:, self.profIndex, :] = self.dataIn.data.copy()
199 199 self.profIndex += 1
200 200
201 201 if self.firstdatatime == None:
202 202 self.firstdatatime = self.dataIn.utctime
203 203
204 204 if self.profIndex == nProfiles:
205 205
206 206 self.__updateSpecFromVoltage()
207 207
208 208 if pairsList == None:
209 209 self.dataOut.pairsList = [pair for pair in itertools.combinations(self.dataOut.channelList, 2)]
210 210 else:
211 211 self.dataOut.pairsList = pairsList
212 212 self.__getFft()
213 213 self.dataOut.flagNoData = False
214 214 self.firstdatatime = None
215 215 self.profIndex = 0
216 216
217 217 elif self.dataIn.type == "Parameters":
218 218
219 219 self.dataOut.data_spc = self.dataIn.data_spc
220 220 self.dataOut.data_cspc = self.dataIn.data_cspc
221 221 self.dataOut.data_outlier = self.dataIn.data_outlier
222 222 self.dataOut.nProfiles = self.dataIn.nProfiles
223 223 self.dataOut.nIncohInt = self.dataIn.nIncohInt
224 224 self.dataOut.nFFTPoints = self.dataIn.nFFTPoints
225 225 self.dataOut.ippFactor = self.dataIn.ippFactor
226 226 self.dataOut.max_nIncohInt = self.dataIn.max_nIncohInt
227 227 self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()
228 228 self.dataOut.ipp = self.dataIn.ipp
229 229 #self.dataOut.abscissaList = self.dataIn.getVelRange(1)
230 230 #self.dataOut.spc_noise = self.dataIn.getNoise()
231 231 #self.dataOut.spc_range = (self.dataIn.getFreqRange(1) , self.dataIn.getAcfRange(1) , self.dataIn.getVelRange(1))
232 232 # self.dataOut.normFactor = self.dataIn.normFactor
233 233 if hasattr(self.dataIn, 'channelList'):
234 234 self.dataOut.channelList = self.dataIn.channelList
235 235 if hasattr(self.dataIn, 'pairsList'):
236 236 self.dataOut.pairsList = self.dataIn.pairsList
237 237 self.dataOut.groupList = self.dataIn.pairsList
238 238
239 239 self.dataOut.flagNoData = False
240 240
241 241 if hasattr(self.dataIn, 'ChanDist'): #Distances of receiver channels
242 242 self.dataOut.ChanDist = self.dataIn.ChanDist
243 243 else: self.dataOut.ChanDist = None
244 244
245 245 #if hasattr(self.dataIn, 'VelRange'): #Velocities range
246 246 # self.dataOut.VelRange = self.dataIn.VelRange
247 247 #else: self.dataOut.VelRange = None
248 248
249 249
250 250
251 251 else:
252 252 raise ValueError("The type of input object {} is not valid".format(
253 253 self.dataIn.type))
254 254
255 255
256 256 def __selectPairs(self, pairsList):
257 257
258 258 if not pairsList:
259 259 return
260 260
261 261 pairs = []
262 262 pairsIndex = []
263 263
264 264 for pair in pairsList:
265 265 if pair[0] not in self.dataOut.channelList or pair[1] not in self.dataOut.channelList:
266 266 continue
267 267 pairs.append(pair)
268 268 pairsIndex.append(pairs.index(pair))
269 269
270 270 self.dataOut.data_cspc = self.dataOut.data_cspc[pairsIndex]
271 271 self.dataOut.pairsList = pairs
272 272
273 273 return
274 274
275 275 def selectFFTs(self, minFFT, maxFFT ):
276 276 """
277 277 Selects a block of data according to a set of FFT-point values within the range
278 278 minFFT<= FFT <= maxFFT
279 279 """
280 280
281 281 if (minFFT > maxFFT):
282 282 raise ValueError("Error selecting heights: Height range (%d,%d) is not valid" % (minFFT, maxFFT))
283 283
284 284 if (minFFT < self.dataOut.getFreqRange()[0]):
285 285 minFFT = self.dataOut.getFreqRange()[0]
286 286
287 287 if (maxFFT > self.dataOut.getFreqRange()[-1]):
288 288 maxFFT = self.dataOut.getFreqRange()[-1]
289 289
290 290 minIndex = 0
291 291 maxIndex = 0
292 292 FFTs = self.dataOut.getFreqRange()
293 293
294 294 inda = numpy.where(FFTs >= minFFT)
295 295 indb = numpy.where(FFTs <= maxFFT)
296 296
297 297 try:
298 298 minIndex = inda[0][0]
299 299 except:
300 300 minIndex = 0
301 301
302 302 try:
303 303 maxIndex = indb[0][-1]
304 304 except:
305 305 maxIndex = len(FFTs)
306 306
307 307 self.selectFFTsByIndex(minIndex, maxIndex)
308 308
309 309 return 1
310 310
311 311 def getBeaconSignal(self, tauindex=0, channelindex=0, hei_ref=None):
312 312 newheis = numpy.where(
313 313 self.dataOut.heightList > self.dataOut.radarControllerHeaderObj.Taus[tauindex])
314 314
315 315 if hei_ref != None:
316 316 newheis = numpy.where(self.dataOut.heightList > hei_ref)
317 317
318 318 minIndex = min(newheis[0])
319 319 maxIndex = max(newheis[0])
320 320 data_spc = self.dataOut.data_spc[:, :, minIndex:maxIndex + 1]
321 321 heightList = self.dataOut.heightList[minIndex:maxIndex + 1]
322 322
323 323 # determine indices
324 324 nheis = int(self.dataOut.radarControllerHeaderObj.txB /
325 325 (self.dataOut.heightList[1] - self.dataOut.heightList[0]))
326 326 avg_dB = 10 * \
327 327 numpy.log10(numpy.sum(data_spc[channelindex, :, :], axis=0))
328 328 beacon_dB = numpy.sort(avg_dB)[-nheis:]
329 329 beacon_heiIndexList = []
330 330 for val in avg_dB.tolist():
331 331 if val >= beacon_dB[0]:
332 332 beacon_heiIndexList.append(avg_dB.tolist().index(val))
333 333
334 334 #data_spc = data_spc[:,:,beacon_heiIndexList]
335 335 data_cspc = None
336 336 if self.dataOut.data_cspc is not None:
337 337 data_cspc = self.dataOut.data_cspc[:, :, minIndex:maxIndex + 1]
338 338 #data_cspc = data_cspc[:,:,beacon_heiIndexList]
339 339
340 340 data_dc = None
341 341 if self.dataOut.data_dc is not None:
342 342 data_dc = self.dataOut.data_dc[:, minIndex:maxIndex + 1]
343 343 #data_dc = data_dc[:,beacon_heiIndexList]
344 344
345 345 self.dataOut.data_spc = data_spc
346 346 self.dataOut.data_cspc = data_cspc
347 347 self.dataOut.data_dc = data_dc
348 348 self.dataOut.heightList = heightList
349 349 self.dataOut.beacon_heiIndexList = beacon_heiIndexList
350 350
351 351 return 1
352 352
353 353 def selectFFTsByIndex(self, minIndex, maxIndex):
354 354 """
355 355
356 356 """
357 357
358 358 if (minIndex < 0) or (minIndex > maxIndex):
359 359 raise ValueError("Error selecting heights: Index range (%d,%d) is not valid" % (minIndex, maxIndex))
360 360
361 361 if (maxIndex >= self.dataOut.nProfiles):
362 362 maxIndex = self.dataOut.nProfiles-1
363 363
364 364 #Spectra
365 365 data_spc = self.dataOut.data_spc[:,minIndex:maxIndex+1,:]
366 366
367 367 data_cspc = None
368 368 if self.dataOut.data_cspc is not None:
369 369 data_cspc = self.dataOut.data_cspc[:,minIndex:maxIndex+1,:]
370 370
371 371 data_dc = None
372 372 if self.dataOut.data_dc is not None:
373 373 data_dc = self.dataOut.data_dc[minIndex:maxIndex+1,:]
374 374
375 375 self.dataOut.data_spc = data_spc
376 376 self.dataOut.data_cspc = data_cspc
377 377 self.dataOut.data_dc = data_dc
378 378
379 379 self.dataOut.ippSeconds = self.dataOut.ippSeconds*(self.dataOut.nFFTPoints / numpy.shape(data_cspc)[1])
380 380 self.dataOut.nFFTPoints = numpy.shape(data_cspc)[1]
381 381 self.dataOut.profilesPerBlock = numpy.shape(data_cspc)[1]
382 382
383 383 return 1
384 384
385 385 def getNoise(self, minHei=None, maxHei=None, minVel=None, maxVel=None):
386 386 # height range validation
387 387 if minHei == None:
388 388 minHei = self.dataOut.heightList[0]
389 389 
390 390 if maxHei == None:
391 391 maxHei = self.dataOut.heightList[-1]
392 392 
393 393 if (minHei < self.dataOut.heightList[0]) or (minHei > maxHei):
394 394 print('minHei: %.2f is out of the heights range' % (minHei))
395 395 print('minHei is set to %.2f' % (self.dataOut.heightList[0]))
396 396 minHei = self.dataOut.heightList[0]
397 397 
398 398 if (maxHei > self.dataOut.heightList[-1]) or (maxHei < minHei):
399 399 print('maxHei: %.2f is out of the heights range' % (maxHei))
400 400 print('maxHei is set to %.2f' % (self.dataOut.heightList[-1]))
401 401 maxHei = self.dataOut.heightList[-1]
402 402
403 403 # velocity range validation
404 404 velrange = self.dataOut.getVelRange(1)
405 405 
406 406 if minVel == None:
407 407 minVel = velrange[0]
408 408 
409 409 if maxVel == None:
410 410 maxVel = velrange[-1]
411 411 
412 412 if (minVel < velrange[0]) or (minVel > maxVel):
413 413 print('minVel: %.2f is out of the velocity range' % (minVel))
414 414 print('minVel is set to %.2f' % (velrange[0]))
415 415 minVel = velrange[0]
416 416 
417 417 if (maxVel > velrange[-1]) or (maxVel < minVel):
418 418 print('maxVel: %.2f is out of the velocity range' % (maxVel))
419 419 print('maxVel is set to %.2f' % (velrange[-1]))
420 420 maxVel = velrange[-1]
421 421
422 422 # index selection for the height range
423 423 minIndex = 0
424 424 maxIndex = 0
425 425 heights = self.dataOut.heightList
426 426
427 427 inda = numpy.where(heights >= minHei)
428 428 indb = numpy.where(heights <= maxHei)
429 429
430 430 try:
431 431 minIndex = inda[0][0]
432 432 except:
433 433 minIndex = 0
434 434
435 435 try:
436 436 maxIndex = indb[0][-1]
437 437 except:
438 438 maxIndex = len(heights)
439 439
440 440 if (minIndex < 0) or (minIndex > maxIndex):
441 441 raise ValueError("some value in (%d,%d) is not valid" % (
442 442 minIndex, maxIndex))
443 443
444 444 if (maxIndex >= self.dataOut.nHeights):
445 445 maxIndex = self.dataOut.nHeights - 1
446 446
447 447 # index selection for the velocities
448 448 indminvel = numpy.where(velrange >= minVel)
449 449 indmaxvel = numpy.where(velrange <= maxVel)
450 450 try:
451 451 minIndexVel = indminvel[0][0]
452 452 except:
453 453 minIndexVel = 0
454 454
455 455 try:
456 456 maxIndexVel = indmaxvel[0][-1]
457 457 except:
458 458 maxIndexVel = len(velrange)
459 459
460 460 # spectrum selection
461 461 data_spc = self.dataOut.data_spc[:,
462 462 minIndexVel:maxIndexVel + 1, minIndex:maxIndex + 1]
463 463 # noise estimation
464 464 noise = numpy.zeros(self.dataOut.nChannels)
465 465
466 466 for channel in range(self.dataOut.nChannels):
467 467 daux = data_spc[channel, :, :]
468 468 sortdata = numpy.sort(daux, axis=None)
469 469 noise[channel] = hildebrand_sekhon(sortdata, self.dataOut.nIncohInt)
470 470
471 471 self.dataOut.noise_estimation = noise.copy()
472 472
473 473 return 1
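# --- Illustration (hedged sketch, restating the loop above): the noise estimate uses the
# usual sorted-data approach: flatten the selected window of the spectrum, sort it, and let
# hildebrand_sekhon() return the noise level for the given number of incoherent integrations.
#   daux     = data_spc[ch, :, :]                             # selected velocity/height window
#   noise_ch = hildebrand_sekhon(numpy.sort(daux, axis=None), self.dataOut.nIncohInt)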
474 474
475 475 class removeDC(Operation):
476 476
477 477 def run(self, dataOut, mode=2):
478 478 self.dataOut = dataOut
479 479 jspectra = self.dataOut.data_spc
480 480 jcspectra = self.dataOut.data_cspc
481 481
482 482 num_chan = jspectra.shape[0]
483 483 num_hei = jspectra.shape[2]
484 484
485 485 if jcspectra is not None:
486 486 jcspectraExist = True
487 487 num_pairs = jcspectra.shape[0]
488 488 else:
489 489 jcspectraExist = False
490 490
491 491 freq_dc = int(jspectra.shape[1] / 2)
492 492 ind_vel = numpy.array([-2, -1, 1, 2]) + freq_dc
493 493 ind_vel = ind_vel.astype(int)
494 494
495 495 if ind_vel[0] < 0:
496 496 ind_vel[list(range(0, 1))] = ind_vel[list(range(0, 1))] + self.num_prof
497 497
498 498 if mode == 1:
499 499 jspectra[:, freq_dc, :] = (
500 500 jspectra[:, ind_vel[1], :] + jspectra[:, ind_vel[2], :]) / 2 # CORRECCION
501 501
502 502 if jcspectraExist:
503 503 jcspectra[:, freq_dc, :] = (
504 504 jcspectra[:, ind_vel[1], :] + jcspectra[:, ind_vel[2], :]) / 2
505 505
506 506 if mode == 2:
507 507
508 508 vel = numpy.array([-2, -1, 1, 2])
509 509 xx = numpy.zeros([4, 4])
510 510
511 511 for fil in range(4):
512 512 xx[fil, :] = vel[fil]**numpy.asarray(list(range(4)))
513 513
514 514 xx_inv = numpy.linalg.inv(xx)
515 515 xx_aux = xx_inv[0, :]
516 516
517 517 for ich in range(num_chan):
518 518 yy = jspectra[ich, ind_vel, :]
519 519 jspectra[ich, freq_dc, :] = numpy.dot(xx_aux, yy)
520 520
521 521 junkid = jspectra[ich, freq_dc, :] <= 0
522 522 cjunkid = sum(junkid)
523 523
524 524 if cjunkid.any():
525 525 jspectra[ich, freq_dc, junkid.nonzero()] = (
526 526 jspectra[ich, ind_vel[1], junkid] + jspectra[ich, ind_vel[2], junkid]) / 2
527 527
528 528 if jcspectraExist:
529 529 for ip in range(num_pairs):
530 530 yy = jcspectra[ip, ind_vel, :]
531 531 jcspectra[ip, freq_dc, :] = numpy.dot(xx_aux, yy)
532 532
533 533 self.dataOut.data_spc = jspectra
534 534 self.dataOut.data_cspc = jcspectra
535 535
536 536 return self.dataOut
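# --- Illustration (hedged sketch): in mode=2 the DC bin is replaced by the value at velocity 0
# of a cubic polynomial fitted through the four neighbouring bins (v = -2, -1, 1, 2). Because
# xx[f, :] = v_f**[0, 1, 2, 3], row 0 of xx_inv yields the fit's constant term, i.e. the
# interpolated value at v = 0:
#   a = numpy.linalg.solve(xx, yy)       # polynomial coefficients [a0, a1, a2, a3] per height
#   jspectra[ich, freq_dc, :] = a[0]     # same result as numpy.dot(xx_aux, yy)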
537 537
538 538 class getNoiseB(Operation):
539 539
540 540 __slots__ =('offset','warnings', 'isConfig', 'minIndex','maxIndex','minIndexFFT','maxIndexFFT')
541 541 def __init__(self):
542 542
543 543 Operation.__init__(self)
544 544 self.isConfig = False
545 545
546 546 def setup(self, offset=None, minHei=None, maxHei=None,minVel=None, maxVel=None, minFreq= None, maxFreq=None, warnings=False):
547 547
548 548 self.warnings = warnings
549 549 if minHei == None:
550 550 minHei = self.dataOut.heightList[0]
551 551
552 552 if maxHei == None:
553 553 maxHei = self.dataOut.heightList[-1]
554 554
555 555 if (minHei < self.dataOut.heightList[0]) or (minHei > maxHei):
556 556 if self.warnings:
557 557 print('minHei: %.2f is out of the heights range' % (minHei))
558 558 print('minHei is set to %.2f' % (self.dataOut.heightList[0]))
559 559 minHei = self.dataOut.heightList[0]
560 560
561 561 if (maxHei > self.dataOut.heightList[-1]) or (maxHei < minHei):
562 562 if self.warnings:
563 563 print('maxHei: %.2f is out of the heights range' % (maxHei))
564 564 print('maxHei is set to %.2f' % (self.dataOut.heightList[-1]))
565 565 maxHei = self.dataOut.heightList[-1]
566 566
567 567
568 568 #indices relative to the FFT points; they can be given by velocity or by frequency
569 569 minIndexFFT = 0
570 570 maxIndexFFT = 0
571 571 # velocity/frequency range validation
572 572 indminPoint = None
573 573 indmaxPoint = None
574 574 if self.dataOut.type == 'Spectra':
575 575 if minVel == None and maxVel == None :
576 576
577 577 freqrange = self.dataOut.getFreqRange(1)
578 578
579 579 if minFreq == None:
580 580 minFreq = freqrange[0]
581 581
582 582 if maxFreq == None:
583 583 maxFreq = freqrange[-1]
584 584
585 585 if (minFreq < freqrange[0]) or (minFreq > maxFreq):
586 586 if self.warnings:
587 587 print('minFreq: %.2f is out of the frequency range' % (minFreq))
588 588 print('minFreq is set to %.2f' % (freqrange[0]))
589 589 minFreq = freqrange[0]
590 590
591 591 if (maxFreq > freqrange[-1]) or (maxFreq < minFreq):
592 592 if self.warnings:
593 593 print('maxFreq: %.2f is out of the frequency range' % (maxFreq))
594 594 print('maxFreq is set to %.2f' % (freqrange[-1]))
595 595 maxFreq = freqrange[-1]
596 596
597 597 indminPoint = numpy.where(freqrange >= minFreq)
598 598 indmaxPoint = numpy.where(freqrange <= maxFreq)
599 599
600 600 else:
601 601
602 602 velrange = self.dataOut.getVelRange(1)
603 603
604 604 if minVel == None:
605 605 minVel = velrange[0]
606 606
607 607 if maxVel == None:
608 608 maxVel = velrange[-1]
609 609
610 610 if (minVel < velrange[0]) or (minVel > maxVel):
611 611 if self.warnings:
612 612 print('minVel: %.2f is out of the velocity range' % (minVel))
613 613 print('minVel is set to %.2f' % (velrange[0]))
614 614 minVel = velrange[0]
615 615
616 616 if (maxVel > velrange[-1]) or (maxVel < minVel):
617 617 if self.warnings:
618 618 print('maxVel: %.2f is out of the velocity range' % (maxVel))
619 619 print('maxVel is set to %.2f' % (velrange[-1]))
620 620 maxVel = velrange[-1]
621 621
622 622 indminPoint = numpy.where(velrange >= minVel)
623 623 indmaxPoint = numpy.where(velrange <= maxVel)
624 624
625 625
626 626 # index selection for the height range
627 627 minIndex = 0
628 628 maxIndex = 0
629 629 heights = self.dataOut.heightList
630 630
631 631 inda = numpy.where(heights >= minHei)
632 632 indb = numpy.where(heights <= maxHei)
633 633
634 634 try:
635 635 minIndex = inda[0][0]
636 636 except:
637 637 minIndex = 0
638 638
639 639 try:
640 640 maxIndex = indb[0][-1]
641 641 except:
642 642 maxIndex = len(heights)
643 643
644 644 if (minIndex < 0) or (minIndex > maxIndex):
645 645 raise ValueError("some value in (%d,%d) is not valid" % (
646 646 minIndex, maxIndex))
647 647
648 648 if (maxIndex >= self.dataOut.nHeights):
649 649 maxIndex = self.dataOut.nHeights - 1
650 650 #############################################################
651 651 # index selection for the velocities
652 652 if self.dataOut.type == 'Spectra':
653 653 try:
654 654 minIndexFFT = indminPoint[0][0]
655 655 except:
656 656 minIndexFFT = 0
657 657
658 658 try:
659 659 maxIndexFFT = indmaxPoint[0][-1]
660 660 except:
661 661 maxIndexFFT = len( self.dataOut.getFreqRange(1))
662 662
663 663 self.minIndex, self.maxIndex, self.minIndexFFT, self.maxIndexFFT = minIndex, maxIndex, minIndexFFT, maxIndexFFT
664 664 self.isConfig = True
665 665 if offset!=None:
666 666 self.offset = 10**(offset/10)
667 #print("config getNoise Done")
667 #print("config getNoiseB Done")
668 668
669 669 def run(self, dataOut, offset=None, minHei=None, maxHei=None,minVel=None, maxVel=None, minFreq= None, maxFreq=None, warnings=False):
670 670 self.dataOut = dataOut
671 671
672 672 if not self.isConfig:
673 673 self.setup(offset, minHei, maxHei,minVel, maxVel, minFreq, maxFreq, warnings)
674 674
675 675 self.dataOut.noise_estimation = None
676 676 noise = None
677 677 if self.dataOut.type == 'Voltage':
678 678 noise = self.dataOut.getNoise(ymin_index=self.minIndex, ymax_index=self.maxIndex)
679 679 #print(minIndex, maxIndex,minIndexVel, maxIndexVel)
680 680 elif self.dataOut.type == 'Spectra':
681
681 #print(self.minIndex, self.maxIndex,self.minIndexFFT, self.maxIndexFFT, self.dataOut.nIncohInt)
682 682 noise = numpy.zeros( self.dataOut.nChannels)
683 norm = 1
683 684 for channel in range( self.dataOut.nChannels):
685 if not hasattr(self.dataOut.nIncohInt,'__len__'):
686 norm = 1
687 else:
684 688 norm = self.dataOut.max_nIncohInt/self.dataOut.nIncohInt[channel, self.minIndex:self.maxIndex]
685 #print("norm nIncoh: ", norm )
689 #print("norm nIncoh: ", norm ,self.dataOut.data_spc.shape)
686 690 daux = self.dataOut.data_spc[channel,self.minIndexFFT:self.maxIndexFFT, self.minIndex:self.maxIndex]
687 691 daux = numpy.multiply(daux, norm)
688 692 #print("offset: ", self.offset, 10*numpy.log10(self.offset))
689 693 #noise[channel] = self.getNoiseByMean(daux)/self.offset
694 #print(daux.shape, daux)
690 695 noise[channel] = self.getNoiseByHS(daux, self.dataOut.max_nIncohInt)/self.offset
691 696
697 # data = numpy.mean(daux,axis=1)
698 # sortdata = numpy.sort(data, axis=None)
699 # noise[channel] = _noise.hildebrand_sekhon(sortdata, self.dataOut.max_nIncohInt)/self.offset
700
692 701 #noise = self.dataOut.getNoise(xmin_index=self.minIndexFFT, xmax_index=self.maxIndexFFT, ymin_index=self.minIndex, ymax_index=self.maxIndex)
693 702 else:
694 703 noise = self.dataOut.getNoise(xmin_index=self.minIndexFFT, xmax_index=self.maxIndexFFT, ymin_index=self.minIndex, ymax_index=self.maxIndex)
695 704 self.dataOut.noise_estimation = noise.copy() # dataOut.noise
696 705 #print("2: ",10*numpy.log10(self.dataOut.noise_estimation/64))
697 706
698 707 #print(self.dataOut.flagNoData)
708 print("getNoise Done")
699 709 return self.dataOut
700 710
701 711 def getNoiseByMean(self,data):
702 712 #data must be sorted
703 713 data = numpy.mean(data,axis=1)
704 714 sortdata = numpy.sort(data, axis=None)
705 715 #sortID=data.argsort()
706 716 #print(data.shape)
707 717
708 718 pnoise = None
709 719 j = 0
710 720
711 721 mean = numpy.mean(sortdata)
712 722 min = numpy.min(sortdata)
713 723 delta = mean - min
714 724 indexes = numpy.where(sortdata > (mean+delta))[0] #only array of indexes
715 725 #print(len(indexes))
716 726 if len(indexes)==0:
717 727 pnoise = numpy.mean(sortdata)
718 728 else:
719 729 j = indexes[0]
720 730 pnoise = numpy.mean(sortdata[0:j])
721 731
722 732 # from matplotlib import pyplot as plt
723 733 # plt.plot(sortdata)
724 734 # plt.vlines(j,(pnoise-delta),(pnoise+delta), color='r')
725 735 # plt.show()
726 736 #print("noise: ", 10*numpy.log10(pnoise))
727 737 return pnoise
728 738
729 739 def getNoiseByHS(self,data, navg):
730 740 #data must be sorted
731 741 #data = numpy.mean(data,axis=1)
732 742 sortdata = numpy.sort(data, axis=None)
733 743
734 744 lenOfData = len(sortdata)
735 745 nums_min = lenOfData*0.05
736 746
737 747 if nums_min <= 5:
738 748
739 749 nums_min = 5
740 750
741 751 sump = 0.
742 752 sumq = 0.
743 753
744 754 j = 0
745 755 cont = 1
746 756
747 757 while((cont == 1)and(j < lenOfData)):
748 758
749 759 sump += sortdata[j]
750 760 sumq += sortdata[j]**2
751 761 #sumq -= sump**2
752 762 if j > nums_min:
753 763 rtest = float(j)/(j-1) + 1.0/0.1
754 764 #if ((sumq*j) > (sump**2)):
755 765 if ((sumq*j) > (rtest*sump**2)):
756 766 j = j - 1
757 767 sump = sump - sortdata[j]
758 768 sumq = sumq - sortdata[j]**2
759 769 cont = 0
760 770
761 771 j += 1
762 772
763 773 lnoise = sump / j
764 774
765 775 return lnoise
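# --- Hedged reading of getNoiseByHS(): it walks the sorted spectrum from the smallest value
# upwards and stops when the running spread of the accepted points exceeds the rtest threshold,
# returning their mean as the noise level. This resembles a Hildebrand-Sekhon style criterion,
# but note the threshold is hard-coded inside rtest and the navg argument is not used by it.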
766 776
767 777
768 778
769 779 def fit_func( x, a0, a1, a2): #, a3, a4, a5):
770 780 z = (x - a1) / a2
771 781 y = a0 * numpy.exp(-z**2 / a2) #+ a3 + a4 * x + a5 * x**2
772 782 return y
773 783
774 784
775 785 class CleanRayleigh(Operation):
776 786
777 787 def __init__(self):
778 788
779 789 Operation.__init__(self)
780 790 self.i=0
781 791 self.isConfig = False
782 792 self.__dataReady = False
783 793 self.__profIndex = 0
784 794 self.byTime = False
785 795 self.byProfiles = False
786 796
787 797 self.bloques = None
788 798 self.bloque0 = None
789 799
790 800 self.index = 0
791 801
792 802 self.buffer = 0
793 803 self.buffer2 = 0
794 804 self.buffer3 = 0
795 805
796 806
797 807 def setup(self,dataOut,min_hei,max_hei,n, timeInterval,factor_stdv):
798 808
799 809 self.nChannels = dataOut.nChannels
800 810 self.nProf = dataOut.nProfiles
801 811 self.nPairs = dataOut.data_cspc.shape[0]
802 812 self.pairsArray = numpy.array(dataOut.pairsList)
803 813 self.spectra = dataOut.data_spc
804 814 self.cspectra = dataOut.data_cspc
805 815 self.heights = dataOut.heightList #all heights
806 816 self.nHeights = len(self.heights)
807 817 self.min_hei = min_hei
808 818 self.max_hei = max_hei
809 819 if (self.min_hei == None):
810 820 self.min_hei = 0
811 821 if (self.max_hei == None):
812 822 self.max_hei = dataOut.heightList[-1]
813 823 self.hval = ((self.max_hei>=self.heights) & (self.heights >= self.min_hei)).nonzero()
814 824 self.heightsClean = self.heights[self.hval] #filtered heights
815 825 self.hval = self.hval[0] # shape (N,), only N elements -> height indices
816 826 self.nHeightsClean = len(self.heightsClean)
817 827 self.channels = dataOut.channelList
818 828 self.nChan = len(self.channels)
819 829 self.nIncohInt = dataOut.nIncohInt
820 830 self.__initime = dataOut.utctime
821 831 self.maxAltInd = self.hval[-1]+1
822 832 self.minAltInd = self.hval[0]
823 833
824 834 self.crosspairs = dataOut.pairsList
825 835 self.nPairs = len(self.crosspairs)
826 836 self.normFactor = dataOut.normFactor
827 837 self.nFFTPoints = dataOut.nFFTPoints
828 838 self.ippSeconds = dataOut.ippSeconds
829 839 self.currentTime = self.__initime
830 840 self.pairsArray = numpy.array(dataOut.pairsList)
831 841 self.factor_stdv = factor_stdv
832 842
833 843 if n != None :
834 844 self.byProfiles = True
835 845 self.nIntProfiles = n
836 846 else:
837 847 self.__integrationtime = timeInterval
838 848
839 849 self.__dataReady = False
840 850 self.isConfig = True
841 851
842 852
843 853
844 854 def run(self, dataOut,min_hei=None,max_hei=None, n=None, timeInterval=10,factor_stdv=2.5):
845 855 #print("runing cleanRayleigh")
846 856 if not self.isConfig :
847 857
848 858 self.setup(dataOut, min_hei,max_hei,n,timeInterval,factor_stdv)
849 859
850 860 tini=dataOut.utctime
851 861
852 862 if self.byProfiles:
853 863 if self.__profIndex == self.nIntProfiles:
854 864 self.__dataReady = True
855 865 else:
856 866 if (tini - self.__initime) >= self.__integrationtime:
857 867
858 868 self.__dataReady = True
859 869 self.__initime = tini
860 870
861 871 #if (tini.tm_min % 2) == 0 and (tini.tm_sec < 5 and self.fint==0):
862 872
863 873 if self.__dataReady:
864 874
865 875 self.__profIndex = 0
866 876 jspc = self.buffer
867 877 jcspc = self.buffer2
868 878 #jnoise = self.buffer3
869 879 self.buffer = dataOut.data_spc
870 880 self.buffer2 = dataOut.data_cspc
871 881 #self.buffer3 = dataOut.noise
872 882 self.currentTime = dataOut.utctime
873 883 if numpy.any(jspc) :
874 884 #print( jspc.shape, jcspc.shape)
875 885 jspc = numpy.reshape(jspc,(int(len(jspc)/self.nChannels),self.nChannels,self.nFFTPoints,self.nHeights))
876 886 try:
877 887 jcspc= numpy.reshape(jcspc,(int(len(jcspc)/self.nPairs),self.nPairs,self.nFFTPoints,self.nHeights))
878 888 except:
879 889 print("no cspc")
880 890 self.__dataReady = False
881 891 #print( jspc.shape, jcspc.shape)
882 892 dataOut.flagNoData = False
883 893 else:
884 894 dataOut.flagNoData = True
885 895 self.__dataReady = False
886 896 return dataOut
887 897 else:
888 898 #print( len(self.buffer))
889 899 if numpy.any(self.buffer):
890 900 self.buffer = numpy.concatenate((self.buffer,dataOut.data_spc), axis=0)
891 901 try:
892 902 self.buffer2 = numpy.concatenate((self.buffer2,dataOut.data_cspc), axis=0)
893 903 self.buffer3 += dataOut.data_dc
894 904 except:
895 905 pass
896 906 else:
897 907 self.buffer = dataOut.data_spc
898 908 self.buffer2 = dataOut.data_cspc
899 909 self.buffer3 = dataOut.data_dc
900 910 #print self.index, self.fint
901 911 #print self.buffer2.shape
902 912 dataOut.flagNoData = True ## NOTE: ?? review LATER
903 913 self.__profIndex += 1
904 914 return dataOut ## NOTE: REV
905 915
906 916
907 917 #index = tini.tm_hour*12+tini.tm_min/5
908 918 '''
909 919 #REVIEW
910 920 '''
911 921 # jspc = jspc/self.nFFTPoints/self.normFactor
912 922 # jcspc = jcspc/self.nFFTPoints/self.normFactor
913 923
914 924
915 925
916 926 tmp_spectra,tmp_cspectra = self.cleanRayleigh(dataOut,jspc,jcspc,self.factor_stdv)
917 927 dataOut.data_spc = tmp_spectra
918 928 dataOut.data_cspc = tmp_cspectra
919 929
920 930 #dataOut.data_spc,dataOut.data_cspc = self.cleanRayleigh(dataOut,jspc,jcspc,self.factor_stdv)
921 931
922 932 dataOut.data_dc = self.buffer3
923 933 dataOut.nIncohInt *= self.nIntProfiles
924 934 dataOut.max_nIncohInt = self.nIntProfiles
925 935 dataOut.utctime = self.currentTime #averaged time
926 936 #print("Time: ",time.localtime(dataOut.utctime))
927 937 # dataOut.data_spc = sat_spectra
928 938 # dataOut.data_cspc = sat_cspectra
929 939 self.buffer = 0
930 940 self.buffer2 = 0
931 941 self.buffer3 = 0
932 942
933 943 return dataOut
934 944
935 945 def cleanRayleigh(self,dataOut,spectra,cspectra,factor_stdv):
936 946 print("OP cleanRayleigh")
937 947 #import matplotlib.pyplot as plt
938 948 #for k in range(149):
939 949 #channelsProcssd = []
940 950 #channelA_ok = False
941 951 #rfunc = cspectra.copy() #self.bloques
942 952 rfunc = spectra.copy()
943 953 #rfunc = cspectra
944 954 #val_spc = spectra*0.0 #self.bloque0*0.0
945 955 #val_cspc = cspectra*0.0 #self.bloques*0.0
946 956 #in_sat_spectra = spectra.copy() #self.bloque0
947 957 #in_sat_cspectra = cspectra.copy() #self.bloques
948 958
949 959
950 960 ###ONLY FOR TEST:
951 961 raxs = math.ceil(math.sqrt(self.nPairs))
952 962 if raxs == 0:
953 963 raxs = 1
954 964 caxs = math.ceil(self.nPairs/raxs)
955 965 if self.nPairs <4:
956 966 raxs = 2
957 967 caxs = 2
958 968 #print(raxs, caxs)
959 969 fft_rev = 14 #nFFT to plot
960 970 hei_rev = ((self.heights >= 550) & (self.heights <= 551)).nonzero() #hei to plot
961 971 hei_rev = hei_rev[0]
962 972 #print(hei_rev)
963 973
964 974 #print numpy.absolute(rfunc[:,0,0,14])
965 975
966 976 gauss_fit, covariance = None, None
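        # Main loop: for each height/frequency bin, build a histogram of the log-power across
        # the buffered blocks, fit a Gaussian (falling back to the sample mean/std if the fit
        # fails), and flag samples deviating more than factor_stdv * stdv from the fitted mode.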
967 977 for ih in range(self.minAltInd,self.maxAltInd):
968 978 for ifreq in range(self.nFFTPoints):
969 979 '''
970 980 ###ONLY FOR TEST:
971 981 if ifreq ==fft_rev and ih==hei_rev: #TO VIEW A SINGLE FREQUENCY
972 982 fig, axs = plt.subplots(raxs, caxs)
973 983 fig2, axs2 = plt.subplots(raxs, caxs)
974 984 col_ax = 0
975 985 row_ax = 0
976 986 '''
977 987 #print(self.nPairs)
978 988 for ii in range(self.nChan): #SELF AND CROSS CHANNEL PAIRS
979 989 # if self.crosspairs[ii][1]-self.crosspairs[ii][0] > 1: # APPLY ONLY TO ADJACENT PAIRS
980 990 # continue
981 991 # if not self.crosspairs[ii][0] in channelsProcssd:
982 992 # channelA_ok = True
983 993 #print("pair: ",self.crosspairs[ii])
984 994 '''
985 995 ###ONLY FOR TEST:
986 996 if (col_ax%caxs==0 and col_ax!=0 and self.nPairs !=1):
987 997 col_ax = 0
988 998 row_ax += 1
989 999 '''
990 1000 func2clean = 10*numpy.log10(numpy.absolute(rfunc[:,ii,ifreq,ih])) #Power?
991 1001 #print(func2clean.shape)
992 1002 val = (numpy.isfinite(func2clean)==True).nonzero()
993 1003
994 1004 if len(val[0])>0: #limiter
995 1005 min_val = numpy.around(numpy.amin(func2clean)-2) #> (-40)
996 1006 if min_val <= -40 :
997 1007 min_val = -40
998 1008 max_val = numpy.around(numpy.amax(func2clean)+2) #< 200
999 1009 if max_val >= 200 :
1000 1010 max_val = 200
1001 1011 #print min_val, max_val
1002 1012 step = 1
1003 1013 #print("Getting bins and the histogram")
1004 1014 x_dist = min_val + numpy.arange(1 + ((max_val-(min_val))/step))*step
1005 1015 y_dist,binstep = numpy.histogram(func2clean,bins=range(int(min_val),int(max_val+2),step))
1006 1016 #print(len(y_dist),len(binstep[:-1]))
1007 1017 #print(row_ax,col_ax, " ..")
1008 1018 #print(self.pairsArray[ii][0],self.pairsArray[ii][1])
1009 1019 mean = numpy.sum(x_dist * y_dist) / numpy.sum(y_dist)
1010 1020 sigma = numpy.sqrt(numpy.sum(y_dist * (x_dist - mean)**2) / numpy.sum(y_dist))
1011 1021 parg = [numpy.amax(y_dist),mean,sigma]
1012 1022
1013 1023 newY = None
1014 1024
1015 1025 try :
1016 1026 gauss_fit, covariance = curve_fit(fit_func, x_dist, y_dist,p0=parg)
1017 1027 mode = gauss_fit[1]
1018 1028 stdv = gauss_fit[2]
1019 1029 #print(" FIT OK",gauss_fit)
1020 1030 '''
1021 1031 ###ONLY FOR TEST:
1022 1032 if ifreq ==fft_rev and ih==hei_rev: #TO VIEW A SINGLE FREQUENCY
1023 1033 newY = fit_func(x_dist,gauss_fit[0],gauss_fit[1],gauss_fit[2])
1024 1034 axs[row_ax,col_ax].plot(binstep[:-1],y_dist,color='green')
1025 1035 axs[row_ax,col_ax].plot(binstep[:-1],newY,color='red')
1026 1036 axs[row_ax,col_ax].set_title("CH "+str(self.channels[ii]))
1027 1037 '''
1028 1038 except:
1029 1039 mode = mean
1030 1040 stdv = sigma
1031 1041 #print("FIT FAIL")
1032 1042 #continue
1033 1043
1034 1044
1035 1045 #print(mode,stdv)
1036 1046 #Removing echoes greater than mode + std_factor*stdv
1037 1047 noval = (abs(func2clean - mode)>=(factor_stdv*stdv)).nonzero()
1038 1048 #noval holds the indexes to be removed
1039 1049 #print("Chan ",ii," novals: ",len(noval[0]))
1040 1050 if len(noval[0]) > 0: #an array of shape (N,) has length N
1041 1051 novall = ((func2clean - mode) >= (factor_stdv*stdv)).nonzero()
1042 1052 #print(novall)
1043 1053 #print(" ",self.pairsArray[ii])
1044 1054 #cross_pairs = self.pairsArray[ii]
1045 1055 #Getting coherent echoes which are removed.
1046 1056 # if len(novall[0]) > 0:
1047 1057 #
1048 1058 # val_spc[novall[0],cross_pairs[0],ifreq,ih] = 1
1049 1059 # val_spc[novall[0],cross_pairs[1],ifreq,ih] = 1
1050 1060 # val_cspc[novall[0],ii,ifreq,ih] = 1
1051 1061 #print("OUT NOVALL 1")
1052 1062 try:
1053 1063 pair = (self.channels[ii],self.channels[ii + 1])
1054 1064 except:
1055 1065 pair = (99,99)
1056 1066 #print("par ", pair)
1057 1067 if ( pair in self.crosspairs):
1058 1068 q = self.crosspairs.index(pair)
1059 1069 #print("is here: ", q, (ii,ii + 1))
1060 1070 new_a = numpy.delete(cspectra[:,q,ifreq,ih], noval[0])
1061 1071 cspectra[noval,q,ifreq,ih] = numpy.mean(new_a) #mean CrossSpectra
1062 1072
1063 1073 #if channelA_ok:
1064 1074 #chA = self.channels.index(cross_pairs[0])
1065 1075 new_b = numpy.delete(spectra[:,ii,ifreq,ih], noval[0])
1066 1076 spectra[noval,ii,ifreq,ih] = numpy.mean(new_b) #mean Spectra Pair A
1067 1077 #channelA_ok = False
1068 1078
1069 1079 # chB = self.channels.index(cross_pairs[1])
1070 1080 # new_c = numpy.delete(spectra[:,chB,ifreq,ih], noval[0])
1071 1081 # spectra[noval,chB,ifreq,ih] = numpy.mean(new_c) #mean Spectra Pair B
1072 1082 #
1073 1083 # channelsProcssd.append(self.crosspairs[ii][0]) # save channel A
1074 1084 # channelsProcssd.append(self.crosspairs[ii][1]) # save channel B
1075 1085 '''
1076 1086 ###ONLY FOR TEST:
1077 1087 if ifreq ==fft_rev and ih==hei_rev: #TO VIEW A SINGLE FREQUENCY
1078 1088 func2clean = 10*numpy.log10(numpy.absolute(spectra[:,ii,ifreq,ih]))
1079 1089 y_dist,binstep = numpy.histogram(func2clean,bins=range(int(min_val),int(max_val+2),step))
1080 1090 axs2[row_ax,col_ax].plot(binstep[:-1],newY,color='red')
1081 1091 axs2[row_ax,col_ax].plot(binstep[:-1],y_dist,color='green')
1082 1092 axs2[row_ax,col_ax].set_title("CH "+str(self.channels[ii]))
1083 1093 '''
1084 1094 '''
1085 1095 ###ONLY FOR TEST:
1086 1096 col_ax += 1 #contador de ploteo columnas
1087 1097 ##print(col_ax)
1088 1098 ###ONLY FOR TEST:
1089 1099 if ifreq ==fft_rev and ih==hei_rev: #TO VIEW A SINGLE FREQUENCY
1090 1100 title = str(dataOut.datatime)+" nFFT: "+str(ifreq)+" Alt: "+str(self.heights[ih])+ " km"
1091 1101 title2 = str(dataOut.datatime)+" nFFT: "+str(ifreq)+" Alt: "+str(self.heights[ih])+ " km CLEANED"
1092 1102 fig.suptitle(title)
1093 1103 fig2.suptitle(title2)
1094 1104 plt.show()
1095 1105 '''
1096 1106 ##################################################################################################
1097 1107
1098 1108 #print("Getting average of the spectra and cross-spectra from incoherent echoes.")
1099 1109 out_spectra = numpy.zeros([self.nChan,self.nFFTPoints,self.nHeights], dtype=float) #+numpy.nan
1100 1110 out_cspectra = numpy.zeros([self.nPairs,self.nFFTPoints,self.nHeights], dtype=complex) #+numpy.nan
1101 1111 for ih in range(self.nHeights):
1102 1112 for ifreq in range(self.nFFTPoints):
1103 1113 for ich in range(self.nChan):
1104 1114 tmp = spectra[:,ich,ifreq,ih]
1105 1115 valid = (numpy.isfinite(tmp[:])==True).nonzero()
1106 1116
1107 1117 if len(valid[0]) >0 :
1108 1118 out_spectra[ich,ifreq,ih] = numpy.nansum(tmp)#/len(valid[0])
1109 1119
1110 1120 for icr in range(self.nPairs):
1111 1121 tmp = numpy.squeeze(cspectra[:,icr,ifreq,ih])
1112 1122 valid = (numpy.isfinite(tmp)==True).nonzero()
1113 1123 if len(valid[0]) > 0:
1114 1124 out_cspectra[icr,ifreq,ih] = numpy.nansum(tmp)#/len(valid[0])
1115 1125
1116 1126 return out_spectra, out_cspectra
1117 1127
1118 1128 def REM_ISOLATED_POINTS(self,array,rth):
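        # For each 2-D (profiles x heights) map, keep only finite, positive points; maps with
        # fewer than 4 such points are zeroed, and points with fewer than 4 neighbours within
        # a radius rth (default 4) in the (time, height) plane are discarded as isolated.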
1119 1129 # import matplotlib.pyplot as plt
1120 1130 if rth == None :
1121 1131 rth = 4
1122 1132 #print("REM ISO")
1123 1133 num_prof = len(array[0,:,0])
1124 1134 num_hei = len(array[0,0,:])
1125 1135 n2d = len(array[:,0,0])
1126 1136
1127 1137 for ii in range(n2d) :
1128 1138 #print ii,n2d
1129 1139 tmp = array[ii,:,:]
1130 1140 #print tmp.shape, array[ii,101,:],array[ii,102,:]
1131 1141
1132 1142 # fig = plt.figure(figsize=(6,5))
1133 1143 # left, bottom, width, height = 0.1, 0.1, 0.8, 0.8
1134 1144 # ax = fig.add_axes([left, bottom, width, height])
1135 1145 # x = range(num_prof)
1136 1146 # y = range(num_hei)
1137 1147 # cp = ax.contour(y,x,tmp)
1138 1148 # ax.clabel(cp, inline=True,fontsize=10)
1139 1149 # plt.show()
1140 1150
1141 1151 #indxs = WHERE(FINITE(tmp) AND tmp GT 0,cindxs)
1142 1152 tmp = numpy.reshape(tmp,num_prof*num_hei)
1143 1153 indxs1 = (numpy.isfinite(tmp)==True).nonzero()
1144 1154 indxs2 = (tmp > 0).nonzero()
1145 1155
1146 1156 indxs1 = (indxs1[0])
1147 1157 indxs2 = indxs2[0]
1148 1158 #indxs1 = numpy.array(indxs1[0])
1149 1159 #indxs2 = numpy.array(indxs2[0])
1150 1160 indxs = None
1151 1161 #print indxs1 , indxs2
1152 1162 for iv in range(len(indxs2)):
1153 1163 indv = numpy.array((indxs1 == indxs2[iv]).nonzero())
1154 1164 #print len(indxs2), indv
1155 1165 if len(indv[0]) > 0 :
1156 1166 indxs = numpy.concatenate((indxs,indxs2[iv]), axis=None)
1157 1167 # print indxs
1158 1168 indxs = indxs[1:]
1159 1169 #print(indxs, len(indxs))
1160 1170 if len(indxs) < 4 :
1161 1171 array[ii,:,:] = 0.
1162 1172 return
1163 1173
1164 1174 xpos = numpy.mod(indxs ,num_hei)
1165 1175 ypos = (indxs / num_hei)
1166 1176 sx = numpy.argsort(xpos) # Ordering respect to "x" (time)
1167 1177 #print sx
1168 1178 xpos = xpos[sx]
1169 1179 ypos = ypos[sx]
1170 1180
1171 1181 # *********************************** Cleaning isolated points **********************************
1172 1182 ic = 0
1173 1183 while True :
1174 1184 r = numpy.sqrt(list(numpy.power((xpos[ic]-xpos),2)+ numpy.power((ypos[ic]-ypos),2)))
1175 1185 #no_coh = WHERE(FINITE(r) AND (r LE rth),cno_coh)
1176 1186 #plt.plot(r)
1177 1187 #plt.show()
1178 1188 no_coh1 = (numpy.isfinite(r)==True).nonzero()
1179 1189 no_coh2 = (r <= rth).nonzero()
1180 1190 #print r, no_coh1, no_coh2
1181 1191 no_coh1 = numpy.array(no_coh1[0])
1182 1192 no_coh2 = numpy.array(no_coh2[0])
1183 1193 no_coh = None
1184 1194 #print valid1 , valid2
1185 1195 for iv in range(len(no_coh2)):
1186 1196 indv = numpy.array((no_coh1 == no_coh2[iv]).nonzero())
1187 1197 if len(indv[0]) > 0 :
1188 1198 no_coh = numpy.concatenate((no_coh,no_coh2[iv]), axis=None)
1189 1199 no_coh = no_coh[1:]
1190 1200 #print len(no_coh), no_coh
1191 1201 if len(no_coh) < 4 :
1192 1202 #print xpos[ic], ypos[ic], ic
1193 1203 # plt.plot(r)
1194 1204 # plt.show()
1195 1205 xpos[ic] = numpy.nan
1196 1206 ypos[ic] = numpy.nan
1197 1207
1198 1208 ic = ic + 1
1199 1209 if (ic == len(indxs)) :
1200 1210 break
1201 1211 #print( xpos, ypos)
1202 1212
1203 1213 indxs = (numpy.isfinite(list(xpos))==True).nonzero()
1204 1214 #print indxs[0]
1205 1215 if len(indxs[0]) < 4 :
1206 1216 array[ii,:,:] = 0.
1207 1217 return
1208 1218
1209 1219 xpos = xpos[indxs[0]]
1210 1220 ypos = ypos[indxs[0]]
1211 1221 for i in range(0,len(ypos)):
1212 1222 ypos[i]=int(ypos[i])
1213 1223 junk = tmp
1214 1224 tmp = junk*0.0
1215 1225
1216 1226 tmp[list(xpos + (ypos*num_hei))] = junk[list(xpos + (ypos*num_hei))]
1217 1227 array[ii,:,:] = numpy.reshape(tmp,(num_prof,num_hei))
1218 1228
1219 1229 #print array.shape
1220 1230 #tmp = numpy.reshape(tmp,(num_prof,num_hei))
1221 1231 #print tmp.shape
1222 1232
1223 1233 # fig = plt.figure(figsize=(6,5))
1224 1234 # left, bottom, width, height = 0.1, 0.1, 0.8, 0.8
1225 1235 # ax = fig.add_axes([left, bottom, width, height])
1226 1236 # x = range(num_prof)
1227 1237 # y = range(num_hei)
1228 1238 # cp = ax.contour(y,x,array[ii,:,:])
1229 1239 # ax.clabel(cp, inline=True,fontsize=10)
1230 1240 # plt.show()
1231 1241 return array
1232 1242
1233 1243
1234 1244 class IntegrationFaradaySpectra(Operation):
1235 1245
1236 1246 __profIndex = 0
1237 1247 __withOverapping = False
1238 1248
1239 1249 __byTime = False
1240 1250 __initime = None
1241 1251 __lastdatatime = None
1242 1252 __integrationtime = None
1243 1253
1244 1254 __buffer_spc = None
1245 1255 __buffer_cspc = None
1246 1256 __buffer_dc = None
1247 1257
1248 1258 __dataReady = False
1249 1259
1250 1260 __timeInterval = None
1251 1261 n_ints = None #matrix with the number of integrations (CH, HEI)
1252 1262 n = None
1253 1263 minHei_ind = None
1254 1264 maxHei_ind = None
1255 1265 navg = 1.0
1256 1266 factor = 0.0
1257 1267 dataoutliers = None # (CHANNELS, HEIGHTS)
1258 1268
1259 1269 def __init__(self):
1260 1270
1261 1271 Operation.__init__(self)
1262 1272
1263 1273 def setup(self, dataOut,n=None, timeInterval=None, overlapping=False, DPL=None, minHei=None, maxHei=None, avg=1,factor=0.75):
1264 1274 """
1265 1275 Set the parameters of the integration class.
1266 1276
1267 1277 Inputs:
1268 1278
1269 1279 n : Number of incoherent integrations
1270 1280 timeInterval : Integration time in seconds. Ignored when the parameter "n" is given.
1271 1281 overlapping :
1272 1282
1273 1283 """
1274 1284
1275 1285 self.__initime = None
1276 1286 self.__lastdatatime = 0
1277 1287
1278 1288 self.__buffer_spc = []
1279 1289 self.__buffer_cspc = []
1280 1290 self.__buffer_dc = 0
1281 1291
1282 1292 self.__profIndex = 0
1283 1293 self.__dataReady = False
1284 1294 self.__byTime = False
1285 1295
1286 1296 self.factor = factor
1287 1297 self.navg = avg
1288 1298 #self.ByLags = dataOut.ByLags ###REDEFINIR
1289 1299 self.ByLags = False
1290 1300 self.maxProfilesInt = 1
1291 1301
1292 1302 if DPL != None:
1293 1303 self.DPL=DPL
1294 1304 else:
1295 1305 #self.DPL=dataOut.DPL ###REDEFINIR
1296 1306 self.DPL=0
1297 1307
1298 1308 if n is None and timeInterval is None:
1299 1309 raise ValueError("n or timeInterval should be specified ...")
1300 1310
1301 1311 if n is not None:
1302 1312 self.n = int(n)
1303 1313 else:
1304 1314 self.__integrationtime = int(timeInterval)
1305 1315 self.n = None
1306 1316 self.__byTime = True
1307 1317
1308 1318 if minHei == None:
1309 1319 minHei = self.dataOut.heightList[0]
1310 1320
1311 1321 if maxHei == None:
1312 1322 maxHei = self.dataOut.heightList[-1]
1313 1323
1314 1324 if (minHei < self.dataOut.heightList[0]) or (minHei > maxHei):
1315 1325 print('minHei: %.2f is out of the heights range' % (minHei))
1316 1326 print('minHei is setting to %.2f' % (self.dataOut.heightList[0]))
1317 1327 minHei = self.dataOut.heightList[0]
1318 1328
1319 1329 if (maxHei > self.dataOut.heightList[-1]) or (maxHei < minHei):
1320 1330 print('maxHei: %.2f is out of the heights range' % (maxHei))
1321 1331 print('maxHei is setting to %.2f' % (self.dataOut.heightList[-1]))
1322 1332 maxHei = self.dataOut.heightList[-1]
1323 1333
1324 1334 ind_list1 = numpy.where(self.dataOut.heightList >= minHei)
1325 1335 ind_list2 = numpy.where(self.dataOut.heightList <= maxHei)
1326 1336 self.minHei_ind = ind_list1[0][0]
1327 1337 self.maxHei_ind = ind_list2[0][-1]
1328 1338
1329 1339 def putData(self, data_spc, data_cspc, data_dc):
1330 1340 """
1331 1341 Add a profile to __buffer_spc and increase __profIndex by one
1332 1342
1333 1343 """
1334 1344
1335 1345 self.__buffer_spc.append(data_spc)
1336 1346
1337 1347 if data_cspc is None:
1338 1348 self.__buffer_cspc = None
1339 1349 else:
1340 1350 self.__buffer_cspc.append(data_cspc)
1341 1351
1342 1352 if data_dc is None:
1343 1353 self.__buffer_dc = None
1344 1354 else:
1345 1355 self.__buffer_dc += data_dc
1346 1356
1347 1357 self.__profIndex += 1
1348 1358
1349 1359 return
1350 1360
1351 1361 def hildebrand_sekhon_Integration(self,sortdata,navg, factor):
1352 1362 #data must already be sorted
1353 1363 #sortdata = numpy.sort(data, axis=None)
1354 1364 #sortID=data.argsort()
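        # Hildebrand-Sekhon style threshold on pre-sorted data: accumulate the running sum and
        # sum of squares while the white-noise criterion  sumq*j <= (j/(j-1) + 1/navg)*sump**2
        # holds (checked only after nums_min samples), and return the index j that separates
        # noise-like samples from outliers.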
1355 1365 lenOfData = len(sortdata)
1356 1366 nums_min = lenOfData*factor
1357 1367 if nums_min <= 5:
1358 1368 nums_min = 5
1359 1369 sump = 0.
1360 1370 sumq = 0.
1361 1371 j = 0
1362 1372 cont = 1
1363 1373 while((cont == 1)and(j < lenOfData)):
1364 1374 sump += sortdata[j]
1365 1375 sumq += sortdata[j]**2
1366 1376 if j > nums_min:
1367 1377 rtest = float(j)/(j-1) + 1.0/navg
1368 1378 if ((sumq*j) > (rtest*sump**2)):
1369 1379 j = j - 1
1370 1380 sump = sump - sortdata[j]
1371 1381 sumq = sumq - sortdata[j]**2
1372 1382 cont = 0
1373 1383 j += 1
1374 1384 #lnoise = sump / j
1375 1385 #print("H S done")
1376 1386 #return j,sortID
1377 1387 return j
1378 1388
1379 1389
1380 1390 def pushData(self):
1381 1391 """
1382 1392 Return the sum of the last profiles and the profiles used in the sum.
1383 1393
1384 1394 Affected:
1385 1395
1386 1396 self.__profileIndex
1387 1397
1388 1398 """
1389 1399 bufferH=None
1390 1400 buffer=None
1391 1401 buffer1=None
1392 1402 buffer_cspc=None
1393 1403 self.__buffer_spc=numpy.array(self.__buffer_spc)
1394 1404 try:
1395 1405 self.__buffer_cspc=numpy.array(self.__buffer_cspc)
1396 1406 except :
1397 1407 #print("No cpsc",e)
1398 1408 pass
1399 1409 #print("FREQ_DC",self.__buffer_spc.shape,self.__buffer_cspc.shape)
1400 1410
1401 1411 freq_dc = int(self.__buffer_spc.shape[2] / 2)
1402 1412 #print("FREQ_DC",freq_dc,self.__buffer_spc.shape,self.nHeights)
1403 1413
1404 1414 self.dataOutliers = numpy.zeros((self.nChannels,self.nHeights)) # --> outlier count storage
1405 1415
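        # For every height in [minHei_ind, maxHei_ind) and every channel, sort the buffered power
        # values of each frequency bin, locate the noise/outlier boundary with
        # _noise.hildebrand_sekhon2, and set the outlier profiles to NaN so the nansum below
        # excludes them; the number of outliers per (channel, height) is kept in self.dataOutliers.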
1406 1416 for k in range(self.minHei_ind,self.maxHei_ind):
1407 1417 try:
1408 1418 buffer_cspc=numpy.copy(self.__buffer_cspc[:,:,:,k])
1409 1419 except:
1410 1420 #print("No cpsc",e)
1411 1421 pass
1412 1422 outliers_IDs_cspc=[]
1413 1423 cspc_outliers_exist=False
1414 1424 for i in range(self.nChannels):#dataOut.nChannels):
1415 1425
1416 1426 buffer1=numpy.copy(self.__buffer_spc[:,i,:,k])
1417 1427 indexes=[]
1418 1428 #sortIDs=[]
1419 1429 outliers_IDs=[]
1420 1430
1421 1431 for j in range(self.nProfiles): #frequencies in time
1422 1432 # if i==0 and j==freq_dc: #NOT CONSIDERING DC PROFILE AT CHANNEL 0
1423 1433 # continue
1424 1434 # if i==1 and j==0: #NOT CONSIDERING DC PROFILE AT CHANNEL 1
1425 1435 # continue
1426 1436 buffer=buffer1[:,j]
1427 1437 sortdata = numpy.sort(buffer, axis=None)
1428 1438
1429 1439 sortID=buffer.argsort()
1430 1440 index = _noise.hildebrand_sekhon2(sortdata,self.navg)
1431 1441
1432 1442 #index,sortID=self.hildebrand_sekhon_Integration(buffer,1,self.factor)
1433 1443
1434 1444 # fig,ax = plt.subplots()
1435 1445 # ax.set_title(str(k)+" "+str(j))
1436 1446 # x=range(len(sortdata))
1437 1447 # ax.scatter(x,sortdata)
1438 1448 # ax.axvline(index)
1439 1449 # plt.show()
1440 1450
1441 1451 indexes.append(index)
1442 1452 #sortIDs.append(sortID)
1443 1453 outliers_IDs=numpy.append(outliers_IDs,sortID[index:])
1444 1454
1445 1455 #print("Outliers: ",outliers_IDs)
1446 1456 outliers_IDs=numpy.array(outliers_IDs)
1447 1457 outliers_IDs=outliers_IDs.ravel()
1448 1458 outliers_IDs=numpy.unique(outliers_IDs)
1449 1459 outliers_IDs=outliers_IDs.astype(numpy.dtype('int64'))
1450 1460 indexes=numpy.array(indexes)
1451 1461 indexmin=numpy.min(indexes)
1452 1462
1453 1463
1454 1464 #print(indexmin,buffer1.shape[0], k)
1455 1465
1456 1466 # fig,ax = plt.subplots()
1457 1467 # ax.plot(sortdata)
1458 1468 # ax2 = ax.twinx()
1459 1469 # x=range(len(indexes))
1460 1470 # #plt.scatter(x,indexes)
1461 1471 # ax2.scatter(x,indexes)
1462 1472 # plt.show()
1463 1473
1464 1474 if indexmin != buffer1.shape[0]:
1465 1475 if self.nChannels > 1:
1466 1476 cspc_outliers_exist= True
1467 1477
1468 1478 lt=outliers_IDs
1469 1479 #avg=numpy.mean(buffer1[[t for t in range(buffer1.shape[0]) if t not in lt],:],axis=0)
1470 1480
1471 1481 for p in list(outliers_IDs):
1472 1482 #buffer1[p,:]=avg
1473 1483 buffer1[p,:] = numpy.NaN
1474 1484
1475 1485 self.dataOutliers[i,k] = len(outliers_IDs)
1476 1486
1477 1487 self.__buffer_spc[:,i,:,k]=numpy.copy(buffer1)
1478 1488
1479 1489 outliers_IDs_cspc=numpy.append(outliers_IDs_cspc,outliers_IDs)
1480 1490
1481 1491
1482 1492
1483 1493 outliers_IDs_cspc=outliers_IDs_cspc.astype(numpy.dtype('int64'))
1484 1494 if cspc_outliers_exist :
1485 1495
1486 1496 lt=outliers_IDs_cspc
1487 1497
1488 1498 #avg=numpy.mean(buffer_cspc[[t for t in range(buffer_cspc.shape[0]) if t not in lt],:],axis=0)
1489 1499 for p in list(outliers_IDs_cspc):
1490 1500 #buffer_cspc[p,:]=avg
1491 1501 buffer_cspc[p,:] = numpy.NaN
1492 1502
1493 1503 try:
1494 1504 self.__buffer_cspc[:,:,:,k]=numpy.copy(buffer_cspc)
1495 1505 except:
1496 1506 #print("No cpsc",e)
1497 1507 pass
1498 1508 #else:
1499 1509 #break
1500 1510
1501 1511
1502 1512
1503 1513 nOutliers = len(outliers_IDs)
1504 1514 #print("Outliers n: ",self.dataOutliers,nOutliers)
1505 1515 buffer=None
1506 1516 bufferH=None
1507 1517 buffer1=None
1508 1518 buffer_cspc=None
1509 1519
1510 1520
1511 1521 buffer=None
1512 1522
1513 1523 #data_spc = numpy.sum(self.__buffer_spc,axis=0)
1514 1524 data_spc = numpy.nansum(self.__buffer_spc,axis=0)
1515 1525 try:
1516 1526 #data_cspc = numpy.sum(self.__buffer_cspc,axis=0)
1517 1527 data_cspc = numpy.nansum(self.__buffer_cspc,axis=0)
1518 1528 except:
1519 1529 #print("No cpsc",e)
1520 1530 pass
1521 1531
1522 1532
1523 1533 data_dc = self.__buffer_dc
1524 1534 #(CH, HEIGHT)
1525 1535 self.maxProfilesInt = self.__profIndex
1526 1536 n = self.__profIndex - self.dataOutliers # n becomes a matrix
1527 1537
1528 1538 self.__buffer_spc = []
1529 1539 self.__buffer_cspc = []
1530 1540 self.__buffer_dc = 0
1531 1541 self.__profIndex = 0
1532 1542
1533 1543 return data_spc, data_cspc, data_dc, n
1534 1544
1535 1545 def byProfiles(self, *args):
1536 1546
1537 1547 self.__dataReady = False
1538 1548 avgdata_spc = None
1539 1549 avgdata_cspc = None
1540 1550 avgdata_dc = None
1541 1551
1542 1552 self.putData(*args)
1543 1553
1544 1554 if self.__profIndex == self.n:
1545 1555
1546 1556 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
1547 1557 self.n_ints = n
1548 1558 self.__dataReady = True
1549 1559
1550 1560 return avgdata_spc, avgdata_cspc, avgdata_dc
1551 1561
1552 1562 def byTime(self, datatime, *args):
1553 1563
1554 1564 self.__dataReady = False
1555 1565 avgdata_spc = None
1556 1566 avgdata_cspc = None
1557 1567 avgdata_dc = None
1558 1568
1559 1569 self.putData(*args)
1560 1570
1561 1571 if (datatime - self.__initime) >= self.__integrationtime:
1562 1572 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
1563 1573 self.n_ints = n
1564 1574 self.__dataReady = True
1565 1575
1566 1576 return avgdata_spc, avgdata_cspc, avgdata_dc
1567 1577
1568 1578 def integrate(self, datatime, *args):
1569 1579
1570 1580 if self.__profIndex == 0:
1571 1581 self.__initime = datatime
1572 1582
1573 1583 if self.__byTime:
1574 1584 avgdata_spc, avgdata_cspc, avgdata_dc = self.byTime(
1575 1585 datatime, *args)
1576 1586 else:
1577 1587 avgdata_spc, avgdata_cspc, avgdata_dc = self.byProfiles(*args)
1578 1588
1579 1589 if not self.__dataReady:
1580 1590 return None, None, None, None
1581 1591
1582 1592 return self.__initime, avgdata_spc, avgdata_cspc, avgdata_dc
1583 1593
1584 1594 def run(self, dataOut, n=None, DPL = None,timeInterval=None, overlapping=False, minHei=None, maxHei=None, avg=1, factor=0.75):
1585 1595 self.dataOut = dataOut
1586 1596 if n == 1:
1587 1597 return self.dataOut
1588 1598
1589 1599
1590 1600 if self.dataOut.nChannels == 1:
1591 1601 self.dataOut.data_cspc = None #with a single channel there is no point in accumulating cross-spectra
1592 1602 #print(self.dataOut.data_spc.shape, self.dataOut.data_cspc)
1593 1603 if not self.isConfig:
1594 1604 self.setup(self.dataOut, n, timeInterval, overlapping,DPL ,minHei, maxHei, avg, factor)
1595 1605 self.isConfig = True
1596 1606
1597 1607 if not self.ByLags:
1598 1608 self.nProfiles=self.dataOut.nProfiles
1599 1609 self.nChannels=self.dataOut.nChannels
1600 1610 self.nHeights=self.dataOut.nHeights
1601 1611 avgdatatime, avgdata_spc, avgdata_cspc, avgdata_dc = self.integrate(self.dataOut.utctime,
1602 1612 self.dataOut.data_spc,
1603 1613 self.dataOut.data_cspc,
1604 1614 self.dataOut.data_dc)
1605 1615 else:
1606 1616 self.nProfiles=self.dataOut.nProfiles
1607 1617 self.nChannels=self.dataOut.nChannels
1608 1618 self.nHeights=self.dataOut.nHeights
1609 1619 avgdatatime, avgdata_spc, avgdata_cspc, avgdata_dc = self.integrate(self.dataOut.utctime,
1610 1620 self.dataOut.dataLag_spc,
1611 1621 self.dataOut.dataLag_cspc,
1612 1622 self.dataOut.dataLag_dc)
1613 1623 self.dataOut.flagNoData = True
1614 1624 if self.__dataReady:
1615 1625
1616 1626 if not self.ByLags:
1617 1627 if self.nChannels == 1:
1618 1628 #print("f int", avgdata_spc.shape)
1619 1629 self.dataOut.data_spc = avgdata_spc
1620 1630 self.dataOut.data_cspc = avgdata_spc
1621 1631 else:
1622 1632 self.dataOut.data_spc = numpy.squeeze(avgdata_spc)
1623 1633 self.dataOut.data_cspc = numpy.squeeze(avgdata_cspc)
1624 1634 self.dataOut.data_dc = avgdata_dc
1625 1635 self.dataOut.data_outlier = self.dataOutliers
1626 1636
1627 1637 else:
1628 1638 self.dataOut.dataLag_spc = avgdata_spc
1629 1639 self.dataOut.dataLag_cspc = avgdata_cspc
1630 1640 self.dataOut.dataLag_dc = avgdata_dc
1631 1641
1632 1642 self.dataOut.data_spc=self.dataOut.dataLag_spc[:,:,:,self.dataOut.LagPlot]
1633 1643 self.dataOut.data_cspc=self.dataOut.dataLag_cspc[:,:,:,self.dataOut.LagPlot]
1634 1644 self.dataOut.data_dc=self.dataOut.dataLag_dc[:,:,self.dataOut.LagPlot]
1635 1645
1636 1646
1637 1647 self.dataOut.nIncohInt *= self.n_ints
1638 1648 self.dataOut.max_nIncohInt = self.maxProfilesInt
1639 1649 #print(self.dataOut.max_nIncohInt)
1640 1650 self.dataOut.utctime = avgdatatime
1641 1651 self.dataOut.flagNoData = False
1642 1652 #print("Faraday Integration DONE...")
1643 1653 #print(self.dataOut.flagNoData)
1644 1654 return self.dataOut
1645 1655
1646 1656 class removeInterference(Operation):
1647 1657
1648 1658 def removeInterference2(self):
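        # Heuristic interference removal on the cross-spectra: low-power heights are used to build
        # a per-profile interference estimate, and profiles whose estimate exceeds its 98th
        # percentile are set to NaN (only when they amount to less than ~30% of the profiles).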
1649 1659
1650 1660 cspc = self.dataOut.data_cspc
1651 1661 spc = self.dataOut.data_spc
1652 1662 Heights = numpy.arange(cspc.shape[2])
1653 1663 realCspc = numpy.abs(cspc)
1654 1664
1655 1665 for i in range(cspc.shape[0]):
1656 1666 LinePower= numpy.sum(realCspc[i], axis=0)
1657 1667 Threshold = numpy.amax(LinePower)-numpy.sort(LinePower)[len(Heights)-int(len(Heights)*0.1)]
1658 1668 SelectedHeights = Heights[ numpy.where( LinePower < Threshold ) ]
1659 1669 InterferenceSum = numpy.sum( realCspc[i,:,SelectedHeights], axis=0 )
1660 1670 InterferenceThresholdMin = numpy.sort(InterferenceSum)[int(len(InterferenceSum)*0.98)]
1661 1671 InterferenceThresholdMax = numpy.sort(InterferenceSum)[int(len(InterferenceSum)*0.99)]
1662 1672
1663 1673
1664 1674 InterferenceRange = numpy.where( ([InterferenceSum > InterferenceThresholdMin]))# , InterferenceSum < InterferenceThresholdMax]) )
1665 1675 #InterferenceRange = numpy.where( ([InterferenceRange < InterferenceThresholdMax]))
1666 1676 if len(InterferenceRange)<int(cspc.shape[1]*0.3):
1667 1677 cspc[i,InterferenceRange,:] = numpy.NaN
1668 1678
1669 1679 self.dataOut.data_cspc = cspc
1670 1680
1671 1681 def removeInterference(self, interf = 2, hei_interf = None, nhei_interf = None, offhei_interf = None):
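        # For each channel, an average interference spectrum is estimated from the nhei_interf
        # lowest-power heights and subtracted from every height; the most contaminated profile is
        # then rebuilt by polynomial interpolation from its neighbours. The same procedure is
        # repeated for the cross-spectra when they are present.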
1672 1682
1673 1683 jspectra = self.dataOut.data_spc
1674 1684 jcspectra = self.dataOut.data_cspc
1675 1685 jnoise = self.dataOut.getNoise()
1676 1686 num_incoh = self.dataOut.nIncohInt
1677 1687
1678 1688 num_channel = jspectra.shape[0]
1679 1689 num_prof = jspectra.shape[1]
1680 1690 num_hei = jspectra.shape[2]
1681 1691
1682 1692 # hei_interf
1683 1693 if hei_interf is None:
1684 1694 count_hei = int(num_hei / 2)
1685 1695 hei_interf = numpy.asmatrix(list(range(count_hei))) + num_hei - count_hei
1686 1696 hei_interf = numpy.asarray(hei_interf)[0]
1687 1697 # nhei_interf
1688 1698 if (nhei_interf == None):
1689 1699 nhei_interf = 5
1690 1700 if (nhei_interf < 1):
1691 1701 nhei_interf = 1
1692 1702 if (nhei_interf > count_hei):
1693 1703 nhei_interf = count_hei
1694 1704 if (offhei_interf == None):
1695 1705 offhei_interf = 0
1696 1706
1697 1707 ind_hei = list(range(num_hei))
1698 1708 # mask_prof = numpy.asarray(range(num_prof - 2)) + 1
1699 1709 # mask_prof[range(num_prof/2 - 1,len(mask_prof))] += 1
1700 1710 mask_prof = numpy.asarray(list(range(num_prof)))
1701 1711 num_mask_prof = mask_prof.size
1702 1712 comp_mask_prof = [0, num_prof / 2]
1703 1713
1704 1714 # noise_exist: determines whether jnoise has been defined and holds the noise information for each channel
1705 1715 if (jnoise.size < num_channel or numpy.isnan(jnoise).any()):
1706 1716 jnoise = numpy.nan
1707 1717 noise_exist = jnoise[0] < numpy.Inf
1708 1718
1709 1719 # Interference removal subroutine
1710 1720 for ich in range(num_channel):
1711 1721 # Sort the spectra by power (lowest to highest)
1712 1722 power = jspectra[ich, mask_prof, :]
1713 1723 power = power[:, hei_interf]
1714 1724 power = power.sum(axis=0)
1715 1725 psort = power.ravel().argsort()
1716 1726
1717 1727 # Estimate the average interference in the power spectra using the lowest-power heights
1718 1728 junkspc_interf = jspectra[ich, :, hei_interf[psort[list(range(
1719 1729 offhei_interf, nhei_interf + offhei_interf))]]]
1720 1730
1721 1731 if noise_exist:
1722 1732 # tmp_noise = jnoise[ich] / num_prof
1723 1733 tmp_noise = jnoise[ich]
1724 1734 junkspc_interf = junkspc_interf - tmp_noise
1725 1735 #junkspc_interf[:,comp_mask_prof] = 0
1726 1736
1727 1737 jspc_interf = junkspc_interf.sum(axis=0) / nhei_interf
1728 1738 jspc_interf = jspc_interf.transpose()
1729 1739 # Compute the average interference spectrum
1730 1740 noiseid = numpy.where(
1731 1741 jspc_interf <= tmp_noise / numpy.sqrt(num_incoh))
1732 1742 noiseid = noiseid[0]
1733 1743 cnoiseid = noiseid.size
1734 1744 interfid = numpy.where(
1735 1745 jspc_interf > tmp_noise / numpy.sqrt(num_incoh))
1736 1746 interfid = interfid[0]
1737 1747 cinterfid = interfid.size
1738 1748
1739 1749 if (cnoiseid > 0):
1740 1750 jspc_interf[noiseid] = 0
1741 1751
1742 1752 # Expand the set of profiles to clean
1743 1753 if (cinterfid > 0):
1744 1754 new_interfid = (
1745 1755 numpy.r_[interfid - 1, interfid, interfid + 1] + num_prof) % num_prof
1746 1756 new_interfid = numpy.asarray(new_interfid)
1747 1757 new_interfid = {x for x in new_interfid}
1748 1758 new_interfid = numpy.array(list(new_interfid))
1749 1759 new_cinterfid = new_interfid.size
1750 1760 else:
1751 1761 new_cinterfid = 0
1752 1762
1753 1763 for ip in range(new_cinterfid):
1754 1764 ind = junkspc_interf[:, new_interfid[ip]].ravel().argsort()
1755 1765 jspc_interf[new_interfid[ip]
1756 1766 ] = junkspc_interf[ind[nhei_interf // 2], new_interfid[ip]]
1757 1767
1758 1768 jspectra[ich, :, ind_hei] = jspectra[ich, :,
1759 1769 ind_hei] - jspc_interf # fix indices
1760 1770
1761 1771 # Remove the interference at the most contaminated profile
1762 1772 ListAux = jspc_interf[mask_prof].tolist()
1763 1773 maxid = ListAux.index(max(ListAux))
1764 1774
1765 1775 if cinterfid > 0:
1766 1776 for ip in range(cinterfid * (interf == 2) - 1):
1767 1777 ind = (jspectra[ich, interfid[ip], :] < tmp_noise *
1768 1778 (1 + 1 / numpy.sqrt(num_incoh))).nonzero()
1769 1779 cind = len(ind)
1770 1780
1771 1781 if (cind > 0):
1772 1782 jspectra[ich, interfid[ip], ind] = tmp_noise * \
1773 1783 (1 + (numpy.random.uniform(cind) - 0.5) /
1774 1784 numpy.sqrt(num_incoh))
1775 1785
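                # Rebuild the most contaminated profile (maxid) by fitting a cubic through the
                # neighbouring profiles at offsets -2, -1, +1, +2 (Vandermonde system xx) and
                # evaluating it at offset 0.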
1776 1786 ind = numpy.array([-2, -1, 1, 2])
1777 1787 xx = numpy.zeros([4, 4])
1778 1788
1779 1789 for id1 in range(4):
1780 1790 xx[:, id1] = ind[id1]**numpy.asarray(list(range(4)))
1781 1791
1782 1792 xx_inv = numpy.linalg.inv(xx)
1783 1793 xx = xx_inv[:, 0]
1784 1794 ind = (ind + maxid + num_mask_prof) % num_mask_prof
1785 1795 yy = jspectra[ich, mask_prof[ind], :]
1786 1796 jspectra[ich, mask_prof[maxid], :] = numpy.dot(
1787 1797 yy.transpose(), xx)
1788 1798
1789 1799 indAux = (jspectra[ich, :, :] < tmp_noise *
1790 1800 (1 - 1 / numpy.sqrt(num_incoh))).nonzero()
1791 1801 jspectra[ich, indAux[0], indAux[1]] = tmp_noise * \
1792 1802 (1 - 1 / numpy.sqrt(num_incoh))
1793 1803
1794 1804 # Interference removal in the cross-spectra
1795 1805 if jcspectra is None:
1796 1806 return jspectra, jcspectra
1797 1807 num_pairs = int(jcspectra.size / (num_prof * num_hei))
1798 1808 jcspectra = jcspectra.reshape(num_pairs, num_prof, num_hei)
1799 1809
1800 1810 for ip in range(num_pairs):
1801 1811
1802 1812 #-------------------------------------------
1803 1813
1804 1814 cspower = numpy.abs(jcspectra[ip, mask_prof, :])
1805 1815 cspower = cspower[:, hei_interf]
1806 1816 cspower = cspower.sum(axis=0)
1807 1817
1808 1818 cspsort = cspower.ravel().argsort()
1809 1819 junkcspc_interf = jcspectra[ip, :, hei_interf[cspsort[list(range(
1810 1820 offhei_interf, nhei_interf + offhei_interf))]]]
1811 1821 junkcspc_interf = junkcspc_interf.transpose()
1812 1822 jcspc_interf = junkcspc_interf.sum(axis=1) / nhei_interf
1813 1823
1814 1824 ind = numpy.abs(jcspc_interf[mask_prof]).ravel().argsort()
1815 1825
1816 1826 median_real = int(numpy.median(numpy.real(
1817 1827 junkcspc_interf[mask_prof[ind[list(range(3 * num_prof // 4))]], :])))
1818 1828 median_imag = int(numpy.median(numpy.imag(
1819 1829 junkcspc_interf[mask_prof[ind[list(range(3 * num_prof // 4))]], :])))
1820 1830 comp_mask_prof = [int(e) for e in comp_mask_prof]
1821 1831 junkcspc_interf[comp_mask_prof, :] = numpy.complex(
1822 1832 median_real, median_imag)
1823 1833
1824 1834 for iprof in range(num_prof):
1825 1835 ind = numpy.abs(junkcspc_interf[iprof, :]).ravel().argsort()
1826 1836 jcspc_interf[iprof] = junkcspc_interf[iprof, ind[nhei_interf // 2]]
1827 1837
1828 1838 # Remove the interference
1829 1839 jcspectra[ip, :, ind_hei] = jcspectra[ip,
1830 1840 :, ind_hei] - jcspc_interf
1831 1841
1832 1842 ListAux = numpy.abs(jcspc_interf[mask_prof]).tolist()
1833 1843 maxid = ListAux.index(max(ListAux))
1834 1844
1835 1845 ind = numpy.array([-2, -1, 1, 2])
1836 1846 xx = numpy.zeros([4, 4])
1837 1847
1838 1848 for id1 in range(4):
1839 1849 xx[:, id1] = ind[id1]**numpy.asarray(list(range(4)))
1840 1850
1841 1851 xx_inv = numpy.linalg.inv(xx)
1842 1852 xx = xx_inv[:, 0]
1843 1853
1844 1854 ind = (ind + maxid + num_mask_prof) % num_mask_prof
1845 1855 yy = jcspectra[ip, mask_prof[ind], :]
1846 1856 jcspectra[ip, mask_prof[maxid], :] = numpy.dot(yy.transpose(), xx)
1847 1857
1848 1858 # Save results
1849 1859 self.dataOut.data_spc = jspectra
1850 1860 self.dataOut.data_cspc = jcspectra
1851 1861
1852 1862 return 1
1853 1863
1854 1864 def run(self, dataOut, interf = 2,hei_interf = None, nhei_interf = None, offhei_interf = None, mode=1):
1855 1865
1856 1866 self.dataOut = dataOut
1857 1867
1858 1868 if mode == 1:
1859 1869 self.removeInterference(interf=interf, hei_interf=hei_interf, nhei_interf=nhei_interf, offhei_interf=offhei_interf)
1860 1870 elif mode == 2:
1861 1871 self.removeInterference2()
1862 1872
1863 1873 return self.dataOut
1864 1874
1865 1875
1866 1876 class IncohInt(Operation):
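    # Incoherent integration of spectra: sums data_spc/data_cspc/data_dc over n profiles or over a
    # time interval, accumulating nIncohInt and the outlier counts carried by the incoming dataOut.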
1867 1877
1868 1878 __profIndex = 0
1869 1879 __withOverapping = False
1870 1880
1871 1881 __byTime = False
1872 1882 __initime = None
1873 1883 __lastdatatime = None
1874 1884 __integrationtime = None
1875 1885
1876 1886 __buffer_spc = None
1877 1887 __buffer_cspc = None
1878 1888 __buffer_dc = None
1879 1889
1880 1890 __dataReady = False
1881 1891
1882 1892 __timeInterval = None
1883 1893 incohInt = 0
1884 1894 nOutliers = 0
1885 1895 n = None
1886 1896
1887 1897 def __init__(self):
1888 1898
1889 1899 Operation.__init__(self)
1890 1900
1891 1901 def setup(self, n=None, timeInterval=None, overlapping=False):
1892 1902 """
1893 1903 Set the parameters of the integration class.
1894 1904
1895 1905 Inputs:
1896 1906
1897 1907 n : Number of incoherent integrations
1898 1908 timeInterval : Integration time in seconds. Ignored when the parameter "n" is given.
1899 1909 overlapping :
1900 1910
1901 1911 """
1902 1912
1903 1913 self.__initime = None
1904 1914 self.__lastdatatime = 0
1905 1915
1906 1916 self.__buffer_spc = 0
1907 1917 self.__buffer_cspc = 0
1908 1918 self.__buffer_dc = 0
1909 1919
1910 1920 self.__profIndex = 0
1911 1921 self.__dataReady = False
1912 1922 self.__byTime = False
1913 1923 self.incohInt = 0
1914 1924 self.nOutliers = 0
1915 1925 if n is None and timeInterval is None:
1916 1926 raise ValueError("n or timeInterval should be specified ...")
1917 1927
1918 1928 if n is not None:
1919 1929 self.n = int(n)
1920 1930 else:
1921 1931
1922 1932 self.__integrationtime = int(timeInterval)
1923 1933 self.n = None
1924 1934 self.__byTime = True
1925 1935
1926 1936 def putData(self, data_spc, data_cspc, data_dc):
1927 1937 """
1928 1938 Add a profile to __buffer_spc and increase __profIndex by one
1929 1939
1930 1940 """
1931 1941 if numpy.all(numpy.isnan(data_spc)): #skip blocks that are entirely NaN
1932 1942 print("nan ")
1933 1943 return
1934 1944 self.__buffer_spc += data_spc
1935 1945
1936 1946 if data_cspc is None:
1937 1947 self.__buffer_cspc = None
1938 1948 else:
1939 1949 self.__buffer_cspc += data_cspc
1940 1950
1941 1951 if data_dc is None:
1942 1952 self.__buffer_dc = None
1943 1953 else:
1944 1954 self.__buffer_dc += data_dc
1945 1955
1946 1956 self.__profIndex += 1
1947 1957
1948 1958 return
1949 1959
1950 1960 def pushData(self):
1951 1961 """
1952 1962 Return the sum of the last profiles and the profiles used in the sum.
1953 1963
1954 1964 Affected:
1955 1965
1956 1966 self.__profileIndex
1957 1967
1958 1968 """
1959 1969
1960 1970 data_spc = self.__buffer_spc
1961 1971 data_cspc = self.__buffer_cspc
1962 1972 data_dc = self.__buffer_dc
1963 1973 n = self.__profIndex
1964 1974
1965 1975 self.__buffer_spc = 0
1966 1976 self.__buffer_cspc = 0
1967 1977 self.__buffer_dc = 0
1968 1978
1969 1979
1970 1980 return data_spc, data_cspc, data_dc, n
1971 1981
1972 1982 def byProfiles(self, *args):
1973 1983
1974 1984 self.__dataReady = False
1975 1985 avgdata_spc = None
1976 1986 avgdata_cspc = None
1977 1987 avgdata_dc = None
1978 1988
1979 1989 self.putData(*args)
1980 1990
1981 1991 if self.__profIndex == self.n:
1982 1992
1983 1993 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
1984 1994 self.n = n
1985 1995 self.__dataReady = True
1986 1996
1987 1997 return avgdata_spc, avgdata_cspc, avgdata_dc
1988 1998
1989 1999 def byTime(self, datatime, *args):
1990 2000
1991 2001 self.__dataReady = False
1992 2002 avgdata_spc = None
1993 2003 avgdata_cspc = None
1994 2004 avgdata_dc = None
1995 2005
1996 2006 self.putData(*args)
1997 2007
1998 2008 if (datatime - self.__initime) >= self.__integrationtime:
1999 2009 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
2000 2010 self.n = n
2001 2011 self.__dataReady = True
2002 2012
2003 2013 return avgdata_spc, avgdata_cspc, avgdata_dc
2004 2014
2005 2015 def integrate(self, datatime, *args):
2006 2016
2007 2017 if self.__profIndex == 0:
2008 2018 self.__initime = datatime
2009 2019
2010 2020 if self.__byTime:
2011 2021 avgdata_spc, avgdata_cspc, avgdata_dc = self.byTime(
2012 2022 datatime, *args)
2013 2023 else:
2014 2024 avgdata_spc, avgdata_cspc, avgdata_dc = self.byProfiles(*args)
2015 2025
2016 2026 if not self.__dataReady:
2017 2027 return None, None, None, None
2018 2028
2019 2029 return self.__initime, avgdata_spc, avgdata_cspc, avgdata_dc
2020 2030
2021 2031 def run(self, dataOut, n=None, timeInterval=None, overlapping=False):
2022 2032 if n == 1:
2023 2033 return dataOut
2024 2034
2025 2035 if dataOut.flagNoData == True:
2026 2036 return dataOut
2027 2037
2028 2038 dataOut.flagNoData = True
2029 2039
2030 2040 if not self.isConfig:
2031 2041 self.setup(n, timeInterval, overlapping)
2032 2042 self.isConfig = True
2033 2043
2034 2044 avgdatatime, avgdata_spc, avgdata_cspc, avgdata_dc = self.integrate(dataOut.utctime,
2035 2045 dataOut.data_spc,
2036 2046 dataOut.data_cspc,
2037 2047 dataOut.data_dc)
2038 2048 self.incohInt += dataOut.nIncohInt
2039 2049 self.nOutliers += dataOut.data_outlier
2040 2050 if self.__dataReady:
2041 2051 #print("prof: ",dataOut.max_nIncohInt,self.__profIndex)
2042 2052 dataOut.data_spc = avgdata_spc
2043 2053 dataOut.data_cspc = avgdata_cspc
2044 2054 dataOut.data_dc = avgdata_dc
2045 2055 dataOut.nIncohInt = self.incohInt
2046 2056 dataOut.data_outlier = self.nOutliers
2047 2057 dataOut.utctime = avgdatatime
2048 2058 dataOut.flagNoData = False
2049 2059 dataOut.max_nIncohInt += self.__profIndex
2050 2060 self.incohInt = 0
2051 2061 self.nOutliers = 0
2052 2062 self.__profIndex = 0
2053
2063 #print("IncohInt Done")
2054 2064 return dataOut
2055 2065
2056 2066 class dopplerFlip(Operation):
2057 2067
2058 2068 def run(self, dataOut):
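        # Reverses the Doppler (profile) axis of the spectrum of channel index 2, then restores
        # the DC bin and the bin immediately before it to their original positions.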
2059 2069 # array 1: (num_chan, num_profiles, num_heights)
2060 2070 self.dataOut = dataOut
2061 2071 # JULIA-oblique, index 2
2062 2072 # array 2: (num_profiles, num_heights)
2063 2073 jspectra = self.dataOut.data_spc[2]
2064 2074 jspectra_tmp = numpy.zeros(jspectra.shape)
2065 2075 num_profiles = jspectra.shape[0]
2066 2076 freq_dc = int(num_profiles / 2)
2067 2077 # Flip using a for loop
2068 2078 for j in range(num_profiles):
2069 2079 jspectra_tmp[num_profiles-j-1]= jspectra[j]
2070 2080 # Swap the DC profile with the immediately previous profile
2071 2081 jspectra_tmp[freq_dc-1]= jspectra[freq_dc-1]
2072 2082 jspectra_tmp[freq_dc]= jspectra[freq_dc]
2073 2083 # the modified channel is written back into the channel array
2074 2084 self.dataOut.data_spc[2] = jspectra_tmp
2075 2085
2076 2086 return self.dataOut
@@ -1,2369 +1,2361
1 1 import sys
2 2 import numpy,math
3 3 from scipy import interpolate
4 4 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
5 5 from schainpy.model.data.jrodata import Voltage,hildebrand_sekhon
6 6 from schainpy.utils import log
7 7 from schainpy.model.io.utils import getHei_index
8 8 from time import time
9 9 #import datetime
10 10 import numpy
11 11 #import copy
12 12 from schainpy.model.data import _noise
13 13
14 14 class VoltageProc(ProcessingUnit):
15 15
16 16 def __init__(self):
17 17
18 18 ProcessingUnit.__init__(self)
19 19
20 20 self.dataOut = Voltage()
21 21 self.flip = 1
22 22 self.setupReq = False
23 23
24 24 def run(self):
25 25 #print("running volt proc")
26 26 if self.dataIn.type == 'AMISR':
27 27 self.__updateObjFromAmisrInput()
28 28
29 29 if self.dataOut.buffer_empty:
30 30 if self.dataIn.type == 'Voltage':
31 31 self.dataOut.copy(self.dataIn)
32 32 #print("new volts reading")
33 33
34 34
35 35 def __updateObjFromAmisrInput(self):
36 36
37 37 self.dataOut.timeZone = self.dataIn.timeZone
38 38 self.dataOut.dstFlag = self.dataIn.dstFlag
39 39 self.dataOut.errorCount = self.dataIn.errorCount
40 40 self.dataOut.useLocalTime = self.dataIn.useLocalTime
41 41
42 42 self.dataOut.flagNoData = self.dataIn.flagNoData
43 43 self.dataOut.data = self.dataIn.data
44 44 self.dataOut.utctime = self.dataIn.utctime
45 45 self.dataOut.channelList = self.dataIn.channelList
46 46 #self.dataOut.timeInterval = self.dataIn.timeInterval
47 47 self.dataOut.heightList = self.dataIn.heightList
48 48 self.dataOut.nProfiles = self.dataIn.nProfiles
49 49
50 50 self.dataOut.nCohInt = self.dataIn.nCohInt
51 51 self.dataOut.ippSeconds = self.dataIn.ippSeconds
52 52 self.dataOut.frequency = self.dataIn.frequency
53 53
54 54 self.dataOut.azimuth = self.dataIn.azimuth
55 55 self.dataOut.zenith = self.dataIn.zenith
56 56
57 57 self.dataOut.beam.codeList = self.dataIn.beam.codeList
58 58 self.dataOut.beam.azimuthList = self.dataIn.beam.azimuthList
59 59 self.dataOut.beam.zenithList = self.dataIn.beam.zenithList
60 60
61 61
62 62 class selectChannels(Operation):
63 63
64 64 def run(self, dataOut, channelList=None):
65 65 self.channelList = channelList
66 66 if self.channelList == None:
67 67 print("Missing channelList")
68 68 return dataOut
69 69 channelIndexList = []
70 70
71 71 if type(dataOut.channelList) is not list: #leer array desde HDF5
72 72 try:
73 73 dataOut.channelList = dataOut.channelList.tolist()
74 74 except Exception as e:
75 75 print("Select Channels: ",e)
76 76 for channel in self.channelList:
77 77 if channel not in dataOut.channelList:
78 78 raise ValueError("Channel %d is not in %s" %(channel, str(dataOut.channelList)))
79 79
80 80 index = dataOut.channelList.index(channel)
81 81 channelIndexList.append(index)
82 82 dataOut = self.selectChannelsByIndex(dataOut,channelIndexList)
83 83 return dataOut
84 84
85 85 def selectChannelsByIndex(self, dataOut, channelIndexList):
86 86 """
87 87 Select a block of data by channel according to channelIndexList
88 88
89 89 Input:
90 90 channelIndexList : plain list of channel indexes to select, e.g. [2,3,7]
91 91
92 92 Affected:
93 93 dataOut.data
94 94 dataOut.channelIndexList
95 95 dataOut.nChannels
96 96 dataOut.m_ProcessingHeader.totalSpectra
97 97 dataOut.systemHeaderObj.numChannels
98 98 dataOut.m_ProcessingHeader.blockSize
99 99
100 100 Return:
101 101 None
102 102 """
103 103 #print("selectChannelsByIndex")
104 104 # for channelIndex in channelIndexList:
105 105 # if channelIndex not in dataOut.channelIndexList:
106 106 # raise ValueError("The value %d in channelIndexList is not valid" %channelIndex)
107 107
108 108 if dataOut.type == 'Voltage':
109 109 if dataOut.flagDataAsBlock:
110 110 """
111 111 If the data comes in blocks, dimensions = [nChannels, nProfiles, nHeis]
112 112 """
113 113 data = dataOut.data[channelIndexList,:,:]
114 114 else:
115 115 data = dataOut.data[channelIndexList,:]
116 116
117 117 dataOut.data = data
118 118 # dataOut.channelList = [dataOut.channelList[i] for i in channelIndexList]
119 119 dataOut.channelList = range(len(channelIndexList))
120 120
121 121 elif dataOut.type == 'Spectra':
122 122 if hasattr(dataOut, 'data_spc'):
123 123 if dataOut.data_spc is None:
124 124 raise ValueError("data_spc is None")
125 125 return dataOut
126 126 else:
127 127 data_spc = dataOut.data_spc[channelIndexList, :]
128 128 dataOut.data_spc = data_spc
129 129
130 130 # if hasattr(dataOut, 'data_dc') :# and
131 131 # if dataOut.data_dc is None:
132 132 # raise ValueError("data_dc is None")
133 133 # return dataOut
134 134 # else:
135 135 # data_dc = dataOut.data_dc[channelIndexList, :]
136 136 # dataOut.data_dc = data_dc
137 137 # dataOut.channelList = [dataOut.channelList[i] for i in channelIndexList]
138 138 dataOut.channelList = channelIndexList
139 139 dataOut = self.__selectPairsByChannel(dataOut,channelIndexList)
140 140
141 141 return dataOut
142 142
143 143 def __selectPairsByChannel(self, dataOut, channelList=None):
144 144 #print("__selectPairsByChannel")
145 145 if channelList == None:
146 146 return
147 147
148 148 pairsIndexListSelected = []
149 149 for pairIndex in dataOut.pairsIndexList:
150 150 # First pair
151 151 if dataOut.pairsList[pairIndex][0] not in channelList:
152 152 continue
153 153 # Second pair
154 154 if dataOut.pairsList[pairIndex][1] not in channelList:
155 155 continue
156 156
157 157 pairsIndexListSelected.append(pairIndex)
158 158 if not pairsIndexListSelected:
159 159 dataOut.data_cspc = None
160 160 dataOut.pairsList = []
161 161 return
162 162
163 163 dataOut.data_cspc = dataOut.data_cspc[pairsIndexListSelected]
164 164 dataOut.pairsList = [dataOut.pairsList[i]
165 165 for i in pairsIndexListSelected]
166 166
167 167 return dataOut
168 168
169 169 class selectHeights(Operation):
170 170
171 171 def run(self, dataOut, minHei=None, maxHei=None, minIndex=None, maxIndex=None):
172 172 """
173 173 Select a block of data from a set of height values within the range
174 174 minHei <= height <= maxHei
175 175
176 176 Input:
177 177 minHei : minimum height to consider
178 178 maxHei : maximum height to consider
179 179
180 180 Affected:
181 181 Several values are changed indirectly through the selectHeightsByIndex method
182 182
183 183 Return:
184 184 1 if the method runs successfully, otherwise 0
185 185 """
186 186
187 187 self.dataOut = dataOut
188 188
189 189 if minHei and maxHei:
190 190
191 191 if (minHei < dataOut.heightList[0]):
192 192 minHei = dataOut.heightList[0]
193 193
194 194 if (maxHei > dataOut.heightList[-1]):
195 195 maxHei = dataOut.heightList[-1]
196 196
197 197 minIndex = 0
198 198 maxIndex = 0
199 199 heights = dataOut.heightList
200 200
201 201 inda = numpy.where(heights >= minHei)
202 202 indb = numpy.where(heights <= maxHei)
203 203
204 204 try:
205 205 minIndex = inda[0][0]
206 206 except:
207 207 minIndex = 0
208 208
209 209 try:
210 210 maxIndex = indb[0][-1]
211 211 except:
212 212 maxIndex = len(heights)
213 213
214 214 self.selectHeightsByIndex(minIndex, maxIndex)
215 215
216 216 return dataOut
217 217
218 218 def selectHeightsByIndex(self, minIndex, maxIndex):
219 219 """
220 220 Select a block of data from a set of height indexes within the range
221 221 minIndex <= index <= maxIndex
222 222
223 223 Input:
224 224 minIndex : minimum height index to consider
225 225 maxIndex : maximum height index to consider
226 226
227 227 Affected:
228 228 self.dataOut.data
229 229 self.dataOut.heightList
230 230
231 231 Return:
232 232 1 si el metodo se ejecuto con exito caso contrario devuelve 0
233 233 """
234 234
235 235 if self.dataOut.type == 'Voltage':
236 236 if (minIndex < 0) or (minIndex > maxIndex):
237 237 raise ValueError("Height index range (%d,%d) is not valid" % (minIndex, maxIndex))
238 238
239 239 if (maxIndex >= self.dataOut.nHeights):
240 240 maxIndex = self.dataOut.nHeights
241 241
242 242 #voltage
243 243 if self.dataOut.flagDataAsBlock:
244 244 """
245 245 If the data comes in blocks, dimensions = [nChannels, nProfiles, nHeis]
246 246 """
247 247 data = self.dataOut.data[:,:, minIndex:maxIndex]
248 248 else:
249 249 data = self.dataOut.data[:, minIndex:maxIndex]
250 250
251 251 # firstHeight = self.dataOut.heightList[minIndex]
252 252
253 253 self.dataOut.data = data
254 254 self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex]
255 255
256 256 if self.dataOut.nHeights <= 1:
257 257 raise ValueError("selectHeights: Too few heights. Current number of heights is %d" %(self.dataOut.nHeights))
258 258 elif self.dataOut.type == 'Spectra':
259 259 if (minIndex < 0) or (minIndex > maxIndex):
260 260 raise ValueError("Error selecting heights: Index range (%d,%d) is not valid" % (
261 261 minIndex, maxIndex))
262 262
263 263 if (maxIndex >= self.dataOut.nHeights):
264 264 maxIndex = self.dataOut.nHeights - 1
265 265
266 266 # Spectra
267 267 data_spc = self.dataOut.data_spc[:, :, minIndex:maxIndex + 1]
268 268
269 269 data_cspc = None
270 270 if self.dataOut.data_cspc is not None:
271 271 data_cspc = self.dataOut.data_cspc[:, :, minIndex:maxIndex + 1]
272 272
273 273 data_dc = None
274 274 if self.dataOut.data_dc is not None:
275 275 data_dc = self.dataOut.data_dc[:, minIndex:maxIndex + 1]
276 276
277 277 self.dataOut.data_spc = data_spc
278 278 self.dataOut.data_cspc = data_cspc
279 279 self.dataOut.data_dc = data_dc
280 280
281 281 self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex + 1]
282 282
283 283 return 1
284 284
285 285
286 286 class filterByHeights(Operation):
287 287
288 288 def run(self, dataOut, window):
289 289
290 290 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
291 291
292 292 if window == None:
293 293 window = (dataOut.radarControllerHeaderObj.txA/dataOut.radarControllerHeaderObj.nBaud) / deltaHeight
294 294
295 295 newdelta = deltaHeight * window
296 296 r = dataOut.nHeights % window
297 297 newheights = (dataOut.nHeights-r)/window
298 298
299 299 if newheights <= 1:
300 300 raise ValueError("filterByHeights: Too few heights. Current number of heights is %d and window is %d" %(dataOut.nHeights, window))
301 301
302 302 if dataOut.flagDataAsBlock:
303 303 """
304 304 If the data comes in blocks, dimensions = [nChannels, nProfiles, nHeis]
305 305 """
306 306 buffer = dataOut.data[:, :, 0:int(dataOut.nHeights-r)]
307 307 buffer = buffer.reshape(dataOut.nChannels, dataOut.nProfiles, int(dataOut.nHeights/window), window)
308 308 buffer = numpy.sum(buffer,3)
309 309
310 310 else:
311 311 buffer = dataOut.data[:,0:int(dataOut.nHeights-r)]
312 312 buffer = buffer.reshape(dataOut.nChannels,int(dataOut.nHeights/window),int(window))
313 313 buffer = numpy.sum(buffer,2)
314 314
315 315 dataOut.data = buffer
316 316 dataOut.heightList = dataOut.heightList[0] + numpy.arange( newheights )*newdelta
317 317 dataOut.windowOfFilter = window
318 318
319 319 return dataOut
320 320
321 321
322 322 class setH0(Operation):
323 323
324 324 def run(self, dataOut, h0, deltaHeight = None):
325 325
326 326 if not deltaHeight:
327 327 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
328 328
329 329 nHeights = dataOut.nHeights
330 330
331 331 newHeiRange = h0 + numpy.arange(nHeights)*deltaHeight
332 332
333 333 dataOut.heightList = newHeiRange
334 334
335 335 return dataOut
336 336
337 337
338 338 class deFlip(Operation):
339 339
340 340 def run(self, dataOut, channelList = []):
341 341
342 342 data = dataOut.data.copy()
343 343
344 344 if dataOut.flagDataAsBlock:
345 345 flip = self.flip
346 346 profileList = list(range(dataOut.nProfiles))
347 347
348 348 if not channelList:
349 349 for thisProfile in profileList:
350 350 data[:,thisProfile,:] = data[:,thisProfile,:]*flip
351 351 flip *= -1.0
352 352 else:
353 353 for thisChannel in channelList:
354 354 if thisChannel not in dataOut.channelList:
355 355 continue
356 356
357 357 for thisProfile in profileList:
358 358 data[thisChannel,thisProfile,:] = data[thisChannel,thisProfile,:]*flip
359 359 flip *= -1.0
360 360
361 361 self.flip = flip
362 362
363 363 else:
364 364 if not channelList:
365 365 data[:,:] = data[:,:]*self.flip
366 366 else:
367 367 for thisChannel in channelList:
368 368 if thisChannel not in dataOut.channelList:
369 369 continue
370 370
371 371 data[thisChannel,:] = data[thisChannel,:]*self.flip
372 372
373 373 self.flip *= -1.
374 374
375 375 dataOut.data = data
376 376
377 377 return dataOut
378 378
379 379
380 380 class setAttribute(Operation):
381 381 '''
382 382 Set an arbitrary attribute(s) to dataOut
383 383 '''
384 384
385 385 def __init__(self):
386 386
387 387 Operation.__init__(self)
388 388 self._ready = False
389 389
390 390 def run(self, dataOut, **kwargs):
391 391
392 392 for key, value in kwargs.items():
393 393 setattr(dataOut, key, value)
394 394
395 395 return dataOut
396 396
397 397
398 398 @MPDecorator
399 399 class printAttribute(Operation):
400 400 '''
401 401 Print an arbitrary attribute of dataOut
402 402 '''
403 403
404 404 def __init__(self):
405 405
406 406 Operation.__init__(self)
407 407
408 408 def run(self, dataOut, attributes):
409 409
410 410 if isinstance(attributes, str):
411 411 attributes = [attributes]
412 412 for attr in attributes:
413 413 if hasattr(dataOut, attr):
414 414 log.log(getattr(dataOut, attr), attr)
415 415
416 416
417 417 class interpolateHeights(Operation):
418 418
419 419 def run(self, dataOut, topLim, botLim):
420 420 #69 to 72 for julia
421 421 #82-84 for meteors
422 422 if len(numpy.shape(dataOut.data))==2:
423 423 sampInterp = (dataOut.data[:,botLim-1] + dataOut.data[:,topLim+1])/2
424 424 sampInterp = numpy.transpose(numpy.tile(sampInterp,(topLim-botLim + 1,1)))
425 425 #dataOut.data[:,botLim:limSup+1] = sampInterp
426 426 dataOut.data[:,botLim:topLim+1] = sampInterp
427 427 else:
428 428 nHeights = dataOut.data.shape[2]
429 429 x = numpy.hstack((numpy.arange(botLim),numpy.arange(topLim+1,nHeights)))
430 430 y = dataOut.data[:,:,list(range(botLim))+list(range(topLim+1,nHeights))]
431 431 f = interpolate.interp1d(x, y, axis = 2)
432 432 xnew = numpy.arange(botLim,topLim+1)
433 433 ynew = f(xnew)
434 434 dataOut.data[:,:,botLim:topLim+1] = ynew
435 435
436 436 return dataOut
437 437
438 438
439 439 class CohInt(Operation):
440 440
441 441 isConfig = False
442 442 __profIndex = 0
443 443 __byTime = False
444 444 __initime = None
445 445 __lastdatatime = None
446 446 __integrationtime = None
447 447 __buffer = None
448 448 __bufferStride = []
449 449 __dataReady = False
450 450 __profIndexStride = 0
451 451 __dataToPutStride = False
452 452 n = None
453 453
454 454 def __init__(self, **kwargs):
455 455
456 456 Operation.__init__(self, **kwargs)
457 457
458 458 def setup(self, n=None, timeInterval=None, stride=None, overlapping=False, byblock=False):
459 459 """
460 460 Set the parameters of the integration class.
461 461
462 462 Inputs:
463 463
464 464 n : Number of coherent integrations
465 465 timeInterval : Integration time in seconds. Ignored when the parameter "n" is given.
466 466 overlapping :
467 467 """
468 468
469 469 self.__initime = None
470 470 self.__lastdatatime = 0
471 471 self.__buffer = None
472 472 self.__dataReady = False
473 473 self.byblock = byblock
474 474 self.stride = stride
475 475
476 476 if n == None and timeInterval == None:
477 477 raise ValueError("n or timeInterval should be specified ...")
478 478
479 479 if n != None:
480 480 self.n = n
481 481 self.__byTime = False
482 482 else:
483 483 self.__integrationtime = timeInterval #* 60. #if (type(timeInterval)!=integer) -> change this line
484 484 self.n = 9999
485 485 self.__byTime = True
486 486
487 487 if overlapping:
488 488 self.__withOverlapping = True
489 489 self.__buffer = None
490 490 else:
491 491 self.__withOverlapping = False
492 492 self.__buffer = 0
493 493
494 494 self.__profIndex = 0
495 495
496 496 def putData(self, data):
497 497
498 498 """
499 499 Add a profile to the __buffer and increase __profIndex by one
500 500
501 501 """
502 502
503 503 if not self.__withOverlapping:
504 504 self.__buffer += data.copy()
505 505 self.__profIndex += 1
506 506 return
507 507
508 508 #Overlapping data
509 509 nChannels, nHeis = data.shape
510 510 data = numpy.reshape(data, (1, nChannels, nHeis))
511 511
512 512 #If the buffer is empty then it takes the data value
513 513 if self.__buffer is None:
514 514 self.__buffer = data
515 515 self.__profIndex += 1
516 516 return
517 517
518 518 #If the buffer length is lower than n then stack the data value
519 519 if self.__profIndex < self.n:
520 520 self.__buffer = numpy.vstack((self.__buffer, data))
521 521 self.__profIndex += 1
522 522 return
523 523
524 524 #If the buffer length is equal to n then replace the last buffer value with the data value
525 525 self.__buffer = numpy.roll(self.__buffer, -1, axis=0)
526 526 self.__buffer[self.n-1] = data
527 527 self.__profIndex = self.n
528 528 return
529 529
530 530
531 531 def pushData(self):
532 532 """
533 533 Return the sum of the buffered profiles and the number of profiles used in the sum.
534 534
535 535 Affected:
536 536
537 537 self.__profIndex
538 538
539 539 """
540 540
541 541 if not self.__withOverlapping:
542 542 data = self.__buffer
543 543 n = self.__profIndex
544 544
545 545 self.__buffer = 0
546 546 self.__profIndex = 0
547 547
548 548 return data, n
549 549
550 550 #Integration with Overlapping
551 551 data = numpy.sum(self.__buffer, axis=0)
552 552 # print data
553 553 # raise
554 554 n = self.__profIndex
555 555
556 556 return data, n
557 557
558 558 def byProfiles(self, data):
559 559
560 560 self.__dataReady = False
561 561 avgdata = None
562 562 # n = None
563 563 # print data
564 564 # raise
565 565 self.putData(data)
566 566
567 567 if self.__profIndex == self.n:
568 568 avgdata, n = self.pushData()
569 569 self.__dataReady = True
570 570
571 571 return avgdata
572 572
573 573 def byTime(self, data, datatime):
574 574
575 575 self.__dataReady = False
576 576 avgdata = None
577 577 n = None
578 578
579 579 self.putData(data)
580 580
581 581 if (datatime - self.__initime) >= self.__integrationtime:
582 582 avgdata, n = self.pushData()
583 583 self.n = n
584 584 self.__dataReady = True
585 585
586 586 return avgdata
587 587
588 588 def integrateByStride(self, data, datatime):
589 589 # print data
590 590 if self.__profIndex == 0:
591 591 self.__buffer = [[data.copy(), datatime]]
592 592 else:
593 593 self.__buffer.append([data.copy(),datatime])
594 594 self.__profIndex += 1
595 595 self.__dataReady = False
596 596
597 597 if self.__profIndex == self.n * self.stride :
598 598 self.__dataToPutStride = True
599 599 self.__profIndexStride = 0
600 600 self.__profIndex = 0
601 601 self.__bufferStride = []
602 602 for i in range(self.stride):
603 603 current = self.__buffer[i::self.stride]
604 604 data = numpy.sum([t[0] for t in current], axis=0)
605 605 avgdatatime = numpy.average([t[1] for t in current])
606 606 # print data
607 607 self.__bufferStride.append((data, avgdatatime))
608 608
609 609 if self.__dataToPutStride:
610 610 self.__dataReady = True
611 611 self.__profIndexStride += 1
612 612 if self.__profIndexStride == self.stride:
613 613 self.__dataToPutStride = False
614 614 # print self.__bufferStride[self.__profIndexStride - 1]
615 615 # raise
616 616 return self.__bufferStride[self.__profIndexStride - 1]
617 617
618 618
619 619 return None, None
620 620
621 621 def integrate(self, data, datatime=None):
622 622
623 623 if self.__initime == None:
624 624 self.__initime = datatime
625 625
626 626 if self.__byTime:
627 627 avgdata = self.byTime(data, datatime)
628 628 else:
629 629 avgdata = self.byProfiles(data)
630 630
631 631
632 632 self.__lastdatatime = datatime
633 633
634 634 if avgdata is None:
635 635 return None, None
636 636
637 637 avgdatatime = self.__initime
638 638
639 639 deltatime = datatime - self.__lastdatatime
640 640
641 641 if not self.__withOverlapping:
642 642 self.__initime = datatime
643 643 else:
644 644 self.__initime += deltatime
645 645
646 646 return avgdata, avgdatatime
647 647
648 648 def integrateByBlock(self, dataOut):
649 649
650 650 times = int(dataOut.data.shape[1]/self.n)
651 651 avgdata = numpy.zeros((dataOut.nChannels, times, dataOut.nHeights), dtype=numpy.complex)
652 652
653 653 id_min = 0
654 654 id_max = self.n
655 655
656 656 for i in range(times):
657 657 junk = dataOut.data[:,id_min:id_max,:]
658 658 avgdata[:,i,:] = junk.sum(axis=1)
659 659 id_min += self.n
660 660 id_max += self.n
661 661
662 662 timeInterval = dataOut.ippSeconds*self.n
663 663 avgdatatime = (times - 1) * timeInterval + dataOut.utctime
664 664 self.__dataReady = True
665 665 return avgdata, avgdatatime
666 666
667 667 def run(self, dataOut, n=None, timeInterval=None, stride=None, overlapping=False, byblock=False, **kwargs):
668 668
669 669 if not self.isConfig:
670 670 self.setup(n=n, stride=stride, timeInterval=timeInterval, overlapping=overlapping, byblock=byblock, **kwargs)
671 671 self.isConfig = True
672 672
673 673 if dataOut.flagDataAsBlock:
674 674 """
675 675 If the data is read by blocks, dimension = [nChannels, nProfiles, nHeis]
676 676 """
677 677 avgdata, avgdatatime = self.integrateByBlock(dataOut)
678 678 dataOut.nProfiles = int(dataOut.nProfiles / self.n)
679 679 else:
680 680 if stride is None:
681 681 avgdata, avgdatatime = self.integrate(dataOut.data, dataOut.utctime)
682 682 else:
683 683 avgdata, avgdatatime = self.integrateByStride(dataOut.data, dataOut.utctime)
684 684
685 685
686 686 # dataOut.timeInterval *= n
687 687 dataOut.flagNoData = True
688 688
689 689 if self.__dataReady:
690 690 dataOut.data = avgdata
691 691 if not dataOut.flagCohInt:
692 692 dataOut.nCohInt *= self.n
693 693 dataOut.flagCohInt = True
694 694 dataOut.utctime = avgdatatime
695 695 # print avgdata, avgdatatime
696 696 # raise
697 697 # dataOut.timeInterval = dataOut.ippSeconds * dataOut.nCohInt
698 698 dataOut.flagNoData = False
699 699 return dataOut
700 700
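# Illustrative sketch (added annotation): the non-overlapping path of CohInt
# simply accumulates n consecutive complex profiles (putData) and releases
# their sum together with the profile count (pushData). Sizes and n below are
# assumptions for the example only.
def _coherent_integration_example(n=4):
    import numpy
    nChannels, nHeights = 2, 8
    acc = 0
    for _ in range(n):
        profile = (numpy.random.randn(nChannels, nHeights)
                   + 1j * numpy.random.randn(nChannels, nHeights))
        acc = acc + profile                     # putData: __buffer += data
    integrated, nused = acc, n                  # pushData: return sum and count
    return integrated, nused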
701 701 class Decoder(Operation):
702 702
703 703 isConfig = False
704 704 __profIndex = 0
705 705
706 706 code = None
707 707
708 708 nCode = None
709 709 nBaud = None
710 710
711 711 def __init__(self, **kwargs):
712 712
713 713 Operation.__init__(self, **kwargs)
714 714
715 715 self.times = None
716 716 self.osamp = None
717 717 # self.__setValues = False
718 718 self.isConfig = False
719 719 self.setupReq = False
720 720 def setup(self, code, osamp, dataOut):
721 721
722 722 self.__profIndex = 0
723 723
724 724 self.code = code
725 725
726 726 self.nCode = len(code)
727 727 self.nBaud = len(code[0])
728 728 if (osamp != None) and (osamp >1):
729 729 self.osamp = osamp
730 730 self.code = numpy.repeat(code, repeats=self.osamp, axis=1)
731 731 self.nBaud = self.nBaud*self.osamp
732 732
733 733 self.__nChannels = dataOut.nChannels
734 734 self.__nProfiles = dataOut.nProfiles
735 735 self.__nHeis = dataOut.nHeights
736 736
737 737 if self.__nHeis < self.nBaud:
738 738 raise ValueError('Number of heights (%d) should be greater than number of bauds (%d)' %(self.__nHeis, self.nBaud))
739 739
740 740 #Frequency
741 741 __codeBuffer = numpy.zeros((self.nCode, self.__nHeis), dtype=numpy.complex)
742 742
743 743 __codeBuffer[:,0:self.nBaud] = self.code
744 744
745 745 self.fft_code = numpy.conj(numpy.fft.fft(__codeBuffer, axis=1))
746 746
747 747 if dataOut.flagDataAsBlock:
748 748
749 749 self.ndatadec = self.__nHeis #- self.nBaud + 1
750 750
751 751 self.datadecTime = numpy.zeros((self.__nChannels, self.__nProfiles, self.ndatadec), dtype=numpy.complex)
752 752
753 753 else:
754 754
755 755 #Time
756 756 self.ndatadec = self.__nHeis #- self.nBaud + 1
757 757
758 758 self.datadecTime = numpy.zeros((self.__nChannels, self.ndatadec), dtype=numpy.complex)
759 759
760 760 def __convolutionInFreq(self, data):
761 761
762 762 fft_code = self.fft_code[self.__profIndex].reshape(1,-1)
763 763
764 764 fft_data = numpy.fft.fft(data, axis=1)
765 765
766 766 conv = fft_data*fft_code
767 767
768 768 data = numpy.fft.ifft(conv,axis=1)
769 769
770 770 return data
771 771
772 772 def __convolutionInFreqOpt(self, data):
773 773
774 774 raise NotImplementedError
775 775
776 776 def __convolutionInTime(self, data):
777 777
778 778 code = self.code[self.__profIndex]
779 779 for i in range(self.__nChannels):
780 780 self.datadecTime[i,:] = numpy.correlate(data[i,:], code, mode='full')[self.nBaud-1:]
781 781
782 782 return self.datadecTime
783 783
784 784 def __convolutionByBlockInTime(self, data):
785 785
786 786 repetitions = int(self.__nProfiles / self.nCode)
787 787 junk = numpy.lib.stride_tricks.as_strided(self.code, (repetitions, self.code.size), (0, self.code.itemsize))
788 788 junk = junk.flatten()
789 789 code_block = numpy.reshape(junk, (self.nCode*repetitions, self.nBaud))
790 790 profilesList = range(self.__nProfiles)
791 791
792 792 for i in range(self.__nChannels):
793 793 for j in profilesList:
794 794 self.datadecTime[i,j,:] = numpy.correlate(data[i,j,:], code_block[j,:], mode='full')[self.nBaud-1:]
795 795 return self.datadecTime
796 796
797 797 def __convolutionByBlockInFreq(self, data):
798 798
799 799 raise NotImplementedError("Decoder by frequency for blocks not implemented")
800 800
801 801
802 802 fft_code = self.fft_code[self.__profIndex].reshape(1,-1)
803 803
804 804 fft_data = numpy.fft.fft(data, axis=2)
805 805
806 806 conv = fft_data*fft_code
807 807
808 808 data = numpy.fft.ifft(conv,axis=2)
809 809
810 810 return data
811 811
812 812
813 813 def run(self, dataOut, code=None, nCode=None, nBaud=None, mode = 0, osamp=None, times=None):
814 814
815 815 if dataOut.flagDecodeData:
816 816 print("This data is already decoded, decoding again ...")
817 817
818 818 if not self.isConfig:
819 819
820 820 if code is None:
821 821 if dataOut.code is None:
822 822 raise ValueError("Code could not be read from %s instance. Enter a value in Code parameter" %dataOut.type)
823 823
824 824 code = dataOut.code
825 825 else:
826 826 code = numpy.array(code).reshape(nCode,nBaud)
827 827 self.setup(code, osamp, dataOut)
828 828
829 829 self.isConfig = True
830 830
831 831 if mode == 3:
832 832 sys.stderr.write("Decoder Warning: mode=%d is not valid, using mode=0\n" %mode)
833 833
834 834 if times != None:
835 835 sys.stderr.write("Decoder Warning: Argument 'times' is not used anymore\n")
836 836
837 837 if self.code is None:
838 838 print("Fail decoding: Code is not defined.")
839 839 return
840 840
841 841 self.__nProfiles = dataOut.nProfiles
842 842 datadec = None
843 843
844 844 if mode == 3:
845 845 mode = 0
846 846
847 847 if dataOut.flagDataAsBlock:
848 848 """
849 849 Decoding when the data has been read as a block,
850 850 """
851 851
852 852 if mode == 0:
853 853 datadec = self.__convolutionByBlockInTime(dataOut.data)
854 854 if mode == 1:
855 855 datadec = self.__convolutionByBlockInFreq(dataOut.data)
856 856 else:
857 857 """
858 858 Decoding when the data has been read profile by profile
859 859 """
860 860 if mode == 0:
861 861 datadec = self.__convolutionInTime(dataOut.data)
862 862
863 863 if mode == 1:
864 864 datadec = self.__convolutionInFreq(dataOut.data)
865 865
866 866 if mode == 2:
867 867 datadec = self.__convolutionInFreqOpt(dataOut.data)
868 868
869 869 if datadec is None:
870 870 raise ValueError("Codification mode selected is not valid: mode=%d. Try selecting 0 or 1" %mode)
871 871
872 872 dataOut.code = self.code
873 873 dataOut.nCode = self.nCode
874 874 dataOut.nBaud = self.nBaud
875 875
876 876 dataOut.data = datadec
877 877
878 878 dataOut.heightList = dataOut.heightList[0:datadec.shape[-1]]
879 879
880 880 dataOut.flagDecodeData = True # assume the data is already decoded
881 881
882 882 if self.__profIndex == self.nCode-1:
883 883 self.__profIndex = 0
884 884 return dataOut
885 885
886 886 self.__profIndex += 1
887 887
888 888 return dataOut
889 889 # dataOut.flagDeflipData = True # assume the data is not un-flipped
890 890
891 891
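# Illustrative sketch (added annotation): mode=0 decoding is a matched-filter
# correlation of each height profile with the transmitted code, keeping the
# first nHeis lags exactly as __convolutionInTime does. The Barker-5 code and
# the echo position are assumptions for the example only.
def _decoder_time_domain_example():
    import numpy
    code = numpy.array([1, 1, 1, -1, 1], dtype=float)   # assumed Barker-5 code
    nBaud, nHeis = code.size, 32
    profile = numpy.zeros(nHeis, dtype=complex)
    profile[10:10 + nBaud] = code                        # echo starting at height index 10
    decoded = numpy.correlate(profile, code, mode='full')[nBaud - 1:]
    peak_height = int(numpy.argmax(numpy.abs(decoded)))  # expected: 10
    return decoded, peak_height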
892 892 class ProfileConcat(Operation):
893 893
894 894 isConfig = False
895 895 buffer = None
896 896
897 897 def __init__(self, **kwargs):
898 898
899 899 Operation.__init__(self, **kwargs)
900 900 self.profileIndex = 0
901 901
902 902 def reset(self):
903 903 self.buffer = numpy.zeros_like(self.buffer)
904 904 self.start_index = 0
905 905 self.times = 1
906 906
907 907 def setup(self, data, m, n=1):
908 908 self.buffer = numpy.zeros((data.shape[0],data.shape[1]*m),dtype=type(data[0,0]))
909 909 self.nHeights = data.shape[1]#.nHeights
910 910 self.start_index = 0
911 911 self.times = 1
912 912
913 913 def concat(self, data):
914 914
915 915 self.buffer[:,self.start_index:self.nHeights*self.times] = data.copy()
916 916 self.start_index = self.start_index + self.nHeights
917 917
918 918 def run(self, dataOut, m):
919 919 dataOut.flagNoData = True
920 920
921 921 if not self.isConfig:
922 922 self.setup(dataOut.data, m, 1)
923 923 self.isConfig = True
924 924
925 925 if dataOut.flagDataAsBlock:
926 926 raise ValueError("ProfileConcat can only be used when voltage has been read profile by profile (getBlock = False)")
927 927
928 928 else:
929 929 self.concat(dataOut.data)
930 930 self.times += 1
931 931 if self.times > m:
932 932 dataOut.data = self.buffer
933 933 self.reset()
934 934 dataOut.flagNoData = False
935 935 # more header and dataOut attributes should be updated, for example the heights
936 936 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
937 937 xf = dataOut.heightList[0] + dataOut.nHeights * deltaHeight * m
938 938 dataOut.heightList = numpy.arange(dataOut.heightList[0], xf, deltaHeight)
939 939 dataOut.ippSeconds *= m
940 940 return dataOut
941 941
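# Illustrative sketch (added annotation): ProfileConcat stitches m consecutive
# (nChannels, nHeights) profiles into one longer profile along the height axis
# and later rescales heightList and ippSeconds by m. Sizes below are
# assumptions for the example only.
def _profile_concat_example(m=3):
    import numpy
    nChannels, nHeights = 2, 4
    profiles = [numpy.random.randn(nChannels, nHeights) + 0j for _ in range(m)]
    concatenated = numpy.concatenate(profiles, axis=1)   # shape (nChannels, nHeights*m)
    return concatenated.shape                            # -> (2, 12)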
942 942 class ProfileSelector(Operation):
943 943
944 944 profileIndex = None
945 945 # Total number of profiles
946 946 nProfiles = None
947 947
948 948 def __init__(self, **kwargs):
949 949
950 950 Operation.__init__(self, **kwargs)
951 951 self.profileIndex = 0
952 952
953 953 def incProfileIndex(self):
954 954
955 955 self.profileIndex += 1
956 956
957 957 if self.profileIndex >= self.nProfiles:
958 958 self.profileIndex = 0
959 959
960 960 def isThisProfileInRange(self, profileIndex, minIndex, maxIndex):
961 961
962 962 if profileIndex < minIndex:
963 963 return False
964 964
965 965 if profileIndex > maxIndex:
966 966 return False
967 967
968 968 return True
969 969
970 970 def isThisProfileInList(self, profileIndex, profileList):
971 971
972 972 if profileIndex not in profileList:
973 973 return False
974 974
975 975 return True
976 976
977 977 def run(self, dataOut, profileList=None, profileRangeList=None, beam=None, byblock=False, rangeList = None, nProfiles=None):
978 978
979 979 """
980 980 ProfileSelector:
981 981
982 982 Inputs:
983 983 profileList : Index of profiles selected. Example: profileList = (0,1,2,7,8)
984 984
985 985 profileRangeList : Minimum and maximum profile indexes. Example: profileRangeList = (4, 30)
986 986
987 987 rangeList : List of profile ranges. Example: rangeList = ((4, 30), (32, 64), (128, 256))
988 988
989 989 """
990 990
991 991 if rangeList is not None:
992 992 if type(rangeList[0]) not in (tuple, list):
993 993 rangeList = [rangeList]
994 994
995 995 dataOut.flagNoData = True
996 996
997 997 if dataOut.flagDataAsBlock:
998 998 """
999 999 data dimension = [nChannels, nProfiles, nHeis]
1000 1000 """
1001 1001 if profileList != None:
1002 1002 dataOut.data = dataOut.data[:,profileList,:]
1003 1003
1004 1004 if profileRangeList != None:
1005 1005 minIndex = profileRangeList[0]
1006 1006 maxIndex = profileRangeList[1]
1007 1007 profileList = list(range(minIndex, maxIndex+1))
1008 1008
1009 1009 dataOut.data = dataOut.data[:,minIndex:maxIndex+1,:]
1010 1010
1011 1011 if rangeList != None:
1012 1012
1013 1013 profileList = []
1014 1014
1015 1015 for thisRange in rangeList:
1016 1016 minIndex = thisRange[0]
1017 1017 maxIndex = thisRange[1]
1018 1018
1019 1019 profileList.extend(list(range(minIndex, maxIndex+1)))
1020 1020
1021 1021 dataOut.data = dataOut.data[:,profileList,:]
1022 1022
1023 1023 dataOut.nProfiles = len(profileList)
1024 1024 dataOut.profileIndex = dataOut.nProfiles - 1
1025 1025 dataOut.flagNoData = False
1026 1026
1027 1027 return dataOut
1028 1028
1029 1029 """
1030 1030 data dimension = [nChannels, nHeis]
1031 1031 """
1032 1032
1033 1033 if profileList != None:
1034 1034
1035 1035 if self.isThisProfileInList(dataOut.profileIndex, profileList):
1036 1036
1037 1037 self.nProfiles = len(profileList)
1038 1038 dataOut.nProfiles = self.nProfiles
1039 1039 dataOut.profileIndex = self.profileIndex
1040 1040 dataOut.flagNoData = False
1041 1041
1042 1042 self.incProfileIndex()
1043 1043 return dataOut
1044 1044
1045 1045 if profileRangeList != None:
1046 1046
1047 1047 minIndex = profileRangeList[0]
1048 1048 maxIndex = profileRangeList[1]
1049 1049
1050 1050 if self.isThisProfileInRange(dataOut.profileIndex, minIndex, maxIndex):
1051 1051
1052 1052 self.nProfiles = maxIndex - minIndex + 1
1053 1053 dataOut.nProfiles = self.nProfiles
1054 1054 dataOut.profileIndex = self.profileIndex
1055 1055 dataOut.flagNoData = False
1056 1056
1057 1057 self.incProfileIndex()
1058 1058 return dataOut
1059 1059
1060 1060 if rangeList != None:
1061 1061
1062 1062 nProfiles = 0
1063 1063
1064 1064 for thisRange in rangeList:
1065 1065 minIndex = thisRange[0]
1066 1066 maxIndex = thisRange[1]
1067 1067
1068 1068 nProfiles += maxIndex - minIndex + 1
1069 1069
1070 1070 for thisRange in rangeList:
1071 1071
1072 1072 minIndex = thisRange[0]
1073 1073 maxIndex = thisRange[1]
1074 1074
1075 1075 if self.isThisProfileInRange(dataOut.profileIndex, minIndex, maxIndex):
1076 1076
1077 1077 self.nProfiles = nProfiles
1078 1078 dataOut.nProfiles = self.nProfiles
1079 1079 dataOut.profileIndex = self.profileIndex
1080 1080 dataOut.flagNoData = False
1081 1081
1082 1082 self.incProfileIndex()
1083 1083
1084 1084 break
1085 1085
1086 1086 return dataOut
1087 1087
1088 1088
1089 1089 if beam != None: #beam is only for AMISR data
1090 1090 if self.isThisProfileInList(dataOut.profileIndex, dataOut.beamRangeDict[beam]):
1091 1091 dataOut.flagNoData = False
1092 1092 dataOut.profileIndex = self.profileIndex
1093 1093
1094 1094 self.incProfileIndex()
1095 1095
1096 1096 return dataOut
1097 1097
1098 1098 raise ValueError("ProfileSelector needs profileList, profileRangeList or rangeList parameter")
1099 1099
1100 1100
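# Illustrative sketch (added annotation): in the block branch of
# ProfileSelector, rangeList is expanded into a flat list of profile indexes
# that is then used to slice dataOut.data[:, profileList, :]. The ranges below
# are assumptions for the example only (bounds are inclusive).
def _profile_range_list_example():
    rangeList = [(4, 8), (12, 14)]
    profileList = []
    for minIndex, maxIndex in rangeList:
        profileList.extend(range(minIndex, maxIndex + 1))
    return profileList        # -> [4, 5, 6, 7, 8, 12, 13, 14]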
1101 1101 class Reshaper(Operation):
1102 1102
1103 1103 def __init__(self, **kwargs):
1104 1104
1105 1105 Operation.__init__(self, **kwargs)
1106 1106
1107 1107 self.__buffer = None
1108 1108 self.__nitems = 0
1109 1109
1110 1110 def __appendProfile(self, dataOut, nTxs):
1111 1111
1112 1112 if self.__buffer is None:
1113 1113 shape = (dataOut.nChannels, int(dataOut.nHeights/nTxs) )
1114 1114 self.__buffer = numpy.empty(shape, dtype = dataOut.data.dtype)
1115 1115
1116 1116 ini = dataOut.nHeights * self.__nitems
1117 1117 end = ini + dataOut.nHeights
1118 1118
1119 1119 self.__buffer[:, ini:end] = dataOut.data
1120 1120
1121 1121 self.__nitems += 1
1122 1122
1123 1123 return int(self.__nitems*nTxs)
1124 1124
1125 1125 def __getBuffer(self):
1126 1126
1127 1127 if self.__nitems == int(1./self.__nTxs):
1128 1128
1129 1129 self.__nitems = 0
1130 1130
1131 1131 return self.__buffer.copy()
1132 1132
1133 1133 return None
1134 1134
1135 1135 def __checkInputs(self, dataOut, shape, nTxs):
1136 1136
1137 1137 if shape is None and nTxs is None:
1138 1138 raise ValueError("Reshaper: shape or factor (nTxs) should be defined")
1139 1139
1140 1140 if nTxs:
1141 1141 if nTxs < 0:
1142 1142 raise ValueError("nTxs should be greater than 0")
1143 1143
1144 1144 if nTxs < 1 and dataOut.nProfiles % (1./nTxs) != 0:
1145 1145 raise ValueError("nProfiles= %d is not divisible by (1./nTxs) = %f" %(dataOut.nProfiles, (1./nTxs)))
1146 1146
1147 1147 shape = [dataOut.nChannels, dataOut.nProfiles*nTxs, dataOut.nHeights/nTxs]
1148 1148
1149 1149 return shape, nTxs
1150 1150
1151 1151 if len(shape) != 2 and len(shape) != 3:
1152 1152 raise ValueError("shape dimension should be equal to 2 or 3. shape = (nProfiles, nHeis) or (nChannels, nProfiles, nHeis). Current shape = (%d, %d, %d)" %(dataOut.nChannels, dataOut.nProfiles, dataOut.nHeights))
1153 1153
1154 1154 if len(shape) == 2:
1155 1155 shape_tuple = [dataOut.nChannels]
1156 1156 shape_tuple.extend(shape)
1157 1157 else:
1158 1158 shape_tuple = list(shape)
1159 1159
1160 1160 nTxs = 1.0*shape_tuple[1]/dataOut.nProfiles
1161 1161
1162 1162 return shape_tuple, nTxs
1163 1163
1164 1164 def run(self, dataOut, shape=None, nTxs=None):
1165 1165
1166 1166 shape_tuple, self.__nTxs = self.__checkInputs(dataOut, shape, nTxs)
1167 1167
1168 1168 dataOut.flagNoData = True
1169 1169 profileIndex = None
1170 1170
1171 1171 if dataOut.flagDataAsBlock:
1172 1172
1173 1173 dataOut.data = numpy.reshape(dataOut.data, shape_tuple)
1174 1174 dataOut.flagNoData = False
1175 1175
1176 1176 profileIndex = int(dataOut.nProfiles*self.__nTxs) - 1
1177 1177
1178 1178 else:
1179 1179
1180 1180 if self.__nTxs < 1:
1181 1181
1182 1182 self.__appendProfile(dataOut, self.__nTxs)
1183 1183 new_data = self.__getBuffer()
1184 1184
1185 1185 if new_data is not None:
1186 1186 dataOut.data = new_data
1187 1187 dataOut.flagNoData = False
1188 1188
1189 1189 profileIndex = dataOut.profileIndex*self.__nTxs
1190 1190
1191 1191 else:
1192 1192 raise ValueError("nTxs should be greater than 0 and lower than 1, or use VoltageReader(..., getblock=True)")
1193 1193
1194 1194 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1195 1195
1196 1196 dataOut.heightList = numpy.arange(dataOut.nHeights/self.__nTxs) * deltaHeight + dataOut.heightList[0]
1197 1197
1198 1198 dataOut.nProfiles = int(dataOut.nProfiles*self.__nTxs)
1199 1199
1200 1200 dataOut.profileIndex = profileIndex
1201 1201
1202 1202 dataOut.ippSeconds /= self.__nTxs
1203 1203
1204 1204 return dataOut
1205 1205
1206 1206 class SplitProfiles(Operation):
1207 1207
1208 1208 def __init__(self, **kwargs):
1209 1209
1210 1210 Operation.__init__(self, **kwargs)
1211 1211
1212 1212 def run(self, dataOut, n):
1213 1213
1214 1214 dataOut.flagNoData = True
1215 1215 profileIndex = None
1216 1216
1217 1217 if dataOut.flagDataAsBlock:
1218 1218
1219 1219 #nchannels, nprofiles, nsamples
1220 1220 shape = dataOut.data.shape
1221 1221
1222 1222 if shape[2] % n != 0:
1223 1223 raise ValueError("Could not split the data, nHeights=%d has to be a multiple of n=%d" %(shape[2], n))
1224 1224
1225 1225 new_shape = shape[0], shape[1]*n, int(shape[2]/n)
1226 1226
1227 1227 dataOut.data = numpy.reshape(dataOut.data, new_shape)
1228 1228 dataOut.flagNoData = False
1229 1229
1230 1230 profileIndex = int(dataOut.nProfiles/n) - 1
1231 1231
1232 1232 else:
1233 1233
1234 1234 raise ValueError("Could not split the data when it is read profile by profile. Use VoltageReader(..., getblock=True)")
1235 1235
1236 1236 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1237 1237
1238 1238 dataOut.heightList = numpy.arange(dataOut.nHeights/n) * deltaHeight + dataOut.heightList[0]
1239 1239
1240 1240 dataOut.nProfiles = int(dataOut.nProfiles*n)
1241 1241
1242 1242 dataOut.profileIndex = profileIndex
1243 1243
1244 1244 dataOut.ippSeconds /= n
1245 1245
1246 1246 return dataOut
1247 1247
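# Illustrative sketch (added annotation): SplitProfiles reshapes a block so
# that every profile of nHeights samples becomes n shorter profiles of
# nHeights//n samples; CombineProfiles below applies the inverse reshape.
# Sizes and n are assumptions for the example only.
def _split_profiles_example(n=2):
    import numpy
    data = numpy.arange(2 * 3 * 8).reshape(2, 3, 8).astype(complex)   # (ch, prof, heights)
    split = numpy.reshape(data, (2, 3 * n, 8 // n))     # -> (2, 6, 4)
    combined = numpy.reshape(split, (2, 3, 8))          # CombineProfiles undoes it
    assert numpy.array_equal(combined, data)
    return split.shape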
1248 1248 class CombineProfiles(Operation):
1249 1249 def __init__(self, **kwargs):
1250 1250
1251 1251 Operation.__init__(self, **kwargs)
1252 1252
1253 1253 self.__remData = None
1254 1254 self.__profileIndex = 0
1255 1255
1256 1256 def run(self, dataOut, n):
1257 1257
1258 1258 dataOut.flagNoData = True
1259 1259 profileIndex = None
1260 1260
1261 1261 if dataOut.flagDataAsBlock:
1262 1262
1263 1263 #nchannels, nprofiles, nsamples
1264 1264 shape = dataOut.data.shape
1265 1265 new_shape = shape[0], shape[1]//n, shape[2]*n
1266 1266
1267 1267 if shape[1] % n != 0:
1268 1268 raise ValueError("Could not combine the data, nProfiles=%d has to be a multiple of n=%d" %(shape[1], n))
1269 1269
1270 1270 dataOut.data = numpy.reshape(dataOut.data, new_shape)
1271 1271 dataOut.flagNoData = False
1272 1272
1273 1273 profileIndex = int(dataOut.nProfiles*n) - 1
1274 1274
1275 1275 else:
1276 1276
1277 1277 #nchannels, nsamples
1278 1278 if self.__remData is None:
1279 1279 newData = dataOut.data
1280 1280 else:
1281 1281 newData = numpy.concatenate((self.__remData, dataOut.data), axis=1)
1282 1282
1283 1283 self.__profileIndex += 1
1284 1284
1285 1285 if self.__profileIndex < n:
1286 1286 self.__remData = newData
1287 1287 #continue
1288 1288 return
1289 1289
1290 1290 self.__profileIndex = 0
1291 1291 self.__remData = None
1292 1292
1293 1293 dataOut.data = newData
1294 1294 dataOut.flagNoData = False
1295 1295
1296 1296 profileIndex = dataOut.profileIndex/n
1297 1297
1298 1298
1299 1299 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1300 1300
1301 1301 dataOut.heightList = numpy.arange(dataOut.nHeights*n) * deltaHeight + dataOut.heightList[0]
1302 1302
1303 1303 dataOut.nProfiles = int(dataOut.nProfiles/n)
1304 1304
1305 1305 dataOut.profileIndex = profileIndex
1306 1306
1307 1307 dataOut.ippSeconds *= n
1308 1308
1309 1309 return dataOut
1310 1310
1311 1311 class PulsePairVoltage(Operation):
1312 1312 '''
1313 1313 Function PulsePair(Signal Power, Velocity)
1314 1314 The real component of Lag[0] provides Intensity Information
1315 1315 The imag component of Lag[1] Phase provides Velocity Information
1316 1316
1317 1317 Configuration Parameters:
1318 1318 nPRF = Number of PRFs (pulse pairs) to integrate
1319 1319 theta = Azimuth angle boundaries in degrees
1320 1320
1321 1321 Input:
1322 1322 self.dataOut
1323 1323 lag[N]
1324 1324 Affected:
1325 1325 self.dataOut.spc
1326 1326 '''
1327 1327 isConfig = False
1328 1328 __profIndex = 0
1329 1329 __initime = None
1330 1330 __lastdatatime = None
1331 1331 __buffer = None
1332 1332 noise = None
1333 1333 __dataReady = False
1334 1334 n = None
1335 1335 __nch = 0
1336 1336 __nHeis = 0
1337 1337 removeDC = False
1338 1338 ipp = None
1339 1339 lambda_ = 0
1340 1340
1341 1341 def __init__(self,**kwargs):
1342 1342 Operation.__init__(self,**kwargs)
1343 1343
1344 1344 def setup(self, dataOut, n = None, removeDC=False):
1345 1345 '''
1346 1346 n = Number of input PRFs
1347 1347 '''
1348 1348 self.__initime = None
1349 1349 self.__lastdatatime = 0
1350 1350 self.__dataReady = False
1351 1351 self.__buffer = 0
1352 1352 self.__profIndex = 0
1353 1353 self.noise = None
1354 1354 self.__nch = dataOut.nChannels
1355 1355 self.__nHeis = dataOut.nHeights
1356 1356 self.removeDC = removeDC
1357 1357 self.lambda_ = 3.0e8/(9345.0e6)
1358 1358 self.ippSec = dataOut.ippSeconds
1359 1359 self.nCohInt = dataOut.nCohInt
1360 1360
1361 1361 if n == None:
1362 1362 raise ValueError("n should be specified.")
1363 1363
1364 1364 if n != None:
1365 1365 if n<2:
1366 1366 raise ValueError("n should be greater than or equal to 2")
1367 1367
1368 1368 self.n = n
1369 1369 self.__nProf = n
1370 1370
1371 1371 self.__buffer = numpy.zeros((dataOut.nChannels,
1372 1372 n,
1373 1373 dataOut.nHeights),
1374 1374 dtype='complex')
1375 1375
1376 1376 def putData(self,data):
1377 1377 '''
1378 1378 Add a profile to the __buffer and increase __profIndex by one
1379 1379 '''
1380 1380 self.__buffer[:,self.__profIndex,:]= data
1381 1381 self.__profIndex += 1
1382 1382 return
1383 1383
1384 1384 def pushData(self,dataOut):
1385 1385 '''
1386 1386 Return the pulse-pair estimates and the number of profiles used in the operation
1387 1387 Affected : self.__profIndex
1388 1388 '''
1389 1389 #----------------- Remove DC-----------------------------------
1390 1390 if self.removeDC==True:
1391 1391 mean = numpy.mean(self.__buffer,1)
1392 1392 tmp = mean.reshape(self.__nch,1,self.__nHeis)
1393 1393 dc= numpy.tile(tmp,[1,self.__nProf,1])
1394 1394 self.__buffer = self.__buffer - dc
1395 1395 #------------------ Power computation ------------------------
1396 1396 pair0 = self.__buffer*numpy.conj(self.__buffer)
1397 1397 pair0 = pair0.real
1398 1398 lag_0 = numpy.sum(pair0,1)
1399 1399 #------------------ Noise computation per channel --------------------
1400 1400 self.noise = numpy.zeros(self.__nch)
1401 1401 for i in range(self.__nch):
1402 1402 daux = numpy.sort(pair0[i,:,:],axis= None)
1403 1403 self.noise[i]=hildebrand_sekhon( daux ,self.nCohInt)
1404 1404
1405 1405 self.noise = self.noise.reshape(self.__nch,1)
1406 1406 self.noise = numpy.tile(self.noise,[1,self.__nHeis])
1407 1407 noise_buffer = self.noise.reshape(self.__nch,1,self.__nHeis)
1408 1408 noise_buffer = numpy.tile(noise_buffer,[1,self.__nProf,1])
1409 1409 #------------------ Received power = P , Signal power = S , Noise = N --
1410 1410 #------------------ P = S+N , P = lag_0/N ---------------------------------
1411 1411 #-------------------- Power --------------------------------------------------
1412 1412 data_power = lag_0/(self.n*self.nCohInt)
1413 1413 #------------------ Signal ---------------------------------------------------
1414 1414 data_intensity = pair0 - noise_buffer
1415 1415 data_intensity = numpy.sum(data_intensity,axis=1)*(self.n*self.nCohInt)#*self.nCohInt)
1416 1416 #data_intensity = (lag_0-self.noise*self.n)*(self.n*self.nCohInt)
1417 1417 for i in range(self.__nch):
1418 1418 for j in range(self.__nHeis):
1419 1419 if data_intensity[i][j] < 0:
1420 1420 data_intensity[i][j] = numpy.min(numpy.absolute(data_intensity[i][j]))
1421 1421
1422 1422 #----------------- Doppler frequency and velocity computation --------
1423 1423 pair1 = self.__buffer[:,:-1,:]*numpy.conjugate(self.__buffer[:,1:,:])
1424 1424 lag_1 = numpy.sum(pair1,1)
1425 1425 data_freq = (-1/(2.0*math.pi*self.ippSec*self.nCohInt))*numpy.angle(lag_1)
1426 1426 data_velocity = (self.lambda_/2.0)*data_freq
1427 1427
1428 1428 #---------------- Estimated average signal power -----------
1429 1429 lag_0 = lag_0/self.n
1430 1430 S = lag_0-self.noise
1431 1431
1432 1432 #---------------- Average Doppler frequency ---------------------
1433 1433 lag_1 = lag_1/(self.n-1)
1434 1434 R1 = numpy.abs(lag_1)
1435 1435
1436 1436 #---------------- SNR computation ----------------------------------
1437 1437 data_snrPP = S/self.noise
1438 1438 for i in range(self.__nch):
1439 1439 for j in range(self.__nHeis):
1440 1440 if data_snrPP[i][j] < 1.e-20:
1441 1441 data_snrPP[i][j] = 1.e-20
1442 1442
1443 1443 #----------------- Spectral width computation ----------------------
1444 1444 L = S/R1
1445 1445 L = numpy.where(L<0,1,L)
1446 1446 L = numpy.log(L)
1447 1447 tmp = numpy.sqrt(numpy.absolute(L))
1448 1448 data_specwidth = (self.lambda_/(2*math.sqrt(2)*math.pi*self.ippSec*self.nCohInt))*tmp*numpy.sign(L)
1449 1449 n = self.__profIndex
1450 1450
1451 1451 self.__buffer = numpy.zeros((self.__nch, self.__nProf,self.__nHeis), dtype='complex')
1452 1452 self.__profIndex = 0
1453 1453 return data_power,data_intensity,data_velocity,data_snrPP,data_specwidth,n
1454 1454
1455 1455
1456 1456 def pulsePairbyProfiles(self,dataOut):
1457 1457
1458 1458 self.__dataReady = False
1459 1459 data_power = None
1460 1460 data_intensity = None
1461 1461 data_velocity = None
1462 1462 data_specwidth = None
1463 1463 data_snrPP = None
1464 1464 self.putData(data=dataOut.data)
1465 1465 if self.__profIndex == self.n:
1466 1466 data_power,data_intensity, data_velocity,data_snrPP,data_specwidth, n = self.pushData(dataOut=dataOut)
1467 1467 self.__dataReady = True
1468 1468
1469 1469 return data_power, data_intensity, data_velocity, data_snrPP, data_specwidth
1470 1470
1471 1471
1472 1472 def pulsePairOp(self, dataOut, datatime= None):
1473 1473
1474 1474 if self.__initime == None:
1475 1475 self.__initime = datatime
1476 1476 data_power, data_intensity, data_velocity, data_snrPP, data_specwidth = self.pulsePairbyProfiles(dataOut)
1477 1477 self.__lastdatatime = datatime
1478 1478
1479 1479 if data_power is None:
1480 1480 return None, None, None,None,None,None
1481 1481
1482 1482 avgdatatime = self.__initime
1483 1483 deltatime = datatime - self.__lastdatatime
1484 1484 self.__initime = datatime
1485 1485
1486 1486 return data_power, data_intensity, data_velocity, data_snrPP, data_specwidth, avgdatatime
1487 1487
1488 1488 def run(self, dataOut,n = None,removeDC= False, overlapping= False,**kwargs):
1489 1489
1490 1490 if not self.isConfig:
1491 1491 self.setup(dataOut = dataOut, n = n , removeDC=removeDC , **kwargs)
1492 1492 self.isConfig = True
1493 1493 data_power, data_intensity, data_velocity,data_snrPP,data_specwidth, avgdatatime = self.pulsePairOp(dataOut, dataOut.utctime)
1494 1494 dataOut.flagNoData = True
1495 1495
1496 1496 if self.__dataReady:
1497 1497 dataOut.nCohInt *= self.n
1498 1498 dataOut.dataPP_POW = data_intensity # S
1499 1499 dataOut.dataPP_POWER = data_power # P
1500 1500 dataOut.dataPP_DOP = data_velocity
1501 1501 dataOut.dataPP_SNR = data_snrPP
1502 1502 dataOut.dataPP_WIDTH = data_specwidth
1503 1503 dataOut.PRFbyAngle = self.n # number of PRFs per rotated angle, equivalent to one time interval.
1504 1504 dataOut.utctime = avgdatatime
1505 1505 dataOut.flagNoData = False
1506 1506 return dataOut
1507 1507
1508 1508
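# Illustrative sketch (added annotation): the pulse-pair estimator above uses
# the lag-0 product of consecutive pulses for power and the phase of the lag-1
# product for Doppler frequency (velocity = lambda/2 * frequency). The IPP,
# wavelength and Doppler value below are assumptions for the example only.
def _pulse_pair_example():
    import math
    import numpy
    ippSec, lambda_ = 1e-3, 3.0e8 / 9345.0e6
    f_doppler_true = 80.0                                      # Hz
    t = numpy.arange(64) * ippSec
    voltage = numpy.exp(2j * numpy.pi * f_doppler_true * t)    # noiseless single gate
    lag0 = numpy.mean(voltage * numpy.conj(voltage)).real      # power
    lag1 = numpy.mean(voltage[:-1] * numpy.conj(voltage[1:]))  # same ordering as pair1 above
    f_est = (-1.0 / (2.0 * math.pi * ippSec)) * numpy.angle(lag1)
    v_est = (lambda_ / 2.0) * f_est
    return lag0, f_est, v_est                                  # f_est ~ 80 Hz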
1509 1509
1510 1510 # import collections
1511 1511 # from scipy.stats import mode
1512 1512 #
1513 1513 # class Synchronize(Operation):
1514 1514 #
1515 1515 # isConfig = False
1516 1516 # __profIndex = 0
1517 1517 #
1518 1518 # def __init__(self, **kwargs):
1519 1519 #
1520 1520 # Operation.__init__(self, **kwargs)
1521 1521 # # self.isConfig = False
1522 1522 # self.__powBuffer = None
1523 1523 # self.__startIndex = 0
1524 1524 # self.__pulseFound = False
1525 1525 #
1526 1526 # def __findTxPulse(self, dataOut, channel=0, pulse_with = None):
1527 1527 #
1528 1528 # #Read data
1529 1529 #
1530 1530 # powerdB = dataOut.getPower(channel = channel)
1531 1531 # noisedB = dataOut.getNoise(channel = channel)[0]
1532 1532 #
1533 1533 # self.__powBuffer.extend(powerdB.flatten())
1534 1534 #
1535 1535 # dataArray = numpy.array(self.__powBuffer)
1536 1536 #
1537 1537 # filteredPower = numpy.correlate(dataArray, dataArray[0:self.__nSamples], "same")
1538 1538 #
1539 1539 # maxValue = numpy.nanmax(filteredPower)
1540 1540 #
1541 1541 # if maxValue < noisedB + 10:
1542 1542 # #No transmission pulse was found
1543 1543 # return None
1544 1544 #
1545 1545 # maxValuesIndex = numpy.where(filteredPower > maxValue - 0.1*abs(maxValue))[0]
1546 1546 #
1547 1547 # if len(maxValuesIndex) < 2:
1548 1548 # #Only a single one-baud transmission pulse was found, waiting for the next TX
1549 1549 # return None
1550 1550 #
1551 1551 # phasedMaxValuesIndex = maxValuesIndex - self.__nSamples
1552 1552 #
1553 1553 # #Select only values spaced nSamples apart
1554 1554 # pulseIndex = numpy.intersect1d(maxValuesIndex, phasedMaxValuesIndex)
1555 1555 #
1556 1556 # if len(pulseIndex) < 2:
1557 1557 # #Only one transmission pulse wider than 1 was found
1558 1558 # return None
1559 1559 #
1560 1560 # spacing = pulseIndex[1:] - pulseIndex[:-1]
1561 1561 #
1562 1562 # #remove signals spaced less than 10 units or samples apart
1563 1563 # #(IPPs shorter than 10 units should not exist)
1564 1564 #
1565 1565 # realIndex = numpy.where(spacing > 10 )[0]
1566 1566 #
1567 1567 # if len(realIndex) < 2:
1568 1568 # #Only one transmission pulse wider than 1 was found
1569 1569 # return None
1570 1570 #
1571 1571 # #Remove wide pulses (keeps only the difference between IPPs)
1572 1572 # realPulseIndex = pulseIndex[realIndex]
1573 1573 #
1574 1574 # period = mode(realPulseIndex[1:] - realPulseIndex[:-1])[0][0]
1575 1575 #
1576 1576 # print "IPP = %d samples" %period
1577 1577 #
1578 1578 # self.__newNSamples = dataOut.nHeights #int(period)
1579 1579 # self.__startIndex = int(realPulseIndex[0])
1580 1580 #
1581 1581 # return 1
1582 1582 #
1583 1583 #
1584 1584 # def setup(self, nSamples, nChannels, buffer_size = 4):
1585 1585 #
1586 1586 # self.__powBuffer = collections.deque(numpy.zeros( buffer_size*nSamples,dtype=numpy.float),
1587 1587 # maxlen = buffer_size*nSamples)
1588 1588 #
1589 1589 # bufferList = []
1590 1590 #
1591 1591 # for i in range(nChannels):
1592 1592 # bufferByChannel = collections.deque(numpy.zeros( buffer_size*nSamples, dtype=numpy.complex) + numpy.NAN,
1593 1593 # maxlen = buffer_size*nSamples)
1594 1594 #
1595 1595 # bufferList.append(bufferByChannel)
1596 1596 #
1597 1597 # self.__nSamples = nSamples
1598 1598 # self.__nChannels = nChannels
1599 1599 # self.__bufferList = bufferList
1600 1600 #
1601 1601 # def run(self, dataOut, channel = 0):
1602 1602 #
1603 1603 # if not self.isConfig:
1604 1604 # nSamples = dataOut.nHeights
1605 1605 # nChannels = dataOut.nChannels
1606 1606 # self.setup(nSamples, nChannels)
1607 1607 # self.isConfig = True
1608 1608 #
1609 1609 # #Append new data to internal buffer
1610 1610 # for thisChannel in range(self.__nChannels):
1611 1611 # bufferByChannel = self.__bufferList[thisChannel]
1612 1612 # bufferByChannel.extend(dataOut.data[thisChannel])
1613 1613 #
1614 1614 # if self.__pulseFound:
1615 1615 # self.__startIndex -= self.__nSamples
1616 1616 #
1617 1617 # #Finding Tx Pulse
1618 1618 # if not self.__pulseFound:
1619 1619 # indexFound = self.__findTxPulse(dataOut, channel)
1620 1620 #
1621 1621 # if indexFound == None:
1622 1622 # dataOut.flagNoData = True
1623 1623 # return
1624 1624 #
1625 1625 # self.__arrayBuffer = numpy.zeros((self.__nChannels, self.__newNSamples), dtype = numpy.complex)
1626 1626 # self.__pulseFound = True
1627 1627 # self.__startIndex = indexFound
1628 1628 #
1629 1629 # #If pulse was found ...
1630 1630 # for thisChannel in range(self.__nChannels):
1631 1631 # bufferByChannel = self.__bufferList[thisChannel]
1632 1632 # #print self.__startIndex
1633 1633 # x = numpy.array(bufferByChannel)
1634 1634 # self.__arrayBuffer[thisChannel] = x[self.__startIndex:self.__startIndex+self.__newNSamples]
1635 1635 #
1636 1636 # deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1637 1637 # dataOut.heightList = numpy.arange(self.__newNSamples)*deltaHeight
1638 1638 # # dataOut.ippSeconds = (self.__newNSamples / deltaHeight)/1e6
1639 1639 #
1640 1640 # dataOut.data = self.__arrayBuffer
1641 1641 #
1642 1642 # self.__startIndex += self.__newNSamples
1643 1643 #
1644 1644 # return
1645 1645 class SSheightProfiles(Operation):
1646 1646
1647 1647 step = None
1648 1648 nsamples = None
1649 1649 bufferShape = None
1650 1650 profileShape = None
1651 1651 sshProfiles = None
1652 1652 profileIndex = None
1653 1653
1654 1654 def __init__(self, **kwargs):
1655 1655
1656 1656 Operation.__init__(self, **kwargs)
1657 1657 self.isConfig = False
1658 1658
1659 1659 def setup(self,dataOut ,step = None , nsamples = None):
1660 1660
1661 1661 if step == None and nsamples == None:
1662 1662 raise ValueError("step or nsamples should be specified ...")
1663 1663
1664 1664 self.step = step
1665 1665 self.nsamples = nsamples
1666 1666 self.__nChannels = dataOut.nChannels
1667 1667 self.__nProfiles = dataOut.nProfiles
1668 1668 self.__nHeis = dataOut.nHeights
1669 1669 shape = dataOut.data.shape #nchannels, nprofiles, nsamples
1670 1670
1671 1671 residue = (shape[1] - self.nsamples) % self.step
1672 1672 if residue != 0:
1673 1673 print("The residue is %d; (nHeights - nsamples)=%d should be a multiple of step=%d to avoid losing %d samples"%(residue,shape[1] - self.nsamples,step,residue))
1674 1674
1675 1675 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1676 1676 numberProfile = self.nsamples
1677 1677 numberSamples = (shape[1] - self.nsamples)/self.step
1678 1678
1679 1679 self.bufferShape = int(shape[0]), int(numberSamples), int(numberProfile) # nchannels, nsamples , nprofiles
1680 1680 self.profileShape = int(shape[0]), int(numberProfile), int(numberSamples) # nchannels, nprofiles, nsamples
1681 1681
1682 1682 self.buffer = numpy.zeros(self.bufferShape , dtype=numpy.complex)
1683 1683 self.sshProfiles = numpy.zeros(self.profileShape, dtype=numpy.complex)
1684 1684
1685 1685 def run(self, dataOut, step, nsamples, code = None, repeat = None):
1686 1686 dataOut.flagNoData = True
1687 1687
1688 1688 profileIndex = None
1689 1689 #print("nProfiles, nHeights ",dataOut.nProfiles, dataOut.nHeights)
1690 1690 #print(dataOut.getFreqRange(1)/1000.)
1691 1691 #exit(1)
1692 1692 if dataOut.flagDataAsBlock:
1693 1693 dataOut.data = numpy.average(dataOut.data,axis=1)
1694 1694 #print("jee")
1695 1695 dataOut.flagDataAsBlock = False
1696 1696 if not self.isConfig:
1697 1697 self.setup(dataOut, step=step , nsamples=nsamples)
1698 1698 #print("Setup done")
1699 1699 self.isConfig = True
1700 1700
1701 1701
1702 1702 if code is not None:
1703 1703 code = numpy.array(code)
1704 1704 code_block = code
1705 1705
1706 1706 if repeat is not None:
1707 1707 code_block = numpy.repeat(code_block, repeats=repeat, axis=1)
1708 1708 #print(code_block.shape)
1709 1709 for i in range(self.buffer.shape[1]):
1710 1710
1711 1711 if code is not None:
1712 1712 self.buffer[:,i] = dataOut.data[:,i*self.step:i*self.step + self.nsamples]*code_block
1713 1713
1714 1714 else:
1715 1715
1716 1716 self.buffer[:,i] = dataOut.data[:,i*self.step:i*self.step + self.nsamples]#*code[dataOut.profileIndex,:]
1717 1717
1718 1718 #self.buffer[:,j,self.__nHeis-j*self.step - self.nheights:self.__nHeis-j*self.step] = numpy.flip(dataOut.data[:,j*self.step:j*self.step + self.nheights])
1719 1719
1720 1720 for j in range(self.buffer.shape[0]):
1721 1721 self.sshProfiles[j] = numpy.transpose(self.buffer[j])
1722 1722
1723 1723 profileIndex = self.nsamples
1724 1724 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1725 1725 ippSeconds = (deltaHeight*1.0e-6)/(0.15)
1726 1726 #print("ippSeconds, dH: ",ippSeconds,deltaHeight)
1727 1727 try:
1728 1728 if dataOut.concat_m is not None:
1729 1729 ippSeconds= ippSeconds/float(dataOut.concat_m)
1730 1730 #print "Profile concat %d"%dataOut.concat_m
1731 1731 except:
1732 1732 pass
1733 1733
1734 1734 dataOut.data = self.sshProfiles
1735 1735 dataOut.flagNoData = False
1736 1736 dataOut.heightList = numpy.arange(self.buffer.shape[1]) *self.step*deltaHeight + dataOut.heightList[0]
1737 1737 dataOut.nProfiles = int(dataOut.nProfiles*self.nsamples)
1738 1738
1739 1739 dataOut.profileIndex = profileIndex
1740 1740 dataOut.flagDataAsBlock = True
1741 1741 dataOut.ippSeconds = ippSeconds
1742 1742 dataOut.step = self.step
1743 1743 #print(numpy.shape(dataOut.data))
1744 1744 #exit(1)
1745 1745 #print("new data shape and time:", dataOut.data.shape, dataOut.utctime)
1746 1746
1747 1747 return dataOut
1748 1748 ################################################################################################################
1749 1749 ################################################################################################################
1750 1750 ################################################################################################################
1751 1751 ################################################################################################################
1752 1752
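# Illustrative sketch (added annotation): SSheightProfiles/SSheightProfiles2
# slide a window of `nsamples` heights every `step` samples over each input
# profile, so every window becomes a new short profile and the number of output
# heights is (nHeights - nsamples)//step. Sizes below are assumptions only.
def _ssheight_profiles_example(step=2, nsamples=4):
    import numpy
    nChannels, nHeights = 1, 16
    data = numpy.arange(nChannels * nHeights, dtype=complex).reshape(nChannels, nHeights)
    numberSamples = (nHeights - nsamples) // step
    buffer = numpy.zeros((nChannels, numberSamples, nsamples), dtype=complex)
    for i in range(numberSamples):
        buffer[:, i, :] = data[:, i * step:i * step + nsamples]
    sshProfiles = buffer.transpose(0, 2, 1)     # (nChannels, nsamples, numberSamples)
    return sshProfiles.shape                    # -> (1, 4, 6)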
1753 1753 class SSheightProfiles2(Operation):
1754 1754 '''
1755 1755 Processes the data profile by profile and by blocks
1756 1756 '''
1757 1757
1758 1758
1759 1759 bufferShape = None
1760 1760 profileShape = None
1761 1761 sshProfiles = None
1762 1762 profileIndex = None
1763 1763 #nsamples = None
1764 1764 #step = None
1765 1765 #deltaHeight = None
1766 1766 #init_range = None
1767 1767 __slots__ = ('step', 'nsamples', 'deltaHeight', 'init_range', 'isConfig', '__nChannels',
1768 1768 '__nProfiles', '__nHeis', 'new_nHeights')
1769 1769
1770 1770 def __init__(self, **kwargs):
1771 1771
1772 1772 Operation.__init__(self, **kwargs)
1773 1773 self.isConfig = False
1774 1774
1775 1775 def setup(self,dataOut ,step = None , nsamples = None):
1776 1776
1777 1777 if step == None and nsamples == None:
1778 1778 raise ValueError("step or nsamples should be specified ...")
1779 1779
1780 1780 self.step = step
1781 1781 self.nsamples = nsamples
1782 1782 self.__nChannels = int(dataOut.nChannels)
1783 1783 self.__nProfiles = int(dataOut.nProfiles)
1784 1784 self.__nHeis = int(dataOut.nHeights)
1785 1785
1786 1786 residue = (self.__nHeis - self.nsamples) % self.step
1787 1787 if residue != 0:
1788 1788 print("The residue is %d; (nHeights - nsamples)=%d should be a multiple of step=%d to avoid losing %d samples"%(residue,self.__nHeis - self.nsamples,step,residue))
1789 1789
1790 1790 self.deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1791 1791 self.init_range = dataOut.heightList[0]
1792 1792 #numberProfile = self.nsamples
1793 1793 numberSamples = (self.__nHeis - self.nsamples)/self.step
1794 1794
1795 1795 self.new_nHeights = numberSamples
1796 1796
1797 1797 self.bufferShape = int(self.__nChannels), int(numberSamples), int(self.nsamples) # nchannels, nsamples , nprofiles
1798 1798 self.profileShape = int(self.__nChannels), int(self.nsamples), int(numberSamples) # nchannels, nprofiles, nsamples
1799 1799
1800 1800 self.buffer = numpy.zeros(self.bufferShape , dtype=numpy.complex)
1801 1801 self.sshProfiles = numpy.zeros(self.profileShape, dtype=numpy.complex)
1802 1802
1803 1803 def getNewProfiles(self, data, code=None, repeat=None):
1804 1804
1805 1805 if code is not None:
1806 1806 code = numpy.array(code)
1807 1807 code_block = code
1808 1808
1809 1809 if repeat is not None:
1810 1810 code_block = numpy.repeat(code_block, repeats=repeat, axis=1)
1811 1811 if data.ndim == 2:
1812 1812 data = data.reshape(1,1,self.__nHeis )
1813 1813 #print("buff, data, :",self.buffer.shape, data.shape,self.sshProfiles.shape)
1814 1814 for i in range(int(self.new_nHeights)): # new heights
1815 1815 if code is not None:
1816 1816 self.buffer[:,i,:] = data[:,:,i*self.step:i*self.step + self.nsamples]*code_block
1817 1817 else:
1818 1818 self.buffer[:,i,:] = data[:,:,i*self.step:i*self.step + self.nsamples]#*code[dataOut.profileIndex,:]
1819 1819
1820 1820 for j in range(self.__nChannels): # over the channels
1821 1821 self.sshProfiles[j,:,:] = numpy.transpose(self.buffer[j,:,:])
1822 1822 #print("new profs Done")
1823 1823
1824 1824
1825 1825
1826 1826 def run(self, dataOut, step, nsamples, code = None, repeat = None):
1827 1827
1828 1828 if dataOut.flagNoData == True:
1829 1829 return dataOut
1830 1830 dataOut.flagNoData = True
1831 1831 #print("init data shape:", dataOut.data.shape)
1832 1832 #print("ch: {} prof: {} hs: {}".format(int(dataOut.nChannels),
1833 1833 # int(dataOut.nProfiles),int(dataOut.nHeights)))
1834 1834
1835 1835 profileIndex = None
1836 1836 # if not dataOut.flagDataAsBlock:
1837 1837 # dataOut.nProfiles = 1
1838 1838
1839 1839 if not self.isConfig:
1840 1840 self.setup(dataOut, step=step , nsamples=nsamples)
1841 1841 #print("Setup done")
1842 1842 self.isConfig = True
1843 1843
1844 1844 dataBlock = None
1845 1845
1846 1846 nprof = 1
1847 1847 if dataOut.flagDataAsBlock:
1848 1848 nprof = int(dataOut.nProfiles)
1849 1849
1850 1850 #print("dataOut nProfiles:", dataOut.nProfiles)
1851 1851 for profile in range(nprof):
1852 1852 if dataOut.flagDataAsBlock:
1853 1853 #print("read blocks")
1854 1854 self.getNewProfiles(dataOut.data[:,profile,:], code=code, repeat=repeat)
1855 1855 else:
1856 1856 #print("read profiles")
1857 1857 self.getNewProfiles(dataOut.data, code=code, repeat=repeat) # only one channel
1858 1858 if profile == 0:
1859 1859 dataBlock = self.sshProfiles.copy()
1860 1860 else: #by blocks
1861 1861 dataBlock = numpy.concatenate((dataBlock,self.sshProfiles), axis=1) #profile axis
1862 1862 #print("by blocks: ",dataBlock.shape, self.sshProfiles.shape)
1863 1863
1864 1864 profileIndex = self.nsamples
1865 1865 #deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1866 1866 ippSeconds = (self.deltaHeight*1.0e-6)/(0.15)
1867 1867
1868 1868
1869 1869 dataOut.data = dataBlock
1870 1870 #print("show me: ",self.step,self.deltaHeight, dataOut.heightList, self.new_nHeights)
1871 1871 dataOut.heightList = numpy.arange(int(self.new_nHeights)) *self.step*self.deltaHeight + self.init_range
1872 1872
1873 1873 dataOut.ippSeconds = ippSeconds
1874 1874 dataOut.step = self.step
1875 1875 dataOut.flagNoData = False
1876 1876 if dataOut.flagDataAsBlock:
1877 1877 dataOut.nProfiles = int(dataOut.nProfiles*self.nsamples)
1878 1878
1879 1879 else:
1880 1880 dataOut.nProfiles = int(self.nsamples)
1881 1881 dataOut.profileIndex = dataOut.nProfiles
1882 1882 dataOut.flagDataAsBlock = True
1883 1883
1884 1884 dataBlock = None
1885 1885
1886 1886 #print("new data shape:", dataOut.data.shape, dataOut.utctime)
1887 1887
1888 1888 return dataOut
1889 1889
1890 1890
1891 1891
1892 1892
1893 1893 #import skimage.color
1894 1894 #import skimage.io
1895 1895 #import matplotlib.pyplot as plt
1896 1896
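# Illustrative sketch (added annotation): removeProfileByFaradayHS.filterSatsProfiles
# below sorts the power at each height, estimates how many of the strongest
# samples are outliers (hildebrand_sekhon2 in the original; a simple percentile
# threshold is used here only as a stand-in), histograms the offending profile
# indexes and flags profile ranges whose bins exceed a count threshold.
# All sizes and thresholds below are assumptions for the example only.
def _outlier_profile_flagging_example():
    import numpy
    nProfiles, nHeights, margin, thHist = 960, 8, 5, 3
    power = numpy.abs(numpy.random.randn(nProfiles, nHeights)) ** 2
    power[100:103, :] *= 50.0                       # inject a satellite-like burst
    outlier_ids = []
    for h in range(nHeights):
        th = numpy.percentile(power[:, h], 99)      # stand-in for hildebrand_sekhon2
        outlier_ids.extend(numpy.where(power[:, h] > th)[0])
    bins = numpy.linspace(0, nProfiles, 96, endpoint=False)
    hist, edges = numpy.histogram(numpy.sort(outlier_ids), bins=bins)
    flagged_bins = edges[:-1][hist > thHist]
    flagged_profiles = sorted({p for b in flagged_bins.astype(int)
                               for p in range(b - margin, b + margin)})
    return flagged_profiles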
1897 1897 class removeProfileByFaradayHS(Operation):
1898 1898 '''
1899 1899 Remove outlier profiles (e.g. satellite echoes) from accumulated voltage blocks before they are released
1900 1900 '''
1901 #isConfig = False
1902 #n = None
1903 1901
1904 #__dataReady = False
1905 1902 __buffer_data = []
1906 1903 __buffer_times = []
1907 #__initime = None
1908 #__count_exec = 0
1909 #__profIndex = 0
1910 buffer = None
1911 #lenProfileOut = 1
1912 1904
1913 #init_prof = 0
1914 #end_prof = 0
1905 buffer = None
1915 1906
1916 #first_utcBlock = None
1917 1907 outliers_IDs_list = []
1918 #__dh = 0
1908
1919 1909
1920 1910 __slots__ = ('n','navg','profileMargin','thHistOutlier','minHei_idx','maxHei_idx','nHeights',
1921 1911 '__dh','first_utcBlock','__profIndex','init_prof','end_prof','lenProfileOut','nChannels',
1922 1912 '__count_exec','__initime','__dataReady','__ipp')
1923 1913 def __init__(self, **kwargs):
1924 1914
1925 1915 Operation.__init__(self, **kwargs)
1926 1916 self.isConfig = False
1927 1917
1928 1918 def setup(self,dataOut, n=None , navg=0.8, profileMargin=50,thHistOutlier=3, minHei=None, maxHei=None):
1929 1919
1930 1920 if n == None:
1931 1921 raise ValueError("nprofiles (n) should be specified ...")
1932 1922
1933 1923 if n != None:
1934 1924 self.n = n
1935 1925
1936 1926 self.navg = navg
1937 1927 self.profileMargin = profileMargin
1938 1928 self.thHistOutlier = thHistOutlier
1939 1929 self.__profIndex = 0
1940 1930 self.buffer = None
1941 1931 self._ipp = dataOut.ippSeconds
1942 1932 self.n_prof_released = 0
1943 1933 self.heightList = dataOut.heightList
1944 1934 self.init_prof = 0
1945 1935 self.end_prof = 0
1946 1936 self.__count_exec = 0
1947 1937 self.__profIndex = 0
1948 1938 self.first_utcBlock = None
1949 1939 self.__dh = dataOut.heightList[1] - dataOut.heightList[0]
1950 1940 minHei = minHei
1951 1941 maxHei = maxHei
1952 1942 if minHei==None :
1953 1943 minHei = dataOut.heightList[0]
1954 1944 if maxHei==None :
1955 1945 maxHei = dataOut.heightList[-1]
1956 1946 self.minHei_idx,self.maxHei_idx = getHei_index(minHei, maxHei, dataOut.heightList)
1957 1947
1958 1948 self.nChannels = dataOut.nChannels
1959 1949 self.nHeights = dataOut.nHeights
1960 1950
1961 1951 def filterSatsProfiles(self):
1962 1952 data = self.__buffer_data
1963 1953 #print(data.shape)
1964 1954 nChannels, profiles, heights = data.shape
1965 1955 indexes=[]
1966 1956 outliers_IDs=[]
1967 1957 for c in range(nChannels):
1968 1958 for h in range(self.minHei_idx, self.maxHei_idx):
1969 1959 power = data[c,:,h] * numpy.conjugate(data[c,:,h])
1970 1960 power = power.real
1971 1961 #power = (numpy.abs(data[c,:,h].real))
1972 1962 sortdata = numpy.sort(power, axis=None)
1973 1963 sortID=power.argsort()
1974 1964 index = _noise.hildebrand_sekhon2(sortdata,self.navg) # 0.75 -> good value
1975 1965
1976 1966 indexes.append(index)
1977 1967 outliers_IDs=numpy.append(outliers_IDs,sortID[index:])
1978 1968 # print(outliers_IDs)
1979 1969 # fig,ax = plt.subplots()
1980 1970 # #ax.set_title(str(k)+" "+str(j))
1981 1971 # x=range(len(sortdata))
1982 1972 # ax.scatter(x,sortdata)
1983 1973 # ax.axvline(index)
1984 1974 # plt.grid()
1985 1975 # plt.show()
1986 1976
1977
1987 1978 outliers_IDs = outliers_IDs.astype(numpy.dtype('int64'))
1988 1979 outliers_IDs = numpy.unique(outliers_IDs)
1989 1980 outs_lines = numpy.sort(outliers_IDs)
1990 1981 # #print("outliers Ids: ", outs_lines, outs_lines.shape)
1991 1982 #hist, bin_edges = numpy.histogram(outs_lines, bins=10, density=True)
1992 1983
1993 1984
1994 1985 # Grouping the outlier histogram,
1986 #my_bins = numpy.linspace(0,int(profiles), int(profiles/100), endpoint=False)
1995 1987 my_bins = numpy.linspace(0,9600, 96, endpoint=False)
1996 1988
1997
1998 1989 hist, bins = numpy.histogram(outs_lines,bins=my_bins)
1999 1990 hist_outliers_indexes = numpy.where(hist > self.thHistOutlier) # it is an outlier
2000 1991 #print(hist_outliers_indexes[0])
2001 1992 bins_outliers_indexes = [int(i) for i in bins[hist_outliers_indexes]] #
2002 1993 #print(bins_outliers_indexes)
2003 1994 outlier_loc_index = []
2004 1995
2005 #outlier_loc_index = [k for k in range(bins_outliers_indexes[n]-50,bins_outliers_indexes[n+1]+50) for n in range(len(bins_outliers_indexes)-1) ]
2006 for n in range(len(bins_outliers_indexes)-1):
2007 #outlier_loc_index = [k for k in range(bins_outliers_indexes[n]-50,bins_outliers_indexes[n+1]+50)]
2008 for k in range(bins_outliers_indexes[n]-self.profileMargin,bins_outliers_indexes[n+1]+self.profileMargin):
2009 outlier_loc_index.append(k)
1996
1997 # for n in range(len(bins_outliers_indexes)-1):
1998 # for k in range(bins_outliers_indexes[n]-self.profileMargin,bins_outliers_indexes[n+1]+self.profileMargin):
1999 # outlier_loc_index.append(k)
2000
2001 outlier_loc_index = [e for n in range(len(bins_outliers_indexes)-1) for e in range(bins_outliers_indexes[n]-self.profileMargin,bins_outliers_indexes[n+1]+self.profileMargin) ]
2010 2002
2011 2003 outlier_loc_index = numpy.asarray(outlier_loc_index)
2012 #print(numpy.unique(outlier_loc_index))
2004 #print(len(numpy.unique(outlier_loc_index)), numpy.unique(outlier_loc_index))
2013 2005
2014 2006
2015 2007
2016 2008 # x, y = numpy.meshgrid(numpy.arange(profiles), self.heightList)
2017 2009 # fig, ax = plt.subplots(1,2,figsize=(8, 6))
2018 2010 #
2019 2011 # dat = data[0,:,:].real
2020 2012 # m = numpy.nanmean(dat)
2021 2013 # o = numpy.nanstd(dat)
2022 2014 # #print(m, o, x.shape, y.shape)
2023 2015 # c = ax[0].pcolormesh(x, y, dat.T, cmap ='YlGnBu', vmin = (m-2*o), vmax = (m+2*o))
2024 2016 # ax[0].vlines(outs_lines,200,600, linestyles='dashed', label = 'outs', color='w')
2025 2017 # fig.colorbar(c)
2026 2018 # ax[0].vlines(outlier_loc_index,650,750, linestyles='dashed', label = 'outs', color='r')
2027 2019 # ax[1].hist(outs_lines,bins=my_bins)
2028 2020 # plt.show()
2029 2021
2030 2022
2031 2023 self.outliers_IDs_list = numpy.unique(outlier_loc_index)
2032 2024 return data
2033 2025
2034 2026 def cleanOutliersByBlock(self):
2035 2027 #print(self.__buffer_data[0].shape)
2036 2028 data = self.__buffer_data#.copy()
2037 2029 #print("cleaning shape inpt: ",data.shape)
2038 2030 '''
2039 2031 self.__buffer_data = []
2040 2032
2041 2033 spectrum = numpy.fft.fft2(data, axes=(0,2))
2042 2034 #print("spc : ",spectrum.shape)
2043 2035 (nch,nsamples, nh) = spectrum.shape
2044 2036 data2 = None
2045 2037 #print(data.shape)
2046 2038 spectrum2 = spectrum.copy()
2047 2039 for ch in range(nch):
2048 2040 dh = self.__dh
2049 2041 dt1 = (dh*1.0e-6)/(0.15)
2050 2042 dt2 = self.__buffer_times[1]-self.__buffer_times[0]
2051 2043 freqv = numpy.fft.fftfreq(nh, d=dt1)
2052 2044 freqh = numpy.fft.fftfreq(self.n, d=dt2)
2053 2045 #print("spc loop: ")
2054 2046
2055 2047
2056 2048
2057 2049 x, y = numpy.meshgrid(numpy.sort(freqh),numpy.sort(freqv))
2058 2050 z = numpy.abs(spectrum[ch,:,:])
2059 2051 # Find all peaks higher than the 98th percentile
2060 2052 peaks = z < numpy.percentile(z, 98)
2061 2053 #print(peaks)
2062 2054 # Set those peak coefficients to zero
2063 2055 spectrum2 = spectrum2 * peaks.astype(int)
2064 2056 data2 = numpy.fft.ifft2(spectrum2)
2065 2057
2066 2058 dat = numpy.log10(z.T)
2067 2059 dat2 = numpy.log10(spectrum2.T)
2068 2060
2069 2061 # m = numpy.mean(dat)
2070 2062 # o = numpy.std(dat)
2071 2063 # fig, ax = plt.subplots(2,1,figsize=(8, 6))
2072 2064 #
2073 2065 # c = ax[0].pcolormesh(x, y, dat, cmap ='YlGnBu', vmin = (m-2*o), vmax = (m+2*o))
2074 2066 # #c = ax.pcolor( z.T , cmap ='gray', vmin = (m-2*o), vmax = (m+2*o))
2075 2067 # date_time = datetime.datetime.fromtimestamp(self.__buffer_times[0]).strftime('%Y-%m-%d %H:%M:%S.%f')
2076 2068 # #strftime('%Y-%m-%d %H:%M:%S')
2077 2069 # ax[0].set_title('Spectrum magnitude '+date_time)
2078 2070 # fig.canvas.set_window_title('Spectrum magnitude {} '.format(self.n)+date_time)
2079 2071 #
2080 2072 #
2081 2073 # c = ax[1].pcolormesh(x, y, dat, cmap ='YlGnBu', vmin = (m-2*o), vmax = (m+2*o))
2082 2074 # fig.colorbar(c)
2083 2075 # plt.show()
2084 2076
2085 2077 #print(data2.shape)
2086 2078
2087 2079 data = data2
2088 2080
2089 2081 #cleanBlock = numpy.fft.ifft2(spectrum, axes=(0,2)).reshape()
2090 2082 '''
2091 2083 #print("cleanOutliersByBlock Done")
2092 2084
2093 2085 return self.filterSatsProfiles()
2094 2086
2095 2087
2096 2088
2097 2089 def fillBuffer(self, data, datatime):
2098 2090
2099 2091 if self.__profIndex == 0:
2100 2092 self.__buffer_data = data.copy()
2101 2093
2102 2094 else:
2103 2095 self.__buffer_data = numpy.concatenate((self.__buffer_data, data), axis=1)  # concatenate along the profile axis
2104 2096 self.__profIndex += 1
2105 2097 #self.__buffer_times.append(datatime)
2106 2098
2107 2099 def getData(self, data, datatime=None):
2108 2100
2109 2101 if self.__profIndex == 0:
2110 2102 self.__initime = datatime
2111 2103
2112 2104
2113 2105 self.__dataReady = False
2114 2106
2115 2107 self.fillBuffer(data, datatime)
2116 2108 dataBlock = None
2117 2109
2118 2110 if self.__profIndex == self.n:
2119 2111 #print("apnd : ",data)
2120 2112 #dataBlock = self.cleanOutliersByBlock()
2121 2113 dataBlock = self.filterSatsProfiles()
2122 2114 self.__dataReady = True
2123 2115
2124 2116 return dataBlock
2125 2117
2126 2118 if dataBlock is None:
2127 2119 return None
2128 2120
2129 2121
2130 2122
2131 2123 return dataBlock
2132 2124
2133 2125 def releaseBlock(self):
2134 2126
2135 2127 if self.n % self.lenProfileOut != 0:
2136 2128 raise ValueError("lenProfileOut %d must be a submultiple of nProfiles %d" % (self.lenProfileOut, self.n))
2137 2129 return None
2138 2130
2139 2131 data = self.buffer[:, self.init_prof:self.end_prof, :]  # ch, prof, alt
2140 2132
2141 2133 self.init_prof = self.end_prof
2142 2134 self.end_prof += self.lenProfileOut
2143 2135 #print("data release shape: ",dataOut.data.shape, self.end_prof)
2144 2136 self.n_prof_released += 1
2145 2137
2146 2138
2147 2139 #print("f_no_data ", dataOut.flagNoData)
2148 2140 return data
2149 2141
2150 2142 def run(self, dataOut, n=None, navg=0.8, nProfilesOut=1, profile_margin=50,th_hist_outlier=3,minHei=None, maxHei=None):
2151 2143 #print("run op buffer 2D",dataOut.ippSeconds)
2152 2144 # self.nChannels = dataOut.nChannels
2153 2145 # self.nHeights = dataOut.nHeights
2154 2146
2155 2147 if not self.isConfig:
2156 2148 #print("init p idx: ", dataOut.profileIndex )
2157 2149 self.setup(dataOut,n=n, navg=navg,profileMargin=profile_margin,
2158 2150 thHistOutlier=th_hist_outlier,minHei=minHei, maxHei=maxHei)
2159 2151 self.isConfig = True
2160 2152
2161 2153 dataBlock = None
2162 2154
2163 2155 if not dataOut.buffer_empty:  # there is accumulated data pending release
2164 2156
2165 2157 if self.init_prof == 0:
2166 2158 self.n_prof_released = 0
2167 2159 self.lenProfileOut = nProfilesOut
2168 2160 dataOut.flagNoData = False
2169 2161 #print("tp 2 ",dataOut.data.shape)
2170 2162
2171 2163 self.init_prof = 0
2172 2164 self.end_prof = self.lenProfileOut
2173 2165
2174 2166 dataOut.nProfiles = self.lenProfileOut
2175 2167 if nProfilesOut == 1:
2176 2168 dataOut.flagDataAsBlock = False
2177 2169 else:
2178 2170 dataOut.flagDataAsBlock = True
2179 2171 #print("prof: ",self.init_prof)
2180 2172 dataOut.flagNoData = False
2181 2173 if numpy.isin(self.n_prof_released, self.outliers_IDs_list):
2182 2174 #print("omitting: ", self.n_prof_released)
2183 2175 dataOut.flagNoData = True
2184 2176 dataOut.ippSeconds = self._ipp
2185 2177 dataOut.utctime = self.first_utcBlock + self.init_prof*self._ipp
2186 2178 # print("time: ", dataOut.utctime, self.first_utcBlock, self.init_prof,self._ipp,dataOut.ippSeconds)
2187 2179 #dataOut.data = self.releaseBlock()
2188 2180 #########################################################3
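# Inline copy of releaseBlock(): slice the next lenProfileOut profiles out of the buffered block.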
2189 2181 if self.n % self.lenProfileOut != 0:
2190 2182 raise ValueError("lenProfileOut %d must be a submultiple of nProfiles %d" % (self.lenProfileOut, self.n))
2191 2183 return None
2192 2184
2193 2185 dataOut.data = self.buffer[:, self.init_prof:self.end_prof, :]  # ch, prof, alt
2194 2186
2195 2187 self.init_prof = self.end_prof
2196 2188 self.end_prof += self.lenProfileOut
2197 2189 #print("data release shape: ",dataOut.data.shape, self.end_prof)
2198 2190 self.n_prof_released += 1
2199 2191
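# All sub-blocks of the buffered data have been released: reset the indices
# and buffers so a new block of profiles can be accumulated.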
2200 2192 if self.end_prof >= (self.n +self.lenProfileOut):
2201 2193
2202 2194 self.init_prof = 0
2203 2195 self.__profIndex = 0
2204 2196 self.buffer = None
2205 2197 dataOut.buffer_empty = True
2206 2198 self.outliers_IDs_list = []
2207 2199 self.n_prof_released = 0
2208 2200 dataOut.flagNoData = False  # send the last block even if it is an outlier
2209 2201 #print("cleaning...", dataOut.buffer_empty)
2210 2202 dataOut.profileIndex = 0 #self.lenProfileOut
2211 2203 ####################################################################
2212 2204 return dataOut
2213 2205
2214 2206
2215 2207 #print("tp 223 ",dataOut.data.shape)
2216 2208 dataOut.flagNoData = True
2217 2209
2218 2210
2219 2211
2220 2212 try:
2221 2213 #dataBlock = self.getData(dataOut.data.reshape(self.nChannels,1,self.nHeights), dataOut.utctime)
2222 2214 dataBlock = self.getData(numpy.reshape(dataOut.data,(self.nChannels,1,self.nHeights)), dataOut.utctime)
2223 2215 self.__count_exec +=1
2224 2216 except Exception as e:
2225 2217 print("Error getting profiles data",self.__count_exec )
2226 2218 print(e)
2227 2219 sys.exit()
2228 2220
2229 2221 if self.__dataReady:
2230 2222 #print("omitting: ", len(self.outliers_IDs_list))
2231 2223 self.__count_exec = 0
2232 2224 #dataOut.data =
2233 2225 #self.buffer = numpy.flip(dataBlock, axis=1)
2234 2226 self.buffer = dataBlock
2235 2227 self.first_utcBlock = self.__initime
2236 2228 dataOut.utctime = self.__initime
2237 2229 dataOut.nProfiles = self.__profIndex
2238 2230 #dataOut.flagNoData = False
2239 2231 self.init_prof = 0
2240 2232 self.__profIndex = 0
2241 2233 self.__initime = None
2242 2234 dataBlock = None
2243 2235 self.__buffer_times = []
2244 2236 dataOut.error = False
2245 2237 dataOut.useInputBuffer = True
2246 2238 dataOut.buffer_empty = False
2247 2239 #print("1 ch: {} prof: {} hs: {}".format(int(dataOut.nChannels),int(dataOut.nProfiles),int(dataOut.nHeights)))
2248 2240
2249 2241
2250 2242
2251 2243 #print(self.__count_exec)
2252 2244
2253 2245 return dataOut
2254 2246
2255 2247 class RemoveProfileSats(Operation):
2256 2248 '''
2257 2249 Discards profiles contaminated with satellite signals.
2258 2250 In: minHei = min_sat_range
2259 2251 maxHei = max_sat_range
2260 2252 minRef = min_hei_ref
2261 2253 maxRef = max_hei_ref
2262 2254 th = required excess (dB) of the satellite-range mean power over the noise reference
2263 2255 Out:
2264 2256 clean profiles (contaminated profiles are returned with flagNoData = True)
2265 2257 '''
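# Detection criterion (see compareRanges below), in dB:
#     m_sats = mean(10*log10(|v[h]|^2)) over the satellite height range
#     m_ref  = Hildebrand-Sekhon noise estimate over the reference height range
#     a profile is rejected when m_sats > m_ref + th
# Minimal usage sketch, assuming the usual schainpy Project/controller API
# (parameter values are illustrative assumptions, not recommendations):
#     opObj = procUnitConf.addOperation(name='RemoveProfileSats', optype='other')
#     opObj.addParameter(name='minHei', value='400', format='float')
#     opObj.addParameter(name='maxHei', value='600', format='float')
#     opObj.addParameter(name='minRef', value='200', format='float')
#     opObj.addParameter(name='maxRef', value='300', format='float')
#     opObj.addParameter(name='th', value='5', format='float')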
2266 2258
2267 2259 isConfig = False
2268 2260 min_sats = 0
2269 2261 max_sats = 999999999
2270 2262 min_ref= 0
2271 2263 max_ref= 9999999999
2272 2264 needReshape = False
2273 2265 count = 0
2274 2266 thdB = 0
2275 2267 byRanges = False
2276 2268 min_sats = None
2277 2269 max_sats = None
2278 2270 noise = 0
2279 2271
2280 2272 def __init__(self, **kwargs):
2281 2273
2282 2274 Operation.__init__(self, **kwargs)
2283 2275 self.isConfig = False
2284 2276
2285 2277
2286 2278 def setup(self, dataOut, minHei, maxHei, minRef, maxRef, th, thdB, rangeHeiList):
2287 2279
2288 2280 if rangeHeiList is not None:
2289 2281 self.byRanges = True
2290 2282 else:
2291 2283 if minHei is None or maxHei is None:
2292 2284 raise ValueError("Height parameters minHei and maxHei are required")
2293 2285 if minRef is None or maxRef is None:
2294 2286 raise ValueError("Reference height parameters minRef and maxRef are required")
2295 2287
2296 2288 if self.byRanges:
2297 2289 self.min_sats = []
2298 2290 self.max_sats = []
2299 2291 for min,max in rangeHeiList:
2300 2292 a,b = getHei_index(min, max, dataOut.heightList)
2301 2293 self.min_sats.append(a)
2302 2294 self.max_sats.append(b)
2303 2295 else:
2304 2296 self.min_sats, self.max_sats = getHei_index(minHei, maxHei, dataOut.heightList)
2305 2297 self.min_ref, self.max_ref = getHei_index(minRef, maxRef, dataOut.heightList)
2306 2298 self.th = th
2307 2299 self.thdB = thdB
2308 2300 self.isConfig = True
2309 2301
2310 2302
2311 2303 def compareRanges(self,data, minHei,maxHei):
2312 2304
2313 2305 # ref = data[0,self.min_ref:self.max_ref] * numpy.conjugate(data[0,self.min_ref:self.max_ref])
2314 2306 # p_ref = 10*numpy.log10(ref.real)
2315 2307 # m_ref = numpy.mean(p_ref)
2316 2308
2317 2309 m_ref = self.noise
2318 2310
2319 2311 sats = data[0,minHei:maxHei] * numpy.conjugate(data[0,minHei:maxHei])
2320 2312 p_sats = 10*numpy.log10(sats.real)
2321 2313 m_sats = numpy.mean(p_sats)
2322 2314
2323 2315 if m_sats > (m_ref + self.th): #and (m_sats > self.thdB):
2324 2316 #print("msats: ",m_sats," \tmRef: ", m_ref, "\t",(m_sats - m_ref))
2325 2317 #print("Removing profiles...")
2326 2318 return False
2327 2319
2328 2320 return True
2329 2321
2330 2322 def isProfileClean(self, data):
2331 2323 '''
2332 2324 Analyzes channel 0 only; if it is contaminated, all channels are discarded.
2333 2325 '''
2334 2326
2335 2327 clean = True
2336 2328
2337 2329 if self.byRanges:
2338 2330
2339 2331 for n in range(len(self.min_sats)):
2340 2332 c = self.compareRanges(data,self.min_sats[n],self.max_sats[n])
2341 2333 clean = clean and c
2342 2334 else:
2343 2335
2344 2336 clean = (self.compareRanges(data, self.min_sats,self.max_sats))
2345 2337
2346 2338 return clean
2347 2339
2348 2340
2349 2341
2350 2342 def run(self, dataOut, minHei=None, maxHei=None, minRef=None, maxRef=None, th=5, thdB=65, rangeHeiList=None):
2351 2343 dataOut.flagNoData = True
2352 2344
2353 2345 if not self.isConfig:
2354 2346 self.setup(dataOut, minHei, maxHei, minRef, maxRef, th, thdB, rangeHeiList)
2355 2347 self.isConfig = True
2356 2348 #print(self.min_sats,self.max_sats)
2357 2349 if dataOut.flagDataAsBlock:
2358 2350 raise ValueError("RemoveProfileSats can only be used when voltage data has been read profile by profile (getBlock = False)")
2359 2351
2360 2352 else:
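# Estimate the noise floor (dB) over the reference heights with the
# Hildebrand-Sekhon method; compareRanges() uses it as the reference level m_ref.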
2361 2353 self.noise =10*numpy.log10(dataOut.getNoisebyHildebrand(ymin_index=self.min_ref, ymax_index=self.max_ref))
2362 2354 if not self.isProfileClean(dataOut.data):
2363 2355 return dataOut
2364 2356 #dataOut.data = numpy.full((dataOut.nChannels,dataOut.nHeights),numpy.NAN)
2365 2357 #self.count += 1
2366 2358
2367 2359 dataOut.flagNoData = False
2368 2360
2369 2361 return dataOut