##// END OF EJS Templates
completado op clean Rayleigh, modificada la escritura y lectura de hdf5 para potencia, cambio en gráficos para uso del ChannelList, añadir en otros pendiente
joabAM -
r1397:9109c52a497c
parent child
Show More
@@ -0,0 +1,11
#!/home/soporte/workspace/schain/ENV_DIR/bin/python3

# -*- coding: utf-8 -*-
# Console-script wrapper auto-generated by setuptools inside the virtualenv
# (ENV_DIR/bin). NOTE(review): generated venv scripts are normally excluded
# from version control — confirm this was committed intentionally.
import re
import sys

from setuptools.command.easy_install import main

if __name__ == '__main__':
    # Strip a trailing '-script.py(w)' or '.exe' so the tool sees its
    # canonical program name, then delegate to easy_install's entry point.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
@@ -0,0 +1,11
#!/home/soporte/workspace/schain/ENV_DIR/bin/python3

# -*- coding: utf-8 -*-
# Duplicate auto-generated setuptools console-script wrapper (see the
# easy_install script above). NOTE(review): likely should not be versioned.
import re
import sys

from setuptools.command.easy_install import main

if __name__ == '__main__':
    # Normalize argv[0] (drop '-script.py(w)'/'.exe') before dispatching.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
@@ -0,0 +1,11
#!/home/soporte/workspace/schain/ENV_DIR/bin/python3

# -*- coding: utf-8 -*-
# Auto-generated pip console-script wrapper from the project virtualenv.
# NOTE(review): `from pip import main` only works on pip < 10; newer pip
# moved it — confirm the pinned pip version if this script is ever exercised.
import re
import sys

from pip import main

if __name__ == '__main__':
    # Normalize argv[0] (drop '-script.py(w)'/'.exe') before dispatching.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
@@ -0,0 +1,11
#!/home/soporte/workspace/schain/ENV_DIR/bin/python3

# -*- coding: utf-8 -*-
# Duplicate auto-generated pip wrapper (e.g. pip3) — same caveats as above:
# generated venv script, and `from pip import main` assumes pip < 10.
import re
import sys

from pip import main

if __name__ == '__main__':
    # Normalize argv[0] (drop '-script.py(w)'/'.exe') before dispatching.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
@@ -0,0 +1,11
#!/home/soporte/workspace/schain/ENV_DIR/bin/python3

# -*- coding: utf-8 -*-
# Duplicate auto-generated pip wrapper (e.g. pip3.x) — generated venv
# script; `from pip import main` assumes pip < 10.
import re
import sys

from pip import main

if __name__ == '__main__':
    # Normalize argv[0] (drop '-script.py(w)'/'.exe') before dispatching.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
@@ -0,0 +1,1
1 python3 No newline at end of file
@@ -0,0 +1,1
1 /usr/bin/python3 No newline at end of file
@@ -0,0 +1,1
1 lib No newline at end of file
@@ -0,0 +1,3
1 home = /usr/bin
2 include-system-site-packages = false
3 version = 3.6.9
1 NO CONTENT: new file 100644, binary diff hidden
1 NO CONTENT: new file 100644, binary diff hidden
1 NO CONTENT: new file 100644, binary diff hidden
1 NO CONTENT: new file 100644, binary diff hidden
1 NO CONTENT: new file 100644, binary diff hidden
1 NO CONTENT: new file 100644, binary diff hidden
1 NO CONTENT: new file 100644, binary diff hidden
1 NO CONTENT: new file 100644, binary diff hidden
1 NO CONTENT: new file 100644, binary diff hidden
1 NO CONTENT: new file 100644, binary diff hidden
1 NO CONTENT: new file 100644, binary diff hidden
1 NO CONTENT: new file 100644, binary diff hidden
1 NO CONTENT: new file 100644, binary diff hidden
1 NO CONTENT: new file 100644, binary diff hidden
1 NO CONTENT: new file 100644, binary diff hidden
1 NO CONTENT: new file 100644, binary diff hidden
1 NO CONTENT: new file 100644, binary diff hidden
1 NO CONTENT: new file 100644, binary diff hidden
1 NO CONTENT: new file 100644, binary diff hidden
1 NO CONTENT: new file 100644, binary diff hidden
1 NO CONTENT: new file 100644, binary diff hidden
1 NO CONTENT: new file 100644, binary diff hidden
1 NO CONTENT: new file 100644, binary diff hidden
@@ -1,1074 +1,1073
1 1 # Copyright (c) 2012-2020 Jicamarca Radio Observatory
2 2 # All rights reserved.
3 3 #
4 4 # Distributed under the terms of the BSD 3-clause license.
5 5 """Definition of diferent Data objects for different types of data
6 6
7 7 Here you will find the diferent data objects for the different types
8 8 of data, this data objects must be used as dataIn or dataOut objects in
9 9 processing units and operations. Currently the supported data objects are:
10 10 Voltage, Spectra, SpectraHeis, Fits, Correlation and Parameters
11 11 """
12 12
13 13 import copy
14 14 import numpy
15 15 import datetime
16 16 import json
17 17
18 18 import schainpy.admin
19 19 from schainpy.utils import log
20 20 from .jroheaderIO import SystemHeader, RadarControllerHeader
21 21 from schainpy.model.data import _noise
22 22
23 23
def getNumpyDtype(dataTypeCode):
    """Return the numpy complex-pair dtype for a JRO data-type code.

    Parameters
    ----------
    dataTypeCode : int
        Code 0..5 as stored in the JRO headers: 0=int8, 1=int16, 2=int32,
        3=int64, 4=float32, 5=float64 (real/imag pairs, little-endian).

    Returns
    -------
    numpy.dtype
        Structured dtype with 'real' and 'imag' fields.

    Raises
    ------
    ValueError
        If ``dataTypeCode`` is not one of the known codes.
    """
    # Table lookup replaces the original 6-branch if/elif chain.
    formats = {0: '<i1', 1: '<i2', 2: '<i4', 3: '<i8', 4: '<f4', 5: '<f8'}
    if dataTypeCode not in formats:
        raise ValueError('dataTypeCode was not defined')
    fmt = formats[dataTypeCode]
    return numpy.dtype([('real', fmt), ('imag', fmt)])
42 42
43 43
def getDataTypeCode(numpyDtype):
    """Map a structured ('real', 'imag') dtype to its JRO type code.

    Inverse of :func:`getNumpyDtype`. Returns the integer code 0..5, or
    None when the dtype is not one of the recognized little-endian pair
    layouts (callers rely on None meaning "unknown").
    """
    # Codes are positional: index in this tuple == JRO data-type code.
    for code, fmt in enumerate(('<i1', '<i2', '<i4', '<i8', '<f4', '<f8')):
        if numpyDtype == numpy.dtype([('real', fmt), ('imag', fmt)]):
            return code
    return None
62 62
63 63
def hildebrand_sekhon(data, navg):
    """
    This method is for the objective determination of the noise level in Doppler spectra. This
    implementation technique is based on the fact that the standard deviation of the spectral
    densities is equal to the mean spectral density for white Gaussian noise

    Inputs:
        Data :    heights
        navg :    numbers of averages

    Return:
        mean :    noise's level
    """

    # Flatten and sort ascending; the compiled routine walks the sorted values.
    sortdata = numpy.sort(data, axis=None)
    '''
    lenOfData = len(sortdata)
    nums_min = lenOfData*0.2

    if nums_min <= 5:

        nums_min = 5

    sump = 0.
    sumq = 0.

    j = 0
    cont = 1

    while((cont == 1)and(j < lenOfData)):

        sump += sortdata[j]
        sumq += sortdata[j]**2

        if j > nums_min:
            rtest = float(j)/(j-1) + 1.0/navg
            if ((sumq*j) > (rtest*sump**2)):
                j = j - 1
                sump = sump - sortdata[j]
                sumq = sumq - sortdata[j]**2
                cont = 0

        j += 1

    lnoise = sump / j
    '''
    # Delegate to the C implementation in schainpy.model.data._noise; the
    # triple-quoted block above is the retired pure-Python version kept for
    # reference.
    return _noise.hildebrand_sekhon(sortdata, navg)
111 111
112 112
class Beam:
    """Beam pointing description: per-beam code, azimuth and zenith lists."""

    def __init__(self):
        # Parallel lists, one entry per configured beam.
        self.codeList, self.azimuthList, self.zenithList = [], [], []
120 120
121 121
class GenericData(object):
    """Base class providing copy semantics and data-availability flags."""

    # True while no valid data has been loaded into the object.
    flagNoData = True

    def copy(self, inputObj=None):
        """Copy every attribute of ``inputObj`` into ``self``.

        With no argument, return a deep copy of ``self`` instead (the
        original object is left untouched in both modes).

        Sequences are shallow-copied so the two objects do not share list
        state; nested objects are copied through their own ``copy()``.
        """
        if inputObj is None:  # fixed: identity check instead of `== None`
            return copy.deepcopy(self)

        for key in list(inputObj.__dict__.keys()):

            attribute = inputObj.__dict__[key]

            # If this attribute is a tuple or list, slice-copy it.
            if isinstance(attribute, (tuple, list)):
                self.__dict__[key] = attribute[:]
                continue

            # If this attribute is another object or instance, delegate.
            if hasattr(attribute, '__dict__'):
                self.__dict__[key] = attribute.copy()
                continue

            # Plain immutable value: direct assignment is safe.
            self.__dict__[key] = attribute

    def deepcopy(self):
        """Return a fully independent deep copy of this object."""
        return copy.deepcopy(self)

    def isEmpty(self):
        """True when the object holds no valid data yet."""
        return self.flagNoData

    def isReady(self):
        """True when valid data is available (inverse of isEmpty)."""
        return not self.flagNoData
158 158
159 159
class JROData(GenericData):
    """Common base for JRO data containers (Voltage, Spectra, Fits, ...).

    Holds the radar headers plus acquisition metadata and derives
    quantities such as nChannels, fmax and vmax from them. Class-level
    values act as defaults; subclasses overwrite them per instance.
    """

    systemHeaderObj = SystemHeader()
    radarControllerHeaderObj = RadarControllerHeader()
    type = None
    datatype = None  # dtype but in string
    nProfiles = None
    heightList = None
    channelList = None
    flagDiscontinuousBlock = False
    useLocalTime = False
    utctime = None
    timeZone = None  # minutes offset from UTC (see ltctime)
    dstFlag = None
    errorCount = None
    blocksize = None
    flagDecodeData = False  # assume the data is not decoded
    flagDeflipData = False  # assume the data is not de-flipped
    flagShiftFFT = False
    nCohInt = None
    windowOfFilter = 1
    C = 3e8  # speed of light [m/s]
    frequency = 49.92e6  # radar operating frequency [Hz]
    realtime = False
    beacon_heiIndexList = None
    last_block = None
    blocknow = None
    azimuth = None
    zenith = None
    beam = Beam()
    profileIndex = None
    error = None
    data = None
    nmodes = None
    metadata_list = ['heightList', 'timeZone', 'type']
    codeList = None
    azimuthList = None
    elevationList = None

    def __str__(self):
        # Fixed: `datatime` is a property, so the original `self.datatime()`
        # called the returned datetime object and raised TypeError whenever
        # an instance was formatted/printed.
        return '{} - {}'.format(self.type, self.datatime)

    def getNoise(self):
        """Noise estimation is type-specific; subclasses must override."""
        raise NotImplementedError

    @property
    def nChannels(self):
        """Number of channels (length of channelList)."""
        return len(self.channelList)

    @property
    def channelIndexList(self):
        """Channel indices 0..nChannels-1."""
        return list(range(self.nChannels))

    @property
    def nHeights(self):
        """Number of range gates (length of heightList)."""
        return len(self.heightList)

    def getDeltaH(self):
        """Height resolution: spacing between the first two gates."""
        return self.heightList[1] - self.heightList[0]

    @property
    def ltctime(self):
        """Acquisition time shifted to local time when useLocalTime is set."""
        if self.useLocalTime:
            # timeZone is expressed in minutes
            return self.utctime - self.timeZone * 60

        return self.utctime

    @property
    def datatime(self):
        """ltctime as a datetime.datetime object."""
        datatimeValue = datetime.datetime.utcfromtimestamp(self.ltctime)
        return datatimeValue

    def getTimeRange(self):
        """Return [start, end] of the current block as a numpy array.

        End is padded by one second (presumably to make the interval
        inclusive for downstream range checks — TODO confirm).
        """
        datatime = []

        datatime.append(self.ltctime)
        datatime.append(self.ltctime + self.timeInterval + 1)

        datatime = numpy.array(datatime)

        return datatime

    def getFmaxTimeResponse(self):
        """PRF derived from the height sampling (0.15 km per microsecond)."""
        period = (10 ** -6) * self.getDeltaH() / (0.15)

        PRF = 1. / (period * self.nCohInt)

        return PRF

    def getFmax(self):
        """Maximum unambiguous frequency from IPP and coherent integration."""
        PRF = 1. / (self.ippSeconds * self.nCohInt)

        return PRF

    def getVmax(self):
        """Maximum unambiguous radial velocity (fmax * lambda / 2)."""
        _lambda = self.C / self.frequency

        vmax = self.getFmax() * _lambda / 2

        return vmax

    # --- Properties below proxy fields of the radar controller header. ---

    @property
    def ippSeconds(self):
        '''Inter-pulse period in seconds (stored in the RC header).'''
        return self.radarControllerHeaderObj.ippSeconds

    @ippSeconds.setter
    def ippSeconds(self, ippSeconds):
        '''Store the inter-pulse period into the RC header.'''
        self.radarControllerHeaderObj.ippSeconds = ippSeconds

    @property
    def code(self):
        '''Transmitted code, as stored in the RC header.'''
        return self.radarControllerHeaderObj.code

    @code.setter
    def code(self, code):
        '''Store the transmitted code into the RC header.'''
        self.radarControllerHeaderObj.code = code

    @property
    def nCode(self):
        '''Number of codes, as stored in the RC header.'''
        return self.radarControllerHeaderObj.nCode

    @nCode.setter
    def nCode(self, ncode):
        '''Store the number of codes into the RC header.'''
        self.radarControllerHeaderObj.nCode = ncode

    @property
    def nBaud(self):
        '''Bauds per code, as stored in the RC header.'''
        return self.radarControllerHeaderObj.nBaud

    @nBaud.setter
    def nBaud(self, nbaud):
        '''Store the bauds per code into the RC header.'''
        self.radarControllerHeaderObj.nBaud = nbaud

    @property
    def ipp(self):
        '''Inter-pulse period (RC header units), as stored in the RC header.'''
        return self.radarControllerHeaderObj.ipp

    @ipp.setter
    def ipp(self, ipp):
        '''Store the inter-pulse period into the RC header.'''
        self.radarControllerHeaderObj.ipp = ipp

    @property
    def metadata(self):
        '''Dict of the attributes named in metadata_list (for HDF5 writers).'''

        return {attr: getattr(self, attr) for attr in self.metadata_list}
342 342
class Voltage(JROData):
    """Container for raw voltage (I/Q) profiles plus pulse-pair products."""

    # Pulse-pair products, filled by processing operations elsewhere.
    dataPP_POW = None
    dataPP_DOP = None
    dataPP_WIDTH = None
    dataPP_SNR = None

    def __init__(self):
        '''
        Constructor
        '''

        self.useLocalTime = True
        self.radarControllerHeaderObj = RadarControllerHeader()
        self.systemHeaderObj = SystemHeader()
        self.type = "Voltage"
        self.data = None
        self.nProfiles = None
        self.heightList = None
        self.channelList = None
        self.flagNoData = True
        self.flagDiscontinuousBlock = False
        self.utctime = None
        self.timeZone = 0
        self.dstFlag = None
        self.errorCount = None
        self.nCohInt = None
        self.blocksize = None
        self.flagCohInt = False
        self.flagDecodeData = False  # assume the data is not decoded
        self.flagDeflipData = False  # assume the data is not de-flipped
        self.flagShiftFFT = False
        self.flagDataAsBlock = False  # assume data is read profile by profile
        self.profileIndex = 0
        self.metadata_list = ['type', 'heightList', 'timeZone', 'nProfiles', 'channelList', 'nCohInt',
                              'code', 'nCode', 'nBaud', 'ippSeconds', 'ipp']

    def getNoisebyHildebrand(self, channel=None):
        """
        Noise level per channel via the Hildebrand-Sekhon method.

        channel: restrict the estimate to a single channel; by default all
                 channels are processed.

        Return:
            noiselevel (numpy array, one value per processed channel)
        """

        if channel is not None:  # fixed: identity check instead of `!= None`
            data = self.data[channel]
            nChannels = 1
        else:
            data = self.data
            nChannels = self.nChannels

        noise = numpy.zeros(nChannels)
        power = data * numpy.conjugate(data)

        for thisChannel in range(nChannels):
            if nChannels == 1:
                daux = power[:].real
            else:
                daux = power[thisChannel, :].real
            noise[thisChannel] = hildebrand_sekhon(daux, self.nCohInt)

        return noise

    def getNoise(self, type=1, channel=None):
        """Return the noise estimate; only type 1 (Hildebrand-Sekhon) exists."""

        if type == 1:
            return self.getNoisebyHildebrand(channel)

        # The original fell through to an UnboundLocalError here; fail with
        # an explicit, catchable error instead.
        raise ValueError('Unsupported noise estimation type: {}'.format(type))

    def getPower(self, channel=None):
        """Power in dB per sample, optionally for a single channel."""

        if channel is not None:
            data = self.data[channel]
        else:
            data = self.data

        power = data * numpy.conjugate(data)
        powerdB = 10 * numpy.log10(power.real)
        powerdB = numpy.squeeze(powerdB)

        return powerdB

    @property
    def timeInterval(self):
        """Seconds covered by one profile after coherent integration."""

        return self.ippSeconds * self.nCohInt

    # Fixed: the original passed the doc string as the fset argument of
    # property(), making any assignment fail with "'str' object is not
    # callable"; pass it as doc= instead (reading .noise is unchanged).
    noise = property(getNoise, doc="I'm the 'nHeights' property.")
433 433
434 434
class Spectra(JROData):
    # Container for power spectra (data_spc), cross-spectra (data_cspc)
    # and DC channel (data_dc), plus the derived noise/coherence helpers.

    def __init__(self):
        '''
        Constructor
        '''

        self.data_dc = None
        self.data_spc = None
        self.data_cspc = None
        self.useLocalTime = True
        self.radarControllerHeaderObj = RadarControllerHeader()
        self.systemHeaderObj = SystemHeader()
        self.type = "Spectra"
        self.timeZone = 0
        self.nProfiles = None
        self.heightList = None
        self.channelList = None
        self.pairsList = None
        self.flagNoData = True
        self.flagDiscontinuousBlock = False
        self.utctime = None
        self.nCohInt = None
        self.nIncohInt = None
        self.blocksize = None
        self.nFFTPoints = None
        self.wavelength = None
        self.flagDecodeData = False  # assume the data is not decoded
        self.flagDeflipData = False  # assume the data is not de-flipped
        self.flagShiftFFT = False
        self.ippFactor = 1
        self.beacon_heiIndexList = []
        self.noise_estimation = None
        self.metadata_list = ['type', 'heightList', 'timeZone', 'pairsList', 'channelList', 'nCohInt',
                              'code', 'nCode', 'nBaud', 'ippSeconds', 'ipp','nIncohInt', 'nFFTPoints', 'nProfiles']


    def getNoisebyHildebrand(self, xmin_index=None, xmax_index=None, ymin_index=None, ymax_index=None):
        """
        Noise level per channel via the Hildebrand-Sekhon method, computed
        over an optional sub-window of the spectrum (x: FFT bins, y: heights).

        Return:
            noiselevel (numpy array, one value per channel)
        """

        noise = numpy.zeros(self.nChannels)
        for channel in range(self.nChannels):
            daux = self.data_spc[channel,
                                 xmin_index:xmax_index, ymin_index:ymax_index]
            noise[channel] = hildebrand_sekhon(daux, self.nIncohInt)

        return noise

    def getNoise(self, xmin_index=None, xmax_index=None, ymin_index=None, ymax_index=None):
        # Prefer an externally supplied estimate when available.
        if self.noise_estimation is not None:
            # this was estimated by getNoise Operation defined in jroproc_spectra.py
            return self.noise_estimation
        else:
            noise = self.getNoisebyHildebrand(
                xmin_index, xmax_index, ymin_index, ymax_index)
            return noise

    def getFreqRangeTimeResponse(self, extrapoints=0):
        # Frequency axis using the height-derived PRF (time-response mode).

        deltafreq = self.getFmaxTimeResponse() / (self.nFFTPoints * self.ippFactor)
        freqrange = deltafreq * (numpy.arange(self.nFFTPoints + extrapoints) - self.nFFTPoints / 2.) - deltafreq / 2

        return freqrange

    def getAcfRange(self, extrapoints=0):
        # Lag axis for ACF plots.
        # NOTE(review): the 10./(fmax/...) scaling differs in form from the
        # other range helpers — confirm the intended units.

        deltafreq = 10. / (self.getFmax() / (self.nFFTPoints * self.ippFactor))
        freqrange = deltafreq * (numpy.arange(self.nFFTPoints + extrapoints) -self.nFFTPoints / 2.) - deltafreq / 2

        return freqrange

    def getFreqRange(self, extrapoints=0):
        # Doppler frequency axis centered on zero.

        deltafreq = self.getFmax() / (self.nFFTPoints * self.ippFactor)
        freqrange = deltafreq * (numpy.arange(self.nFFTPoints + extrapoints) -self.nFFTPoints / 2.) - deltafreq / 2

        return freqrange

    def getVelRange(self, extrapoints=0):
        # Radial velocity axis; scaled down when multiple modes are merged.

        deltav = self.getVmax() / (self.nFFTPoints * self.ippFactor)
        velrange = deltav * (numpy.arange(self.nFFTPoints + extrapoints) - self.nFFTPoints / 2.)

        if self.nmodes:
            return velrange/self.nmodes
        else:
            return velrange

    @property
    def nPairs(self):
        """Number of cross-spectra channel pairs."""

        return len(self.pairsList)

    @property
    def pairsIndexList(self):
        """Pair indices 0..nPairs-1."""

        return list(range(self.nPairs))

    @property
    def normFactor(self):
        """Normalization factor for converting raw spectra to power."""

        pwcode = 1

        if self.flagDecodeData:
            pwcode = numpy.sum(self.code[0]**2)
        #normFactor = min(self.nFFTPoints,self.nProfiles)*self.nIncohInt*self.nCohInt*pwcode*self.windowOfFilter
        normFactor = self.nProfiles * self.nIncohInt * self.nCohInt * pwcode * self.windowOfFilter

        return normFactor

    @property
    def flag_cspc(self):
        """True when no cross-spectra are present."""

        if self.data_cspc is None:
            return True

        return False

    @property
    def flag_dc(self):
        """True when no DC channel is present."""

        if self.data_dc is None:
            return True

        return False

    @property
    def timeInterval(self):
        """Seconds of data integrated into one spectrum."""

        timeInterval = self.ippSeconds * self.nCohInt * self.nIncohInt * self.nProfiles * self.ippFactor
        if self.nmodes:
            return self.nmodes*timeInterval
        else:
            return timeInterval

    def getPower(self):
        """Average power per channel/height in dB (NaNs where not finite)."""

        factor = self.normFactor
        z = self.data_spc / factor
        z = numpy.where(numpy.isfinite(z), z, numpy.NAN)
        avg = numpy.average(z, axis=1)

        return 10 * numpy.log10(avg)

    def getCoherence(self, pairsList=None, phase=False):
        """Coherence magnitude (or phase, in degrees) per requested pair."""

        z = []
        if pairsList is None:
            pairsIndexList = self.pairsIndexList
        else:
            pairsIndexList = []
            for pair in pairsList:
                if pair not in self.pairsList:
                    raise ValueError("Pair %s is not in dataOut.pairsList" % (
                        pair))
                pairsIndexList.append(self.pairsList.index(pair))
        for i in range(len(pairsIndexList)):
            pair = self.pairsList[pairsIndexList[i]]
            ccf = numpy.average(self.data_cspc[pairsIndexList[i], :, :], axis=0)
            powa = numpy.average(self.data_spc[pair[0], :, :], axis=0)
            powb = numpy.average(self.data_spc[pair[1], :, :], axis=0)
            avgcoherenceComplex = ccf / numpy.sqrt(powa * powb)
            if phase:
                data = numpy.arctan2(avgcoherenceComplex.imag,
                                     avgcoherenceComplex.real) * 180 / numpy.pi
            else:
                data = numpy.abs(avgcoherenceComplex)

            z.append(data)

        return numpy.array(z)

    def setValue(self, value):
        # Setter guard for the read-only `noise` property below.

        print("This property should not be initialized")

        return

    noise = property(getNoise, setValue, "I'm the 'nHeights' property.")
621 620
622 621
class SpectraHeis(Spectra):
    # Heis-mode spectra: like Spectra but with unit integration defaults
    # and a normFactor that omits nProfiles/windowOfFilter.

    def __init__(self):

        self.radarControllerHeaderObj = RadarControllerHeader()
        self.systemHeaderObj = SystemHeader()
        self.type = "SpectraHeis"
        self.nProfiles = None
        self.heightList = None
        self.channelList = None
        self.flagNoData = True
        self.flagDiscontinuousBlock = False
        self.utctime = None
        self.blocksize = None
        self.profileIndex = 0
        self.nCohInt = 1
        self.nIncohInt = 1

    @property
    def normFactor(self):
        """Normalization factor (integrations times decoded-code power)."""
        pwcode = 1
        if self.flagDecodeData:
            pwcode = numpy.sum(self.code[0]**2)

        normFactor = self.nIncohInt * self.nCohInt * pwcode

        return normFactor

    @property
    def timeInterval(self):
        """Seconds of data integrated into one Heis spectrum."""

        return self.ippSeconds * self.nCohInt * self.nIncohInt
655 654
656 655
class Fits(JROData):
    # Container for data read from / written to FITS files.

    def __init__(self):

        self.type = "Fits"
        self.nProfiles = None
        self.heightList = None
        self.channelList = None
        self.flagNoData = True
        self.utctime = None
        self.nCohInt = 1
        self.nIncohInt = 1
        self.useLocalTime = True
        self.profileIndex = 0
        self.timeZone = 0

    def getTimeRange(self):
        # Start/end of the current block (no +1 padding, unlike JROData).

        datatime = []

        datatime.append(self.ltctime)
        datatime.append(self.ltctime + self.timeInterval)

        datatime = numpy.array(datatime)

        return datatime

    def getChannelIndexList(self):
        # Channel indices 0..nChannels-1.

        return list(range(self.nChannels))

    def getNoise(self, type=1):
        # NOTE(review): getNoisebyHildebrand/getNoisebySort/getNoisebyWindow
        # are not defined on Fits in this file, and `noise` stays unbound
        # for any other type — confirm this method is actually exercised.


        if type == 1:
            noise = self.getNoisebyHildebrand()

        if type == 2:
            noise = self.getNoisebySort()

        if type == 3:
            noise = self.getNoisebyWindow()

        return noise

    @property
    def timeInterval(self):
        """Seconds of data integrated into one record."""

        timeInterval = self.ippSeconds * self.nCohInt * self.nIncohInt

        return timeInterval

    @property
    def ippSeconds(self):
        '''Inter-pulse period in seconds; Fits stores it directly (ipp_sec).'''
        return self.ipp_sec

    # NOTE(review): the second positional argument of property() is fset, so
    # this doc string becomes the setter — assigning .noise raises
    # "'str' object is not callable". Reading .noise works as intended.
    noise = property(getNoise, "I'm the 'nHeights' property.")
716 715
717 716
class Correlation(JROData):
    # Container for correlation functions (ACF/CCF) between channels.

    def __init__(self):
        '''
        Constructor
        '''
        self.radarControllerHeaderObj = RadarControllerHeader()
        self.systemHeaderObj = SystemHeader()
        self.type = "Correlation"
        self.data = None
        self.dtype = None
        self.nProfiles = None
        self.heightList = None
        self.channelList = None
        self.flagNoData = True
        self.flagDiscontinuousBlock = False
        self.utctime = None
        self.timeZone = 0
        self.dstFlag = None
        self.errorCount = None
        self.blocksize = None
        self.flagDecodeData = False  # assume the data is not decoded
        self.flagDeflipData = False  # assume the data is not de-flipped
        self.pairsList = None
        self.nPoints = None

    def getPairsList(self):
        """Return the configured list of channel pairs."""

        return self.pairsList

    def getNoise(self, mode=2):
        """Noise estimate from the zero-lag spectra around the DC bin.

        mode 1: replace the DC bin by the mean of its two neighbours.
        mode 2: polynomial (Vandermonde) interpolation through bins ±1, ±2.
        NOTE(review): relies on lagR/lagT/data_corr/num_prof being set by
        the processing chain elsewhere — not defined in this file.
        """

        indR = numpy.where(self.lagR == 0)[0][0]
        indT = numpy.where(self.lagT == 0)[0][0]

        jspectra0 = self.data_corr[:, :, indR, :]
        jspectra = copy.copy(jspectra0)

        num_chan = jspectra.shape[0]
        num_hei = jspectra.shape[2]

        freq_dc = jspectra.shape[1] / 2
        ind_vel = numpy.array([-2, -1, 1, 2]) + freq_dc

        # Wrap negative bin indices around the profile axis.
        if ind_vel[0] < 0:
            ind_vel[list(range(0, 1))] = ind_vel[list(
                range(0, 1))] + self.num_prof

        if mode == 1:
            jspectra[:, freq_dc, :] = (
                jspectra[:, ind_vel[1], :] + jspectra[:, ind_vel[2], :]) / 2  # CORRECTION

        if mode == 2:

            vel = numpy.array([-2, -1, 1, 2])
            xx = numpy.zeros([4, 4])

            # Vandermonde matrix over the four neighbour velocities.
            for fil in range(4):
                xx[fil, :] = vel[fil]**numpy.asarray(list(range(4)))

            xx_inv = numpy.linalg.inv(xx)
            xx_aux = xx_inv[0, :]

            for ich in range(num_chan):
                yy = jspectra[ich, ind_vel, :]
                jspectra[ich, freq_dc, :] = numpy.dot(xx_aux, yy)

                # Fix non-positive interpolations with the neighbour mean.
                junkid = jspectra[ich, freq_dc, :] <= 0
                cjunkid = sum(junkid)

                if cjunkid.any():
                    jspectra[ich, freq_dc, junkid.nonzero()] = (
                        jspectra[ich, ind_vel[1], junkid] + jspectra[ich, ind_vel[2], junkid]) / 2

        # Noise is the removed DC contribution.
        noise = jspectra0[:, freq_dc, :] - jspectra[:, freq_dc, :]

        return noise

    @property
    def timeInterval(self):
        """Seconds of data integrated into one correlation record."""

        return self.ippSeconds * self.nCohInt * self.nProfiles

    def splitFunctions(self):
        """Split pairsList into auto (chan==chan) and cross correlations.

        Returns (acf_ind, ccf_ind, acf_pairs, ccf_pairs, data_acf, data_ccf).
        """

        pairsList = self.pairsList
        ccf_pairs = []
        acf_pairs = []
        ccf_ind = []
        acf_ind = []
        for l in range(len(pairsList)):
            chan0 = pairsList[l][0]
            chan1 = pairsList[l][1]

            # Collect autocorrelation pairs (same channel on both sides).
            if chan0 == chan1:
                acf_pairs.append(chan0)
                acf_ind.append(l)
            else:
                ccf_pairs.append(pairsList[l])
                ccf_ind.append(l)

        data_acf = self.data_cf[acf_ind]
        data_ccf = self.data_cf[ccf_ind]

        return acf_ind, ccf_ind, acf_pairs, ccf_pairs, data_acf, data_ccf

    @property
    def normFactor(self):
        """Per-pair normalization: sqrt of the two ACF maxima per height."""
        acf_ind, ccf_ind, acf_pairs, ccf_pairs, data_acf, data_ccf = self.splitFunctions()
        acf_pairs = numpy.array(acf_pairs)
        normFactor = numpy.zeros((self.nPairs, self.nHeights))

        for p in range(self.nPairs):
            pair = self.pairsList[p]

            ch0 = pair[0]
            ch1 = pair[1]

            ch0_max = numpy.max(data_acf[acf_pairs == ch0, :, :], axis=1)
            ch1_max = numpy.max(data_acf[acf_pairs == ch1, :, :], axis=1)
            normFactor[p, :] = numpy.sqrt(ch0_max * ch1_max)

        return normFactor
842 841
843 842
class Parameters(Spectra):
    # Container for derived geophysical parameters (moments, winds, ...).

    groupList = None  # List of Pairs, Groups, etc
    data_param = None  # Parameters obtained
    data_pre = None  # Data Pre Parametrization
    data_SNR = None  # Signal to Noise Ratio
    abscissaList = None  # Abscissa, can be velocities, lags or time
    utctimeInit = None  # Initial UTC time
    paramInterval = None  # Time interval to calculate Parameters in seconds
    useLocalTime = True
    # Fitting
    data_error = None  # Error of the estimation
    constants = None
    library = None
    # Output signal
    outputInterval = None  # Time interval to calculate output signal in seconds
    data_output = None  # Out signal
    nAvg = None
    noise_estimation = None
    GauSPC = None  # Fit gaussian SPC

    def __init__(self):
        '''
        Constructor
        '''
        self.radarControllerHeaderObj = RadarControllerHeader()
        self.systemHeaderObj = SystemHeader()
        self.type = "Parameters"
        self.timeZone = 0

    def getTimeRange1(self, interval):
        """Return [start, start+interval] based on utctimeInit (local-aware)."""

        datatime = []

        if self.useLocalTime:
            time1 = self.utctimeInit - self.timeZone * 60
        else:
            time1 = self.utctimeInit

        datatime.append(time1)
        datatime.append(time1 + interval)
        datatime = numpy.array(datatime)

        return datatime

    @property
    def timeInterval(self):
        """timeInterval1 when set by the processing chain, else paramInterval."""

        if hasattr(self, 'timeInterval1'):
            return self.timeInterval1
        else:
            return self.paramInterval

    def setValue(self, value):
        # Setter guard for the read-only `noise` property below.

        print("This property should not be initialized")

        return

    def getNoise(self):
        # NOTE(review): spc_noise is expected to be set by an upstream
        # operation; it is not defined in this file — confirm.

        return self.spc_noise

    noise = property(getNoise, setValue, "I'm the 'Noise' property.")
908 907
909 908
class PlotterData(object):
    '''
    Object to hold data to be plotted

    Stores one entry per timestamp in ``self.data`` plus plot metadata
    (set via update()'s meta dict) and can serialize a snapshot to JSON.
    '''

    # Decimation caps for the JSON output (x and y axes).
    MAXNUMX = 200
    MAXNUMY = 200

    def __init__(self, code, exp_code, localtime=True):

        self.key = code
        self.exp_code = exp_code
        self.ready = False
        self.flagNoData = False
        self.localtime = localtime
        self.data = {}
        self.meta = {}
        self.__heights = []

    def __str__(self):
        dum = ['{}{}'.format(key, self.shape(key)) for key in self.data]
        return 'Data[{}][{}]'.format(';'.join(dum), len(self.times))

    def __len__(self):
        return len(self.data)

    def __getitem__(self, key):
        # int -> entry at the key-th timestamp; str -> that field stacked
        # over all timestamps (axes swapped so time is the second axis).
        if isinstance(key, int):
            return self.data[self.times[key]]
        elif isinstance(key, str):
            ret = numpy.array([self.data[x][key] for x in self.times])
            if ret.ndim > 1:
                ret = numpy.swapaxes(ret, 0, 1)
            return ret

    def __contains__(self, key):
        return key in self.data[self.min_time]

    def setup(self):
        '''
        Configure object
        '''
        self.type = ''
        self.ready = False
        del self.data
        self.data = {}
        self.__heights = []
        self.__all_heights = set()

    def shape(self, key):
        '''
        Get the shape of the one-element data for the given key
        '''

        if len(self.data[self.min_time][key]):
            return self.data[self.min_time][key].shape
        return (0,)

    def update(self, data, tm, meta=None):
        '''
        Update data object with new dataOut

        Stores ``data`` under timestamp ``tm`` and copies every meta item
        onto the instance as an attribute.
        '''

        self.data[tm] = data

        # Fixed: the original used a mutable default (meta={}); None keeps
        # the signature backward-compatible without a shared dict.
        if meta is None:
            meta = {}
        for key, value in meta.items():
            setattr(self, key, value)

    def normalize_heights(self):
        '''
        Ensure same-dimension of the data for different heighList
        '''
        # NOTE(review): iterates self.data[key].items(), i.e. assumes a
        # nested {key: {tm: obj}} layout, while update() stores {tm: data}
        # — confirm which caller populates the nested form.

        H = numpy.array(list(self.__all_heights))
        H.sort()
        for key in self.data:
            shape = self.shape(key)[:-1] + H.shape
            for tm, obj in list(self.data[key].items()):
                h = self.__heights[self.times.tolist().index(tm)]
                if H.size == h.size:
                    continue
                index = numpy.where(numpy.in1d(H, h))[0]
                dummy = numpy.zeros(shape) + numpy.nan
                if len(shape) == 2:
                    dummy[:, index] = obj
                else:
                    dummy[index] = obj
                self.data[key][tm] = dummy

        self.__heights = [H for tm in self.times]

    def jsonify(self, tm, plot_name, plot_type, decimate=False):
        '''
        Convert data to json
        '''

        meta = {}
        meta['xrange'] = []
        # Decimation steps so the payload stays within MAXNUMX/MAXNUMY.
        dy = int(len(self.yrange)/self.MAXNUMY) + 1
        tmp = self.data[tm][self.key]
        shape = tmp.shape
        if len(shape) == 2:
            data = self.roundFloats(self.data[tm][self.key][::, ::dy].tolist())
        elif len(shape) == 3:
            dx = int(self.data[tm][self.key].shape[1]/self.MAXNUMX) + 1
            data = self.roundFloats(
                self.data[tm][self.key][::, ::dx, ::dy].tolist())
            meta['xrange'] = self.roundFloats(self.xrange[2][::dx].tolist())
        else:
            data = self.roundFloats(self.data[tm][self.key].tolist())

        ret = {
            'plot': plot_name,
            'code': self.exp_code,
            'time': float(tm),
            'data': data,
        }
        meta['type'] = plot_type
        meta['interval'] = float(self.interval)
        meta['localtime'] = self.localtime
        meta['yrange'] = self.roundFloats(self.yrange[::dy].tolist())
        meta.update(self.meta)
        ret['metadata'] = meta
        return json.dumps(ret)

    @property
    def times(self):
        '''
        Return the list of times of the current data
        '''

        ret = [t for t in self.data]
        ret.sort()
        return numpy.array(ret)

    @property
    def min_time(self):
        '''
        Return the minimun time value
        '''

        return self.times[0]

    @property
    def max_time(self):
        '''
        Return the maximun time value
        '''

        return self.times[-1]

    # @property
    # def heights(self):
    #     '''
    #     Return the list of heights of the current data
    #     '''

    #     return numpy.array(self.__heights[-1])

    @staticmethod
    def roundFloats(obj):
        '''Recursively round floats to 2 decimals for compact JSON output.'''
        if isinstance(obj, list):
            return list(map(PlotterData.roundFloats, obj))
        elif isinstance(obj, float):
            return round(obj, 2)
        # Fixed: the original implicitly returned None here, turning ints
        # (and any other scalar) into null in the JSON payload.
        return obj
@@ -1,695 +1,695
1 1 # Copyright (c) 2012-2020 Jicamarca Radio Observatory
2 2 # All rights reserved.
3 3 #
4 4 # Distributed under the terms of the BSD 3-clause license.
5 5 """Base class to create plot operations
6 6
7 7 """
8 8
9 9 import os
10 10 import sys
11 11 import zmq
12 12 import time
13 13 import numpy
14 14 import datetime
15 15 from collections import deque
16 16 from functools import wraps
17 17 from threading import Thread
18 18 import matplotlib
19 19
20 20 if 'BACKEND' in os.environ:
21 21 matplotlib.use(os.environ['BACKEND'])
22 22 elif 'linux' in sys.platform:
23 23 matplotlib.use("TkAgg")
24 24 elif 'darwin' in sys.platform:
25 25 matplotlib.use('MacOSX')
26 26 else:
27 27 from schainpy.utils import log
28 28 log.warning('Using default Backend="Agg"', 'INFO')
29 29 matplotlib.use('Agg')
30 30
31 31 import matplotlib.pyplot as plt
32 32 from matplotlib.patches import Polygon
33 33 from mpl_toolkits.axes_grid1 import make_axes_locatable
34 34 from matplotlib.ticker import FuncFormatter, LinearLocator, MultipleLocator
35 35
36 36 from schainpy.model.data.jrodata import PlotterData
37 37 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
38 38 from schainpy.utils import log
39 39
40 40 jet_values = matplotlib.pyplot.get_cmap('jet', 100)(numpy.arange(100))[10:90]
41 41 blu_values = matplotlib.pyplot.get_cmap(
42 42 'seismic_r', 20)(numpy.arange(20))[10:15]
43 43 ncmap = matplotlib.colors.LinearSegmentedColormap.from_list(
44 44 'jro', numpy.vstack((blu_values, jet_values)))
45 45 matplotlib.pyplot.register_cmap(cmap=ncmap)
46 46
47 47 CMAPS = [plt.get_cmap(s) for s in ('jro', 'jet', 'viridis',
48 48 'plasma', 'inferno', 'Greys', 'seismic', 'bwr', 'coolwarm')]
49 49
50 50 EARTH_RADIUS = 6.3710e3
51 51
52 52 def ll2xy(lat1, lon1, lat2, lon2):
53 53
54 54 p = 0.017453292519943295
55 55 a = 0.5 - numpy.cos((lat2 - lat1) * p)/2 + numpy.cos(lat1 * p) * \
56 56 numpy.cos(lat2 * p) * (1 - numpy.cos((lon2 - lon1) * p)) / 2
57 57 r = 12742 * numpy.arcsin(numpy.sqrt(a))
58 58 theta = numpy.arctan2(numpy.sin((lon2-lon1)*p)*numpy.cos(lat2*p), numpy.cos(lat1*p)
59 59 * numpy.sin(lat2*p)-numpy.sin(lat1*p)*numpy.cos(lat2*p)*numpy.cos((lon2-lon1)*p))
60 60 theta = -theta + numpy.pi/2
61 61 return r*numpy.cos(theta), r*numpy.sin(theta)
62 62
63 63
64 64 def km2deg(km):
65 65 '''
66 66 Convert distance in km to degrees
67 67 '''
68 68
69 69 return numpy.rad2deg(km/EARTH_RADIUS)
70 70
71 71
72 72 def figpause(interval):
73 73 backend = plt.rcParams['backend']
74 74 if backend in matplotlib.rcsetup.interactive_bk:
75 75 figManager = matplotlib._pylab_helpers.Gcf.get_active()
76 76 if figManager is not None:
77 77 canvas = figManager.canvas
78 78 if canvas.figure.stale:
79 79 canvas.draw()
80 80 try:
81 81 canvas.start_event_loop(interval)
82 82 except:
83 83 pass
84 84 return
85 85
86 86 def popup(message):
87 87 '''
88 88 '''
89 89
90 90 fig = plt.figure(figsize=(12, 8), facecolor='r')
91 91 text = '\n'.join([s.strip() for s in message.split(':')])
92 92 fig.text(0.01, 0.5, text, ha='left', va='center',
93 93 size='20', weight='heavy', color='w')
94 94 fig.show()
95 95 figpause(1000)
96 96
97 97
98 98 class Throttle(object):
99 99 '''
100 100 Decorator that prevents a function from being called more than once every
101 101 time period.
102 102 To create a function that cannot be called more than once a minute, but
103 103 will sleep until it can be called:
104 104 @Throttle(minutes=1)
105 105 def foo():
106 106 pass
107 107
108 108 for i in range(10):
109 109 foo()
110 110 print "This function has run %s times." % i
111 111 '''
112 112
113 113 def __init__(self, seconds=0, minutes=0, hours=0):
114 114 self.throttle_period = datetime.timedelta(
115 115 seconds=seconds, minutes=minutes, hours=hours
116 116 )
117 117
118 118 self.time_of_last_call = datetime.datetime.min
119 119
120 120 def __call__(self, fn):
121 121 @wraps(fn)
122 122 def wrapper(*args, **kwargs):
123 123 coerce = kwargs.pop('coerce', None)
124 124 if coerce:
125 125 self.time_of_last_call = datetime.datetime.now()
126 126 return fn(*args, **kwargs)
127 127 else:
128 128 now = datetime.datetime.now()
129 129 time_since_last_call = now - self.time_of_last_call
130 130 time_left = self.throttle_period - time_since_last_call
131 131
132 132 if time_left > datetime.timedelta(seconds=0):
133 133 return
134 134
135 135 self.time_of_last_call = datetime.datetime.now()
136 136 return fn(*args, **kwargs)
137 137
138 138 return wrapper
139 139
140 140 def apply_throttle(value):
141 141
142 142 @Throttle(seconds=value)
143 143 def fnThrottled(fn):
144 144 fn()
145 145
146 146 return fnThrottled
147 147
148 148
149 149 @MPDecorator
150 150 class Plot(Operation):
151 151 """Base class for Schain plotting operations
152 152
153 153 This class should never be use directtly you must subclass a new operation,
154 154 children classes must be defined as follow:
155 155
156 156 ExamplePlot(Plot):
157 157
158 158 CODE = 'code'
159 159 colormap = 'jet'
160 160 plot_type = 'pcolor' # options are ('pcolor', 'pcolorbuffer', 'scatter', 'scatterbuffer')
161 161
162 162 def setup(self):
163 163 pass
164 164
165 165 def plot(self):
166 166 pass
167 167
168 168 """
169 169
170 170 CODE = 'Figure'
171 171 colormap = 'jet'
172 172 bgcolor = 'white'
173 173 buffering = True
174 174 __missing = 1E30
175 175
176 176 __attrs__ = ['show', 'save', 'ymin', 'ymax', 'zmin', 'zmax', 'title',
177 177 'showprofile']
178 178
179 179 def __init__(self):
180 180
181 181 Operation.__init__(self)
182 182 self.isConfig = False
183 183 self.isPlotConfig = False
184 184 self.save_time = 0
185 185 self.sender_time = 0
186 186 self.data = None
187 187 self.firsttime = True
188 188 self.sender_queue = deque(maxlen=10)
189 189 self.plots_adjust = {'left': 0.125, 'right': 0.9, 'bottom': 0.15, 'top': 0.9, 'wspace': 0.2, 'hspace': 0.2}
190 190
191 191 def __fmtTime(self, x, pos):
192 192 '''
193 193 '''
194 194
195 195 return '{}'.format(self.getDateTime(x).strftime('%H:%M'))
196 196
197 197 def __setup(self, **kwargs):
198 198 '''
199 199 Initialize variables
200 200 '''
201 201
202 202 self.figures = []
203 203 self.axes = []
204 204 self.cb_axes = []
205 205 self.pf_axes = []
206 206 self.localtime = kwargs.pop('localtime', True)
207 207 self.show = kwargs.get('show', True)
208 208 self.save = kwargs.get('save', False)
209 209 self.save_period = kwargs.get('save_period', 0)
210 210 self.colormap = kwargs.get('colormap', self.colormap)
211 211 self.colormap_coh = kwargs.get('colormap_coh', 'jet')
212 212 self.colormap_phase = kwargs.get('colormap_phase', 'RdBu_r')
213 213 self.colormaps = kwargs.get('colormaps', None)
214 214 self.bgcolor = kwargs.get('bgcolor', self.bgcolor)
215 215 self.showprofile = kwargs.get('showprofile', False)
216 216 self.title = kwargs.get('wintitle', self.CODE.upper())
217 217 self.cb_label = kwargs.get('cb_label', None)
218 218 self.cb_labels = kwargs.get('cb_labels', None)
219 219 self.labels = kwargs.get('labels', None)
220 220 self.xaxis = kwargs.get('xaxis', 'frequency')
221 221 self.zmin = kwargs.get('zmin', None)
222 222 self.zmax = kwargs.get('zmax', None)
223 223 self.zlimits = kwargs.get('zlimits', None)
224 224 self.xmin = kwargs.get('xmin', None)
225 225 self.xmax = kwargs.get('xmax', None)
226 226 self.xrange = kwargs.get('xrange', 12)
227 227 self.xscale = kwargs.get('xscale', None)
228 228 self.ymin = kwargs.get('ymin', None)
229 229 self.ymax = kwargs.get('ymax', None)
230 230 self.yscale = kwargs.get('yscale', None)
231 231 self.xlabel = kwargs.get('xlabel', None)
232 232 self.attr_time = kwargs.get('attr_time', 'utctime')
233 233 self.attr_data = kwargs.get('attr_data', 'data_param')
234 234 self.decimation = kwargs.get('decimation', None)
235 235 self.oneFigure = kwargs.get('oneFigure', True)
236 236 self.width = kwargs.get('width', None)
237 237 self.height = kwargs.get('height', None)
238 238 self.colorbar = kwargs.get('colorbar', True)
239 self.factors = kwargs.get('factors', [1, 1, 1, 1, 1, 1, 1, 1])
239 self.factors = kwargs.get('factors', range(18))
240 240 self.channels = kwargs.get('channels', None)
241 241 self.titles = kwargs.get('titles', [])
242 242 self.polar = False
243 243 self.type = kwargs.get('type', 'iq')
244 244 self.grid = kwargs.get('grid', False)
245 245 self.pause = kwargs.get('pause', False)
246 246 self.save_code = kwargs.get('save_code', self.CODE)
247 247 self.throttle = kwargs.get('throttle', 0)
248 248 self.exp_code = kwargs.get('exp_code', None)
249 249 self.server = kwargs.get('server', False)
250 250 self.sender_period = kwargs.get('sender_period', 60)
251 251 self.tag = kwargs.get('tag', '')
252 252 self.height_index = kwargs.get('height_index', None)
253 253 self.__throttle_plot = apply_throttle(self.throttle)
254 254 code = self.attr_data if self.attr_data else self.CODE
255 255 self.data = PlotterData(self.CODE, self.exp_code, self.localtime)
256 256 self.tmin = kwargs.get('tmin', None)
257 257
258 258 if self.server:
259 259 if not self.server.startswith('tcp://'):
260 260 self.server = 'tcp://{}'.format(self.server)
261 261 log.success(
262 262 'Sending to server: {}'.format(self.server),
263 263 self.name
264 264 )
265 265
266 266 if isinstance(self.attr_data, str):
267 267 self.attr_data = [self.attr_data]
268 268
269 269 def __setup_plot(self):
270 270 '''
271 271 Common setup for all figures, here figures and axes are created
272 272 '''
273 273
274 274 self.setup()
275 275
276 276 self.time_label = 'LT' if self.localtime else 'UTC'
277 277
278 278 if self.width is None:
279 279 self.width = 8
280 280
281 281 self.figures = []
282 282 self.axes = []
283 283 self.cb_axes = []
284 284 self.pf_axes = []
285 285 self.cmaps = []
286 286
287 287 size = '15%' if self.ncols == 1 else '30%'
288 288 pad = '4%' if self.ncols == 1 else '8%'
289 289
290 290 if self.oneFigure:
291 291 if self.height is None:
292 292 self.height = 1.4 * self.nrows + 1
293 293 fig = plt.figure(figsize=(self.width, self.height),
294 294 edgecolor='k',
295 295 facecolor='w')
296 296 self.figures.append(fig)
297 297 for n in range(self.nplots):
298 298 ax = fig.add_subplot(self.nrows, self.ncols,
299 299 n + 1, polar=self.polar)
300 300 ax.tick_params(labelsize=8)
301 301 ax.firsttime = True
302 302 ax.index = 0
303 303 ax.press = None
304 304 self.axes.append(ax)
305 305 if self.showprofile:
306 306 cax = self.__add_axes(ax, size=size, pad=pad)
307 307 cax.tick_params(labelsize=8)
308 308 self.pf_axes.append(cax)
309 309 else:
310 310 if self.height is None:
311 311 self.height = 3
312 312 for n in range(self.nplots):
313 313 fig = plt.figure(figsize=(self.width, self.height),
314 314 edgecolor='k',
315 315 facecolor='w')
316 316 ax = fig.add_subplot(1, 1, 1, polar=self.polar)
317 317 ax.tick_params(labelsize=8)
318 318 ax.firsttime = True
319 319 ax.index = 0
320 320 ax.press = None
321 321 self.figures.append(fig)
322 322 self.axes.append(ax)
323 323 if self.showprofile:
324 324 cax = self.__add_axes(ax, size=size, pad=pad)
325 325 cax.tick_params(labelsize=8)
326 326 self.pf_axes.append(cax)
327 327
328 328 for n in range(self.nrows):
329 329 if self.colormaps is not None:
330 330 cmap = plt.get_cmap(self.colormaps[n])
331 331 else:
332 332 cmap = plt.get_cmap(self.colormap)
333 333 cmap.set_bad(self.bgcolor, 1.)
334 334 self.cmaps.append(cmap)
335 335
336 336 def __add_axes(self, ax, size='30%', pad='8%'):
337 337 '''
338 338 Add new axes to the given figure
339 339 '''
340 340 divider = make_axes_locatable(ax)
341 341 nax = divider.new_horizontal(size=size, pad=pad)
342 342 ax.figure.add_axes(nax)
343 343 return nax
344 344
345 345 def fill_gaps(self, x_buffer, y_buffer, z_buffer):
346 346 '''
347 347 Create a masked array for missing data
348 348 '''
349 349 if x_buffer.shape[0] < 2:
350 350 return x_buffer, y_buffer, z_buffer
351 351
352 352 deltas = x_buffer[1:] - x_buffer[0:-1]
353 353 x_median = numpy.median(deltas)
354 354
355 355 index = numpy.where(deltas > 5 * x_median)
356 356
357 357 if len(index[0]) != 0:
358 358 z_buffer[::, index[0], ::] = self.__missing
359 359 z_buffer = numpy.ma.masked_inside(z_buffer,
360 360 0.99 * self.__missing,
361 361 1.01 * self.__missing)
362 362
363 363 return x_buffer, y_buffer, z_buffer
364 364
365 365 def decimate(self):
366 366
367 367 # dx = int(len(self.x)/self.__MAXNUMX) + 1
368 368 dy = int(len(self.y) / self.decimation) + 1
369 369
370 370 # x = self.x[::dx]
371 371 x = self.x
372 372 y = self.y[::dy]
373 373 z = self.z[::, ::, ::dy]
374 374
375 375 return x, y, z
376 376
377 377 def format(self):
378 378 '''
379 379 Set min and max values, labels, ticks and titles
380 380 '''
381 381
382 382 for n, ax in enumerate(self.axes):
383 383 if ax.firsttime:
384 384 if self.xaxis != 'time':
385 385 xmin = self.xmin
386 386 xmax = self.xmax
387 387 else:
388 388 xmin = self.tmin
389 389 xmax = self.tmin + self.xrange*60*60
390 390 ax.xaxis.set_major_formatter(FuncFormatter(self.__fmtTime))
391 391 ax.xaxis.set_major_locator(LinearLocator(9))
392 392 ymin = self.ymin if self.ymin is not None else numpy.nanmin(self.y[numpy.isfinite(self.y)])
393 393 ymax = self.ymax if self.ymax is not None else numpy.nanmax(self.y[numpy.isfinite(self.y)])
394 394 ax.set_facecolor(self.bgcolor)
395 395 if self.xscale:
396 396 ax.xaxis.set_major_formatter(FuncFormatter(
397 397 lambda x, pos: '{0:g}'.format(x*self.xscale)))
398 398 if self.yscale:
399 399 ax.yaxis.set_major_formatter(FuncFormatter(
400 400 lambda x, pos: '{0:g}'.format(x*self.yscale)))
401 401 if self.xlabel is not None:
402 402 ax.set_xlabel(self.xlabel)
403 403 if self.ylabel is not None:
404 404 ax.set_ylabel(self.ylabel)
405 405 if self.showprofile:
406 406 self.pf_axes[n].set_ylim(ymin, ymax)
407 407 self.pf_axes[n].set_xlim(self.zmin, self.zmax)
408 408 self.pf_axes[n].set_xlabel('dB')
409 409 self.pf_axes[n].grid(b=True, axis='x')
410 410 [tick.set_visible(False)
411 411 for tick in self.pf_axes[n].get_yticklabels()]
412 412 if self.colorbar:
413 413 ax.cbar = plt.colorbar(
414 414 ax.plt, ax=ax, fraction=0.05, pad=0.02, aspect=10)
415 415 ax.cbar.ax.tick_params(labelsize=8)
416 416 ax.cbar.ax.press = None
417 417 if self.cb_label:
418 418 ax.cbar.set_label(self.cb_label, size=8)
419 419 elif self.cb_labels:
420 420 ax.cbar.set_label(self.cb_labels[n], size=8)
421 421 else:
422 422 ax.cbar = None
423 423 ax.set_xlim(xmin, xmax)
424 424 ax.set_ylim(ymin, ymax)
425 425 ax.firsttime = False
426 426 if self.grid:
427 427 ax.grid(True)
428 428 if not self.polar:
429 429 ax.set_title('{} {} {}'.format(
430 430 self.titles[n],
431 431 self.getDateTime(self.data.max_time).strftime(
432 432 '%Y-%m-%d %H:%M:%S'),
433 433 self.time_label),
434 434 size=8)
435 435 else:
436 436 ax.set_title('{}'.format(self.titles[n]), size=8)
437 437 ax.set_ylim(0, 90)
438 438 ax.set_yticks(numpy.arange(0, 90, 20))
439 439 ax.yaxis.labelpad = 40
440 440
441 441 if self.firsttime:
442 442 for n, fig in enumerate(self.figures):
443 443 fig.subplots_adjust(**self.plots_adjust)
444 444 self.firsttime = False
445 445
446 446 def clear_figures(self):
447 447 '''
448 448 Reset axes for redraw plots
449 449 '''
450 450
451 451 for ax in self.axes+self.pf_axes+self.cb_axes:
452 452 ax.clear()
453 453 ax.firsttime = True
454 454 if hasattr(ax, 'cbar') and ax.cbar:
455 455 ax.cbar.remove()
456 456
457 457 def __plot(self):
458 458 '''
459 459 Main function to plot, format and save figures
460 460 '''
461 461
462 462 self.plot()
463 463 self.format()
464 464
465 465 for n, fig in enumerate(self.figures):
466 466 if self.nrows == 0 or self.nplots == 0:
467 467 log.warning('No data', self.name)
468 468 fig.text(0.5, 0.5, 'No Data', fontsize='large', ha='center')
469 469 fig.canvas.manager.set_window_title(self.CODE)
470 470 continue
471 471
472 472 fig.canvas.manager.set_window_title('{} - {}'.format(self.title,
473 473 self.getDateTime(self.data.max_time).strftime('%Y/%m/%d')))
474 474 fig.canvas.draw()
475 475 if self.show:
476 476 fig.show()
477 477 figpause(0.01)
478 478
479 479 if self.save:
480 480 self.save_figure(n)
481 481
482 482 if self.server:
483 483 self.send_to_server()
484 484
485 485 def __update(self, dataOut, timestamp):
486 486 '''
487 487 '''
488 488
489 489 metadata = {
490 490 'yrange': dataOut.heightList,
491 491 'interval': dataOut.timeInterval,
492 492 'channels': dataOut.channelList
493 493 }
494 494
495 495 data, meta = self.update(dataOut)
496 496 metadata.update(meta)
497 497 self.data.update(data, timestamp, metadata)
498 498
499 499 def save_figure(self, n):
500 500 '''
501 501 '''
502 502
503 503 if (self.data.max_time - self.save_time) <= self.save_period:
504 504 return
505 505
506 506 self.save_time = self.data.max_time
507 507
508 508 fig = self.figures[n]
509 509
510 510 if self.throttle == 0:
511 511 figname = os.path.join(
512 512 self.save,
513 513 self.save_code,
514 514 '{}_{}.png'.format(
515 515 self.save_code,
516 516 self.getDateTime(self.data.max_time).strftime(
517 517 '%Y%m%d_%H%M%S'
518 518 ),
519 519 )
520 520 )
521 521 log.log('Saving figure: {}'.format(figname), self.name)
522 522 if not os.path.isdir(os.path.dirname(figname)):
523 523 os.makedirs(os.path.dirname(figname))
524 524 fig.savefig(figname)
525 525
526 526 figname = os.path.join(
527 527 self.save,
528 528 '{}_{}.png'.format(
529 529 self.save_code,
530 530 self.getDateTime(self.data.min_time).strftime(
531 531 '%Y%m%d'
532 532 ),
533 533 )
534 534 )
535 535
536 536 log.log('Saving figure: {}'.format(figname), self.name)
537 537 if not os.path.isdir(os.path.dirname(figname)):
538 538 os.makedirs(os.path.dirname(figname))
539 539 fig.savefig(figname)
540 540
541 541 def send_to_server(self):
542 542 '''
543 543 '''
544 544
545 545 if self.exp_code == None:
546 546 log.warning('Missing `exp_code` skipping sending to server...')
547 547
548 548 last_time = self.data.max_time
549 549 interval = last_time - self.sender_time
550 550 if interval < self.sender_period:
551 551 return
552 552
553 553 self.sender_time = last_time
554 554
555 555 attrs = ['titles', 'zmin', 'zmax', 'tag', 'ymin', 'ymax']
556 556 for attr in attrs:
557 557 value = getattr(self, attr)
558 558 if value:
559 559 if isinstance(value, (numpy.float32, numpy.float64)):
560 560 value = round(float(value), 2)
561 561 self.data.meta[attr] = value
562 562 if self.colormap == 'jet':
563 563 self.data.meta['colormap'] = 'Jet'
564 564 elif 'RdBu' in self.colormap:
565 565 self.data.meta['colormap'] = 'RdBu'
566 566 else:
567 567 self.data.meta['colormap'] = 'Viridis'
568 568 self.data.meta['interval'] = int(interval)
569 569
570 570 self.sender_queue.append(last_time)
571 571
572 572 while True:
573 573 try:
574 574 tm = self.sender_queue.popleft()
575 575 except IndexError:
576 576 break
577 577 msg = self.data.jsonify(tm, self.save_code, self.plot_type)
578 578 self.socket.send_string(msg)
579 579 socks = dict(self.poll.poll(2000))
580 580 if socks.get(self.socket) == zmq.POLLIN:
581 581 reply = self.socket.recv_string()
582 582 if reply == 'ok':
583 583 log.log("Response from server ok", self.name)
584 584 time.sleep(0.1)
585 585 continue
586 586 else:
587 587 log.warning(
588 588 "Malformed reply from server: {}".format(reply), self.name)
589 589 else:
590 590 log.warning(
591 591 "No response from server, retrying...", self.name)
592 592 self.sender_queue.appendleft(tm)
593 593 self.socket.setsockopt(zmq.LINGER, 0)
594 594 self.socket.close()
595 595 self.poll.unregister(self.socket)
596 596 self.socket = self.context.socket(zmq.REQ)
597 597 self.socket.connect(self.server)
598 598 self.poll.register(self.socket, zmq.POLLIN)
599 599 break
600 600
601 601 def setup(self):
602 602 '''
603 603 This method should be implemented in the child class, the following
604 604 attributes should be set:
605 605
606 606 self.nrows: number of rows
607 607 self.ncols: number of cols
608 608 self.nplots: number of plots (channels or pairs)
609 609 self.ylabel: label for Y axes
610 610 self.titles: list of axes title
611 611
612 612 '''
613 613 raise NotImplementedError
614 614
615 615 def plot(self):
616 616 '''
617 617 Must be defined in the child class, the actual plotting method
618 618 '''
619 619 raise NotImplementedError
620 620
621 621 def update(self, dataOut):
622 622 '''
623 623 Must be defined in the child class, update self.data with new data
624 624 '''
625 625
626 626 data = {
627 627 self.CODE: getattr(dataOut, 'data_{}'.format(self.CODE))
628 628 }
629 629 meta = {}
630 630
631 631 return data, meta
632 632
633 633 def run(self, dataOut, **kwargs):
634 634 '''
635 635 Main plotting routine
636 636 '''
637 637
638 638 if self.isConfig is False:
639 639 self.__setup(**kwargs)
640 640
641 641 if self.localtime:
642 642 self.getDateTime = datetime.datetime.fromtimestamp
643 643 else:
644 644 self.getDateTime = datetime.datetime.utcfromtimestamp
645 645
646 646 self.data.setup()
647 647 self.isConfig = True
648 648 if self.server:
649 649 self.context = zmq.Context()
650 650 self.socket = self.context.socket(zmq.REQ)
651 651 self.socket.connect(self.server)
652 652 self.poll = zmq.Poller()
653 653 self.poll.register(self.socket, zmq.POLLIN)
654 654
655 655 tm = getattr(dataOut, self.attr_time)
656 656
657 657 if self.data and 'time' in self.xaxis and (tm - self.tmin) >= self.xrange*60*60:
658 658 self.save_time = tm
659 659 self.__plot()
660 660 self.tmin += self.xrange*60*60
661 661 self.data.setup()
662 662 self.clear_figures()
663 663
664 664 self.__update(dataOut, tm)
665 665
666 666 if self.isPlotConfig is False:
667 667 self.__setup_plot()
668 668 self.isPlotConfig = True
669 669 if self.xaxis == 'time':
670 670 dt = self.getDateTime(tm)
671 671 if self.xmin is None:
672 672 self.tmin = tm
673 673 self.xmin = dt.hour
674 674 minutes = (self.xmin-int(self.xmin)) * 60
675 675 seconds = (minutes - int(minutes)) * 60
676 676 self.tmin = (dt.replace(hour=int(self.xmin), minute=int(minutes), second=int(seconds)) -
677 677 datetime.datetime(1970, 1, 1)).total_seconds()
678 678 if self.localtime:
679 679 self.tmin += time.timezone
680 680
681 681 if self.xmin is not None and self.xmax is not None:
682 682 self.xrange = self.xmax - self.xmin
683 683
684 684 if self.throttle == 0:
685 685 self.__plot()
686 686 else:
687 687 self.__throttle_plot(self.__plot)#, coerce=coerce)
688 688
689 689 def close(self):
690 690
691 691 if self.data and not self.data.flagNoData:
692 692 self.save_time = 0
693 693 self.__plot()
694 694 if self.data and not self.data.flagNoData and self.pause:
695 695 figpause(10)
@@ -1,356 +1,358
1 1 import os
2 2 import datetime
3 3 import numpy
4 4
5 5 from schainpy.model.graphics.jroplot_base import Plot, plt
6 6 from schainpy.model.graphics.jroplot_spectra import SpectraPlot, RTIPlot, CoherencePlot
7 7 from schainpy.utils import log
8 8
9 9 EARTH_RADIUS = 6.3710e3
10 10
11 11
12 12 def ll2xy(lat1, lon1, lat2, lon2):
13 13
14 14 p = 0.017453292519943295
15 15 a = 0.5 - numpy.cos((lat2 - lat1) * p)/2 + numpy.cos(lat1 * p) * \
16 16 numpy.cos(lat2 * p) * (1 - numpy.cos((lon2 - lon1) * p)) / 2
17 17 r = 12742 * numpy.arcsin(numpy.sqrt(a))
18 18 theta = numpy.arctan2(numpy.sin((lon2-lon1)*p)*numpy.cos(lat2*p), numpy.cos(lat1*p)
19 19 * numpy.sin(lat2*p)-numpy.sin(lat1*p)*numpy.cos(lat2*p)*numpy.cos((lon2-lon1)*p))
20 20 theta = -theta + numpy.pi/2
21 21 return r*numpy.cos(theta), r*numpy.sin(theta)
22 22
23 23
24 24 def km2deg(km):
25 25 '''
26 26 Convert distance in km to degrees
27 27 '''
28 28
29 29 return numpy.rad2deg(km/EARTH_RADIUS)
30 30
31 31
32 32
33 33 class SpectralMomentsPlot(SpectraPlot):
34 34 '''
35 35 Plot for Spectral Moments
36 36 '''
37 37 CODE = 'spc_moments'
38 38 colormap = 'jet'
39 39 plot_type = 'pcolor'
40 40
41 41
42 42 class SnrPlot(RTIPlot):
43 43 '''
44 44 Plot for SNR Data
45 45 '''
46 46
47 47 CODE = 'snr'
48 48 colormap = 'jet'
49 49
50 50 def update(self, dataOut):
51
51 if len(self.channelList) == 0:
52 self.channelList = dataOut.channelList
52 53 data = {
53 54 'snr': 10*numpy.log10(dataOut.data_snr)
54 55 }
55 56
56 57 return data, {}
57 58
58 59 class DopplerPlot(RTIPlot):
59 60 '''
60 61 Plot for DOPPLER Data (1st moment)
61 62 '''
62 63
63 64 CODE = 'dop'
64 65 colormap = 'jet'
65 66
66 67 def update(self, dataOut):
67 68
68 69 data = {
69 70 'dop': 10*numpy.log10(dataOut.data_dop)
70 71 }
71 72
72 73 return data, {}
73 74
74 75 class PowerPlot(RTIPlot):
75 76 '''
76 77 Plot for Power Data (0 moment)
77 78 '''
78 79
79 80 CODE = 'pow'
80 81 colormap = 'jet'
81 82
82 83 def update(self, dataOut):
83
84 if len(self.channelList) == 0:
85 self.channelList = dataOut.channelList
84 86 data = {
85 87 'pow': 10*numpy.log10(dataOut.data_pow)
86 88 }
87 print("data",data)
89 #print("data",data)
88 90 return data, {}
89 91
90 92 class SpectralWidthPlot(RTIPlot):
91 93 '''
92 94 Plot for Spectral Width Data (2nd moment)
93 95 '''
94 96
95 97 CODE = 'width'
96 98 colormap = 'jet'
97 99
98 100 def update(self, dataOut):
99 101
100 102 data = {
101 103 'width': dataOut.data_width
102 104 }
103 105
104 106 return data, {}
105 107
106 108 class SkyMapPlot(Plot):
107 109 '''
108 110 Plot for meteors detection data
109 111 '''
110 112
111 113 CODE = 'param'
112 114
113 115 def setup(self):
114 116
115 117 self.ncols = 1
116 118 self.nrows = 1
117 119 self.width = 7.2
118 120 self.height = 7.2
119 121 self.nplots = 1
120 122 self.xlabel = 'Zonal Zenith Angle (deg)'
121 123 self.ylabel = 'Meridional Zenith Angle (deg)'
122 124 self.polar = True
123 125 self.ymin = -180
124 126 self.ymax = 180
125 127 self.colorbar = False
126 128
127 129 def plot(self):
128 130
129 131 arrayParameters = numpy.concatenate(self.data['param'])
130 132 error = arrayParameters[:, -1]
131 133 indValid = numpy.where(error == 0)[0]
132 134 finalMeteor = arrayParameters[indValid, :]
133 135 finalAzimuth = finalMeteor[:, 3]
134 136 finalZenith = finalMeteor[:, 4]
135 137
136 138 x = finalAzimuth * numpy.pi / 180
137 139 y = finalZenith
138 140
139 141 ax = self.axes[0]
140 142
141 143 if ax.firsttime:
142 144 ax.plot = ax.plot(x, y, 'bo', markersize=5)[0]
143 145 else:
144 146 ax.plot.set_data(x, y)
145 147
146 148 dt1 = self.getDateTime(self.data.min_time).strftime('%y/%m/%d %H:%M:%S')
147 149 dt2 = self.getDateTime(self.data.max_time).strftime('%y/%m/%d %H:%M:%S')
148 150 title = 'Meteor Detection Sky Map\n %s - %s \n Number of events: %5.0f\n' % (dt1,
149 151 dt2,
150 152 len(x))
151 153 self.titles[0] = title
152 154
153 155
154 156 class GenericRTIPlot(Plot):
155 157 '''
156 158 Plot for data_xxxx object
157 159 '''
158 160
159 161 CODE = 'param'
160 162 colormap = 'viridis'
161 163 plot_type = 'pcolorbuffer'
162 164
163 165 def setup(self):
164 166 self.xaxis = 'time'
165 167 self.ncols = 1
166 168 self.nrows = self.data.shape('param')[0]
167 169 self.nplots = self.nrows
168 170 self.plots_adjust.update({'hspace':0.8, 'left': 0.1, 'bottom': 0.08, 'right':0.95, 'top': 0.95})
169 171
170 172 if not self.xlabel:
171 173 self.xlabel = 'Time'
172 174
173 175 self.ylabel = 'Height [km]'
174 176 if not self.titles:
175 177 self.titles = ['Param {}'.format(x) for x in range(self.nrows)]
176 178
177 179 def update(self, dataOut):
178 180
179 181 data = {
180 182 'param' : numpy.concatenate([getattr(dataOut, attr) for attr in self.attr_data], axis=0)
181 183 }
182 184
183 185 meta = {}
184 186
185 187 return data, meta
186 188
187 189 def plot(self):
188 190 # self.data.normalize_heights()
189 191 self.x = self.data.times
190 192 self.y = self.data.yrange
191 193 self.z = self.data['param']
192 194
193 195 self.z = numpy.ma.masked_invalid(self.z)
194 196
195 197 if self.decimation is None:
196 198 x, y, z = self.fill_gaps(self.x, self.y, self.z)
197 199 else:
198 200 x, y, z = self.fill_gaps(*self.decimate())
199 201
200 202 for n, ax in enumerate(self.axes):
201 203
202 204 self.zmax = self.zmax if self.zmax is not None else numpy.max(
203 205 self.z[n])
204 206 self.zmin = self.zmin if self.zmin is not None else numpy.min(
205 207 self.z[n])
206 208
207 209 if ax.firsttime:
208 210 if self.zlimits is not None:
209 211 self.zmin, self.zmax = self.zlimits[n]
210 212
211 213 ax.plt = ax.pcolormesh(x, y, z[n].T * self.factors[n],
212 214 vmin=self.zmin,
213 215 vmax=self.zmax,
214 216 cmap=self.cmaps[n]
215 217 )
216 218 else:
217 219 if self.zlimits is not None:
218 220 self.zmin, self.zmax = self.zlimits[n]
219 221 ax.collections.remove(ax.collections[0])
220 222 ax.plt = ax.pcolormesh(x, y, z[n].T * self.factors[n],
221 223 vmin=self.zmin,
222 224 vmax=self.zmax,
223 225 cmap=self.cmaps[n]
224 226 )
225 227
226 228
227 229 class PolarMapPlot(Plot):
228 230 '''
229 231 Plot for weather radar
230 232 '''
231 233
232 234 CODE = 'param'
233 235 colormap = 'seismic'
234 236
235 237 def setup(self):
236 238 self.ncols = 1
237 239 self.nrows = 1
238 240 self.width = 9
239 241 self.height = 8
240 242 self.mode = self.data.meta['mode']
241 243 if self.channels is not None:
242 244 self.nplots = len(self.channels)
243 245 self.nrows = len(self.channels)
244 246 else:
245 247 self.nplots = self.data.shape(self.CODE)[0]
246 248 self.nrows = self.nplots
247 249 self.channels = list(range(self.nplots))
248 250 if self.mode == 'E':
249 251 self.xlabel = 'Longitude'
250 252 self.ylabel = 'Latitude'
251 253 else:
252 254 self.xlabel = 'Range (km)'
253 255 self.ylabel = 'Height (km)'
254 256 self.bgcolor = 'white'
255 257 self.cb_labels = self.data.meta['units']
256 258 self.lat = self.data.meta['latitude']
257 259 self.lon = self.data.meta['longitude']
258 260 self.xmin, self.xmax = float(
259 261 km2deg(self.xmin) + self.lon), float(km2deg(self.xmax) + self.lon)
260 262 self.ymin, self.ymax = float(
261 263 km2deg(self.ymin) + self.lat), float(km2deg(self.ymax) + self.lat)
262 264 # self.polar = True
263 265
264 266 def plot(self):
265 267
266 268 for n, ax in enumerate(self.axes):
267 269 data = self.data['param'][self.channels[n]]
268 270
269 271 zeniths = numpy.linspace(
270 272 0, self.data.meta['max_range'], data.shape[1])
271 273 if self.mode == 'E':
272 274 azimuths = -numpy.radians(self.data.yrange)+numpy.pi/2
273 275 r, theta = numpy.meshgrid(zeniths, azimuths)
274 276 x, y = r*numpy.cos(theta)*numpy.cos(numpy.radians(self.data.meta['elevation'])), r*numpy.sin(
275 277 theta)*numpy.cos(numpy.radians(self.data.meta['elevation']))
276 278 x = km2deg(x) + self.lon
277 279 y = km2deg(y) + self.lat
278 280 else:
279 281 azimuths = numpy.radians(self.data.yrange)
280 282 r, theta = numpy.meshgrid(zeniths, azimuths)
281 283 x, y = r*numpy.cos(theta), r*numpy.sin(theta)
282 284 self.y = zeniths
283 285
284 286 if ax.firsttime:
285 287 if self.zlimits is not None:
286 288 self.zmin, self.zmax = self.zlimits[n]
287 289 ax.plt = ax.pcolormesh( # r, theta, numpy.ma.array(data, mask=numpy.isnan(data)),
288 290 x, y, numpy.ma.array(data, mask=numpy.isnan(data)),
289 291 vmin=self.zmin,
290 292 vmax=self.zmax,
291 293 cmap=self.cmaps[n])
292 294 else:
293 295 if self.zlimits is not None:
294 296 self.zmin, self.zmax = self.zlimits[n]
295 297 ax.collections.remove(ax.collections[0])
296 298 ax.plt = ax.pcolormesh( # r, theta, numpy.ma.array(data, mask=numpy.isnan(data)),
297 299 x, y, numpy.ma.array(data, mask=numpy.isnan(data)),
298 300 vmin=self.zmin,
299 301 vmax=self.zmax,
300 302 cmap=self.cmaps[n])
301 303
302 304 if self.mode == 'A':
303 305 continue
304 306
305 307 # plot district names
306 308 f = open('/data/workspace/schain_scripts/distrito.csv')
307 309 for line in f:
308 310 label, lon, lat = [s.strip() for s in line.split(',') if s]
309 311 lat = float(lat)
310 312 lon = float(lon)
311 313 # ax.plot(lon, lat, '.b', ms=2)
312 314 ax.text(lon, lat, label.decode('utf8'), ha='center',
313 315 va='bottom', size='8', color='black')
314 316
315 317 # plot limites
316 318 limites = []
317 319 tmp = []
318 320 for line in open('/data/workspace/schain_scripts/lima.csv'):
319 321 if '#' in line:
320 322 if tmp:
321 323 limites.append(tmp)
322 324 tmp = []
323 325 continue
324 326 values = line.strip().split(',')
325 327 tmp.append((float(values[0]), float(values[1])))
326 328 for points in limites:
327 329 ax.add_patch(
328 330 Polygon(points, ec='k', fc='none', ls='--', lw=0.5))
329 331
330 332 # plot Cuencas
331 333 for cuenca in ('rimac', 'lurin', 'mala', 'chillon', 'chilca', 'chancay-huaral'):
332 334 f = open('/data/workspace/schain_scripts/{}.csv'.format(cuenca))
333 335 values = [line.strip().split(',') for line in f]
334 336 points = [(float(s[0]), float(s[1])) for s in values]
335 337 ax.add_patch(Polygon(points, ec='b', fc='none'))
336 338
337 339 # plot grid
338 340 for r in (15, 30, 45, 60):
339 341 ax.add_artist(plt.Circle((self.lon, self.lat),
340 342 km2deg(r), color='0.6', fill=False, lw=0.2))
341 343 ax.text(
342 344 self.lon + (km2deg(r))*numpy.cos(60*numpy.pi/180),
343 345 self.lat + (km2deg(r))*numpy.sin(60*numpy.pi/180),
344 346 '{}km'.format(r),
345 347 ha='center', va='bottom', size='8', color='0.6', weight='heavy')
346 348
347 349 if self.mode == 'E':
348 350 title = 'El={}$^\circ$'.format(self.data.meta['elevation'])
349 351 label = 'E{:02d}'.format(int(self.data.meta['elevation']))
350 352 else:
351 353 title = 'Az={}$^\circ$'.format(self.data.meta['azimuth'])
352 354 label = 'A{:02d}'.format(int(self.data.meta['azimuth']))
353 355
354 356 self.save_labels = ['{}-{}'.format(lbl, label) for lbl in self.labels]
355 357 self.titles = ['{} {}'.format(
356 358 self.data.parameters[x], title) for x in self.channels]
@@ -1,727 +1,727
1 1 # Copyright (c) 2012-2020 Jicamarca Radio Observatory
2 2 # All rights reserved.
3 3 #
4 4 # Distributed under the terms of the BSD 3-clause license.
5 5 """Classes to plot Spectra data
6 6
7 7 """
8 8
9 9 import os
10 10 import numpy
11 11
12 12 from schainpy.model.graphics.jroplot_base import Plot, plt, log
13
13 from itertools import combinations
14 14
class SpectraPlot(Plot):
    '''
    Plot for Spectra data: one power-spectrum panel per channel, with an
    optional side profile showing power and noise level.
    '''

    CODE = 'spc'
    colormap = 'jet'
    plot_type = 'pcolor'
    buffering = False
    # cached copy of dataOut.channelList, filled on the first update()
    channelList = []

    def setup(self):
        """Define the figure layout: near-square grid, one panel per channel."""
        self.nplots = len(self.data.channels)
        self.ncols = int(numpy.sqrt(self.nplots) + 0.9)
        self.nrows = int((1.0 * self.nplots / self.ncols) + 0.9)
        self.height = 2.6 * self.nrows

        self.cb_label = 'dB'
        if self.showprofile:
            self.width = 4 * self.ncols
        else:
            self.width = 3.5 * self.ncols
        self.plots_adjust.update({'wspace': 0.4, 'hspace':0.4, 'left': 0.1, 'right': 0.9, 'bottom': 0.08})
        self.ylabel = 'Range [km]'

    def update(self, dataOut):
        """Return (data, meta) with spectra (dB), power profile and noise.

        Fix: the channel list was previously cached behind the check
        ``self.channelList == None``, which is never true for the ``[]``
        class default, so ``plot()`` crashed indexing an empty list.  Use
        the same emptiness test as RTIPlot.update().
        """
        if len(self.channelList) == 0:
            self.channelList = dataOut.channelList
        data = {}
        meta = {}
        spc = 10*numpy.log10(dataOut.data_spc/dataOut.normFactor)

        data['spc'] = spc
        data['rti'] = dataOut.getPower()
        data['noise'] = 10*numpy.log10(dataOut.getNoise()/dataOut.normFactor)
        meta['xrange'] = (dataOut.getFreqRange(1)/1000., dataOut.getAcfRange(1), dataOut.getVelRange(1))
        if self.CODE == 'spc_moments':
            data['moments'] = dataOut.moments

        return data, meta

    def plot(self):
        """Draw (or refresh) the spectra panels and optional side profiles."""
        if self.xaxis == "frequency":
            x = self.data.xrange[0]
            self.xlabel = "Frequency (kHz)"
        elif self.xaxis == "time":
            x = self.data.xrange[1]
            self.xlabel = "Time (ms)"
        else:
            x = self.data.xrange[2]
            self.xlabel = "Velocity (m/s)"

        # moments plots are always drawn against velocity
        if self.CODE == 'spc_moments':
            x = self.data.xrange[2]
            self.xlabel = "Velocity (m/s)"

        self.titles = []

        y = self.data.yrange
        self.y = y

        data = self.data[-1]
        z = data['spc']

        for n, ax in enumerate(self.axes):
            noise = data['noise'][n]
            if self.CODE == 'spc_moments':
                mean = data['moments'][n, 1]
            if ax.firsttime:
                self.xmax = self.xmax if self.xmax else numpy.nanmax(x)
                self.xmin = self.xmin if self.xmin else -self.xmax
                self.zmin = self.zmin if self.zmin else numpy.nanmin(z)
                self.zmax = self.zmax if self.zmax else numpy.nanmax(z)
                ax.plt = ax.pcolormesh(x, y, z[n].T,
                                       vmin=self.zmin,
                                       vmax=self.zmax,
                                       cmap=plt.get_cmap(self.colormap)
                                       )

                if self.showprofile:
                    ax.plt_profile = self.pf_axes[n].plot(
                        data['rti'][n], y)[0]
                    ax.plt_noise = self.pf_axes[n].plot(numpy.repeat(noise, len(y)), y,
                                                        color="k", linestyle="dashed", lw=1)[0]
                if self.CODE == 'spc_moments':
                    ax.plt_mean = ax.plot(mean, y, color='k')[0]
            else:
                # subsequent frames only refresh the artists' data
                ax.plt.set_array(z[n].T.ravel())
                if self.showprofile:
                    ax.plt_profile.set_data(data['rti'][n], y)
                    ax.plt_noise.set_data(numpy.repeat(noise, len(y)), y)
                if self.CODE == 'spc_moments':
                    ax.plt_mean.set_data(mean, y)
            self.titles.append('CH {}: {:3.2f}dB'.format(self.channelList[n], noise))
109 110
class CrossSpectraPlot(Plot):
    '''
    Plot for cross-spectra data: one coherence panel and one phase panel
    per channel pair.
    '''

    CODE = 'cspc'
    colormap = 'jet'
    plot_type = 'pcolor'
    # independent color limits for the coherence and phase panels
    zmin_coh = None
    zmax_coh = None
    zmin_phase = None
    zmax_phase = None
    realChannels = None
    # channel pairs as configured in the processing unit; cached on first update()
    crossPairs = None

    def setup(self):
        """Define the figure layout: two panels (coherence + phase) per pair."""
        self.ncols = 4
        self.nplots = len(self.data.pairs) * 2
        self.nrows = int((1.0 * self.nplots / self.ncols) + 0.9)
        self.width = 3.1 * self.ncols
        self.height = 2.6 * self.nrows
        self.ylabel = 'Range [km]'
        self.showprofile = False
        self.plots_adjust.update({'left': 0.08, 'right': 0.92, 'wspace': 0.5, 'hspace':0.4, 'top':0.95, 'bottom': 0.08})

    def update(self, dataOut):
        """Return (data, meta) for one incoming object.

        data['cspc'] interleaves coherence and phase arrays pair by pair:
        [coh0, phase0, coh1, phase1, ...].
        """
        data = {}
        meta = {}

        spc = dataOut.data_spc
        cspc = dataOut.data_cspc
        meta['xrange'] = (dataOut.getFreqRange(1)/1000., dataOut.getAcfRange(1), dataOut.getVelRange(1))
        # all 2-combinations of channel indexes, matching data_cspc ordering
        # (assumption — TODO confirm against the processing unit)
        rawPairs = list(combinations(list(range(dataOut.nChannels)), 2))
        meta['pairs'] = rawPairs

        if self.crossPairs == None:
            self.crossPairs = dataOut.pairsList

        tmp = []

        for n, pair in enumerate(meta['pairs']):
            # normalized cross spectrum -> complex coherence
            out = cspc[n] / numpy.sqrt(spc[pair[0]] * spc[pair[1]])
            coh = numpy.abs(out)
            phase = numpy.arctan2(out.imag, out.real) * 180 / numpy.pi
            tmp.append(coh)
            tmp.append(phase)

        data['cspc'] = numpy.array(tmp)

        return data, meta

    def plot(self):
        """Draw (or refresh) the coherence and phase pcolormesh panels."""
        if self.xaxis == "frequency":
            x = self.data.xrange[0]
            self.xlabel = "Frequency (kHz)"
        elif self.xaxis == "time":
            x = self.data.xrange[1]
            self.xlabel = "Time (ms)"
        else:
            x = self.data.xrange[2]
            self.xlabel = "Velocity (m/s)"

        self.titles = []

        y = self.data.yrange
        self.y = y

        data = self.data[-1]
        cspc = data['cspc']

        for n in range(len(self.data.pairs)):
            # use the configured pair labels, not the raw combination order
            pair = self.crossPairs[n]

            coh = cspc[n*2]
            phase = cspc[n*2+1]
            ax = self.axes[2 * n]

            if ax.firsttime:
                ax.plt = ax.pcolormesh(x, y, coh.T,
                                       vmin=0,
                                       vmax=1,
                                       cmap=plt.get_cmap(self.colormap_coh)
                                       )
            else:
                ax.plt.set_array(coh.T.ravel())
            self.titles.append(
                'Coherence Ch{} * Ch{}'.format(pair[0], pair[1]))

            ax = self.axes[2 * n + 1]
            if ax.firsttime:
                ax.plt = ax.pcolormesh(x, y, phase.T,
                                       vmin=-180,
                                       vmax=180,
                                       cmap=plt.get_cmap(self.colormap_phase)
                                       )
            else:
                ax.plt.set_array(phase.T.ravel())
            self.titles.append('Phase CH{} * CH{}'.format(pair[0], pair[1]))
212 212
class RTIPlot(Plot):
    '''
    Plot for RTI (range-time-intensity) data.
    '''

    CODE = 'rti'
    colormap = 'jet'
    plot_type = 'pcolorbuffer'
    titles = None
    # cached copy of dataOut.channelList, filled on the first update()
    channelList = []

    def setup(self):
        """Define the figure layout: one stacked row per channel."""
        self.xaxis = 'time'
        self.ncols = 1
        self.nrows = len(self.data.channels)
        self.nplots = len(self.data.channels)
        self.ylabel = 'Range [km]'
        self.xlabel = 'Time'
        self.cb_label = 'dB'
        self.plots_adjust.update({'hspace':0.8, 'left': 0.1, 'bottom': 0.08, 'right':0.95})
        # provisional titles; plot() rewrites them with the real channel ids
        self.titles = ['{} Channel {}'.format(
            self.CODE.upper(), x) for x in range(self.nplots)]

    def update(self, dataOut):
        """Return (data, meta) with the power profile and noise level (dB)."""
        if len(self.channelList) == 0:
            self.channelList = dataOut.channelList
        data = {}
        meta = {}
        data['rti'] = dataOut.getPower()
        data['noise'] = 10*numpy.log10(dataOut.getNoise()/dataOut.normFactor)
        return data, meta

    def plot(self):
        """Draw (or refresh) the buffered RTI panels."""
        self.x = self.data.times
        self.y = self.data.yrange
        self.z = self.data[self.CODE]
        self.z = numpy.ma.masked_invalid(self.z)
        # channelList may be a plain list or a numpy array: comparing an
        # ndarray with None yields an ambiguous-truth array, which raises,
        # hence the fallback branch using .any()
        try:
            if self.channelList != None:
                self.titles = ['{} Channel {}'.format(
                    self.CODE.upper(), x) for x in self.channelList]
        except:
            if self.channelList.any() != None:
                self.titles = ['{} Channel {}'.format(
                    self.CODE.upper(), x) for x in self.channelList]
        if self.decimation is None:
            x, y, z = self.fill_gaps(self.x, self.y, self.z)
        else:
            x, y, z = self.fill_gaps(*self.decimate())

        for n, ax in enumerate(self.axes):
            self.zmin = self.zmin if self.zmin else numpy.min(self.z)
            self.zmax = self.zmax if self.zmax else numpy.max(self.z)
            data = self.data[-1]
            if ax.firsttime:
                ax.plt = ax.pcolormesh(x, y, z[n].T,
                                       vmin=self.zmin,
                                       vmax=self.zmax,
                                       cmap=plt.get_cmap(self.colormap)
                                       )
                if self.showprofile:
                    ax.plot_profile = self.pf_axes[n].plot(
                        data['rti'][n], self.y)[0]
                    ax.plot_noise = self.pf_axes[n].plot(numpy.repeat(data['noise'][n], len(self.y)), self.y,
                                                         color="k", linestyle="dashed", lw=1)[0]
            else:
                # the buffered x axis grows, so rebuild the mesh each refresh
                ax.collections.remove(ax.collections[0])
                ax.plt = ax.pcolormesh(x, y, z[n].T,
                                       vmin=self.zmin,
                                       vmax=self.zmax,
                                       cmap=plt.get_cmap(self.colormap)
                                       )
                if self.showprofile:
                    ax.plot_profile.set_data(data['rti'][n], self.y)
                    ax.plot_noise.set_data(numpy.repeat(
                        data['noise'][n], len(self.y)), self.y)
291 291
292 292
class CoherencePlot(RTIPlot):
    '''
    Time-range (RTI style) plot of the coherence for each channel pair.
    '''

    CODE = 'coh'

    def setup(self):
        """One stacked row per channel pair; labels depend on the subclass CODE."""
        npairs = len(self.data.pairs)
        self.xaxis = 'time'
        self.ncols = 1
        self.nrows = npairs
        self.nplots = npairs
        self.ylabel = 'Range [km]'
        self.xlabel = 'Time'
        self.plots_adjust.update({'hspace':0.6, 'left': 0.1, 'bottom': 0.1,'right':0.95})
        if self.CODE == 'coh':
            self.cb_label = ''
            fmt = 'Coherence Map Ch{} * Ch{}'
        else:
            # this branch is taken by the PhasePlot subclass (CODE = 'phase')
            self.cb_label = 'Degrees'
            fmt = 'Phase Map Ch{} * Ch{}'
        self.titles = [fmt.format(p[0], p[1]) for p in self.data.pairs]

    def update(self, dataOut):
        """Return the coherence per pair and the pair list for one dataOut."""
        meta = {'pairs': dataOut.pairsList}
        data = {'coh': dataOut.getCoherence()}
        return data, meta
325 325
class PhasePlot(CoherencePlot):
    '''
    Time-range plot of the phase difference for each channel pair.
    '''

    CODE = 'phase'
    colormap = 'seismic'

    def update(self, dataOut):
        """Return the per-pair phase (degrees) and the pair list."""
        meta = {'pairs': dataOut.pairsList}
        data = {'phase': dataOut.getCoherence(phase=True)}
        return data, meta
342 342
class NoisePlot(Plot):
    '''
    Time-series plot of the noise level (dB) of every channel.
    '''

    CODE = 'noise'
    plot_type = 'scatterbuffer'

    def setup(self):
        """Single panel, all channels as lines, legend outside on the right."""
        self.xaxis = 'time'
        self.ncols = 1
        self.nrows = 1
        self.nplots = 1
        self.ylabel = 'Intensity [dB]'
        self.xlabel = 'Time'
        self.titles = ['Noise']
        self.colorbar = False
        self.plots_adjust.update({'right': 0.85 })

    def update(self, dataOut):
        """Return the per-channel noise level in dB, shaped (nChannels, 1)."""
        data = {}
        meta = {}
        data['noise'] = 10*numpy.log10(dataOut.getNoise()/dataOut.normFactor).reshape(dataOut.nChannels, 1)
        meta['yrange'] = numpy.array([])

        return data, meta

    def plot(self):
        """Draw (or refresh) one noise line per channel."""
        x = self.data.times
        xmin = self.data.min_time
        xmax = xmin + self.xrange * 60 * 60
        Y = self.data['noise']

        if self.axes[0].firsttime:
            self.ymin = numpy.nanmin(Y) - 5
            self.ymax = numpy.nanmax(Y) + 5
            for ch in self.data.channels:
                y = Y[ch]
                self.axes[0].plot(x, y, lw=1, label='Ch{}'.format(ch))
            plt.legend(bbox_to_anchor=(1.18, 1.0))
        else:
            # NOTE(review): indexing axes[0].lines[ch] assumes channels are
            # consecutive 0-based ids matching line creation order — confirm
            for ch in self.data.channels:
                y = Y[ch]
                self.axes[0].lines[ch].set_data(x, y)
389 389
390 390
class PowerProfilePlot(Plot):
    '''
    Single-panel plot of power (dB) vs range for every channel.
    '''

    CODE = 'pow_profile'
    plot_type = 'scatter'

    def setup(self):
        """Single small panel with one profile line per channel."""
        self.ncols = 1
        self.nrows = 1
        self.nplots = 1
        self.height = 4
        self.width = 3
        self.ylabel = 'Range [km]'
        self.xlabel = 'Intensity [dB]'
        self.titles = ['Power Profile']
        self.colorbar = False

    def update(self, dataOut):
        """Return the per-channel power profile for one dataOut object."""
        data = {}
        meta = {}
        data[self.CODE] = dataOut.getPower()

        return data, meta

    def plot(self):
        """Draw (or refresh) the power profile lines."""
        y = self.data.yrange
        self.y = y

        x = self.data[-1][self.CODE]

        # auto x limits with a 10% margin, set once
        if self.xmin is None: self.xmin = numpy.nanmin(x)*0.9
        if self.xmax is None: self.xmax = numpy.nanmax(x)*1.1

        if self.axes[0].firsttime:
            for ch in self.data.channels:
                self.axes[0].plot(x[ch], y, lw=1, label='Ch{}'.format(ch))
            plt.legend()
        else:
            # NOTE(review): indexing axes[0].lines[ch] assumes channels are
            # consecutive 0-based ids matching line creation order — confirm
            for ch in self.data.channels:
                self.axes[0].lines[ch].set_data(x[ch], y)
433 433
434 434
class SpectraCutPlot(Plot):
    '''
    Plot of spectra cuts: power vs frequency/velocity at a set of selected
    heights, one panel per channel.
    '''

    CODE = 'spc_cut'
    plot_type = 'scatter'
    buffering = False

    def setup(self):
        """Near-square grid of panels, one per channel, legend at the right."""
        self.nplots = len(self.data.channels)
        self.ncols = int(numpy.sqrt(self.nplots) + 0.9)
        self.nrows = int((1.0 * self.nplots / self.ncols) + 0.9)
        self.width = 3.4 * self.ncols + 1.5
        self.height = 3 * self.nrows
        self.ylabel = 'Power [dB]'
        self.colorbar = False
        self.plots_adjust.update({'left':0.1, 'hspace':0.3, 'right': 0.75, 'bottom':0.08})

    def update(self, dataOut):
        """Return the power spectra (dB) and the x-axis ranges."""
        data = {}
        meta = {}
        spc = 10*numpy.log10(dataOut.data_spc/dataOut.normFactor)
        data['spc'] = spc
        meta['xrange'] = (dataOut.getFreqRange(1)/1000., dataOut.getAcfRange(1), dataOut.getVelRange(1))

        return data, meta

    def plot(self):
        """Draw (or refresh) one line per selected height on each panel."""
        if self.xaxis == "frequency":
            x = self.data.xrange[0][1:]
            self.xlabel = "Frequency (kHz)"
        elif self.xaxis == "time":
            x = self.data.xrange[1]
            self.xlabel = "Time (ms)"
        else:
            x = self.data.xrange[2]
            self.xlabel = "Velocity (m/s)"

        self.titles = []

        y = self.data.yrange
        z = self.data[-1]['spc']

        if self.height_index:
            index = numpy.array(self.height_index)
        else:
            # pick ~9 evenly spaced heights.  Fix: clamp the step to >= 1 so
            # profiles with fewer than 9 heights do not raise (arange step 0)
            index = numpy.arange(0, len(y), max(1, len(y) // 9))

        for n, ax in enumerate(self.axes):
            if ax.firsttime:
                self.xmax = self.xmax if self.xmax else numpy.nanmax(x)
                self.xmin = self.xmin if self.xmin else -self.xmax
                self.ymin = self.ymin if self.ymin else numpy.nanmin(z)
                self.ymax = self.ymax if self.ymax else numpy.nanmax(z)
                ax.plt = ax.plot(x, z[n, :, index].T)
                labels = ['Range = {:2.1f}km'.format(y[i]) for i in index]
                self.figures[0].legend(ax.plt, labels, loc='center right')
            else:
                for i, line in enumerate(ax.plt):
                    line.set_data(x, z[n, :, index[i]])
            self.titles.append('CH {}'.format(n))
496 496
497 497
class BeaconPhase(Plot):
    """Legacy (pre-refactor) plot of the beacon signal phase per channel pair.

    NOTE(review): this class uses the old Plot API (createFigure/addAxes/
    pmultilineyaxis) and references BEACON_CODE and isTimeInHourRange, which
    are not defined in this module — confirm they are provided elsewhere.
    """

    __isConfig = None
    __nsubplots = None

    # prefix used for the output phase-log directory/file names
    PREFIX = 'beacon_phase'

    def __init__(self):
        # figure geometry and accumulation buffers for the time series
        Plot.__init__(self)
        self.timerange = 24*60*60
        self.isConfig = False
        self.__nsubplots = 1
        self.counter_imagwr = 0
        self.WIDTH = 800
        self.HEIGHT = 400
        self.WIDTHPROF = 120
        self.HEIGHTPROF = 0
        self.xdata = None
        self.ydata = None

        self.PLOT_CODE = BEACON_CODE

        # FTP/experiment metadata set on first run()
        self.FTP_WEI = None
        self.EXP_CODE = None
        self.SUB_EXP_CODE = None
        self.PLOT_POS = None

        self.filename_phase = None

        self.figfile = None

        self.xmin = None
        self.xmax = None

    def getSubplots(self):
        """Return the (nrow, ncol) subplot grid — always a single panel."""
        ncol = 1
        nrow = 1

        return nrow, ncol

    def setup(self, id, nplots, wintitle, showprofile=True, show=True):
        """Create the figure and its single axes using the legacy Plot API."""
        self.__showprofile = showprofile
        self.nplots = nplots

        ncolspan = 7
        colspan = 6
        self.__nsubplots = 2

        self.createFigure(id = id,
                          wintitle = wintitle,
                          widthplot = self.WIDTH+self.WIDTHPROF,
                          heightplot = self.HEIGHT+self.HEIGHTPROF,
                          show=show)

        nrow, ncol = self.getSubplots()

        self.addAxes(nrow, ncol*ncolspan, 0, 0, colspan, 1)

    def save_phase(self, filename_phase):
        """Create/truncate the phase log file and write its header."""
        f = open(filename_phase,'w+')
        f.write('\n\n')
        f.write('JICAMARCA RADIO OBSERVATORY - Beacon Phase \n')
        f.write('DD MM YYYY HH MM SS pair(2,0) pair(2,1) pair(2,3) pair(2,4)\n\n' )
        f.close()

    def save_data(self, filename_phase, data, data_datetime):
        """Append one timestamped row of four pair phases to the log file."""
        f=open(filename_phase,'a')
        timetuple_data = data_datetime.timetuple()
        day = str(timetuple_data.tm_mday)
        month = str(timetuple_data.tm_mon)
        year = str(timetuple_data.tm_year)
        hour = str(timetuple_data.tm_hour)
        minute = str(timetuple_data.tm_min)
        second = str(timetuple_data.tm_sec)
        # assumes data holds at least 4 pair phases — TODO confirm
        f.write(day+' '+month+' '+year+' '+hour+' '+minute+' '+second+' '+str(data[0])+' '+str(data[1])+' '+str(data[2])+' '+str(data[3])+'\n')
        f.close()

    def plot(self):
        # new-style Plot API entry point; this class still draws inside run()
        log.warning('TODO: Not yet implemented...')

    def run(self, dataOut, id, wintitle="", pairsList=None, showprofile='True',
            xmin=None, xmax=None, ymin=None, ymax=None, hmin=None, hmax=None,
            timerange=None,
            save=False, figpath='./', figfile=None, show=True, ftp=False, wr_period=1,
            server=None, folder=None, username=None, password=None,
            ftp_wei=0, exp_code=0, sub_exp_code=0, plot_pos=0):
        """Compute the average beacon phase per pair and append it to the
        accumulated time series plot.

        Returns dataOut unchanged (or None when skipped by time/pair filters).
        """
        if dataOut.flagNoData:
            return dataOut

        if not isTimeInHourRange(dataOut.datatime, xmin, xmax):
            return

        # resolve the pair indexes to plot (default: first 10 configured pairs)
        if pairsList == None:
            pairsIndexList = dataOut.pairsIndexList[:10]
        else:
            pairsIndexList = []
            for pair in pairsList:
                if pair not in dataOut.pairsList:
                    raise ValueError("Pair %s is not in dataOut.pairsList" %(pair))
                pairsIndexList.append(dataOut.pairsList.index(pair))

        if pairsIndexList == []:
            return

        # if len(pairsIndexList) > 4:
        #     pairsIndexList = pairsIndexList[0:4]

        # optional height-range restriction expressed as slice indexes
        hmin_index = None
        hmax_index = None

        if hmin != None and hmax != None:
            indexes = numpy.arange(dataOut.nHeights)
            hmin_list = indexes[dataOut.heightList >= hmin]
            hmax_list = indexes[dataOut.heightList <= hmax]

            if hmin_list.any():
                hmin_index = hmin_list[0]

            if hmax_list.any():
                hmax_index = hmax_list[-1]+1

        x = dataOut.getTimeRange()

        thisDatetime = dataOut.datatime

        title = wintitle + " Signal Phase" # : %s" %(thisDatetime.strftime("%d-%b-%Y"))
        xlabel = "Local Time"
        ylabel = "Phase (degrees)"

        update_figfile = False

        nplots = len(pairsIndexList)
        #phase = numpy.zeros((len(pairsIndexList),len(dataOut.beacon_heiIndexList)))
        phase_beacon = numpy.zeros(len(pairsIndexList))
        for i in range(nplots):
            pair = dataOut.pairsList[pairsIndexList[i]]
            # average over the spectral axis, then derive the mean coherence phase
            ccf = numpy.average(dataOut.data_cspc[pairsIndexList[i], :, hmin_index:hmax_index], axis=0)
            powa = numpy.average(dataOut.data_spc[pair[0], :, hmin_index:hmax_index], axis=0)
            powb = numpy.average(dataOut.data_spc[pair[1], :, hmin_index:hmax_index], axis=0)
            avgcoherenceComplex = ccf/numpy.sqrt(powa*powb)
            phase = numpy.arctan2(avgcoherenceComplex.imag, avgcoherenceComplex.real)*180/numpy.pi

            # prefer the dedicated beacon heights when configured
            if dataOut.beacon_heiIndexList:
                phase_beacon[i] = numpy.average(phase[dataOut.beacon_heiIndexList])
            else:
                phase_beacon[i] = numpy.average(phase)

        if not self.isConfig:
            # one-time figure/limits/metadata setup on the first data block
            nplots = len(pairsIndexList)

            self.setup(id=id,
                       nplots=nplots,
                       wintitle=wintitle,
                       showprofile=showprofile,
                       show=show)

            if timerange != None:
                self.timerange = timerange

            self.xmin, self.xmax = self.getTimeLim(x, xmin, xmax, timerange)

            if ymin == None: ymin = 0
            if ymax == None: ymax = 360

            self.FTP_WEI = ftp_wei
            self.EXP_CODE = exp_code
            self.SUB_EXP_CODE = sub_exp_code
            self.PLOT_POS = plot_pos

            self.name = thisDatetime.strftime("%Y%m%d_%H%M%S")
            self.isConfig = True
            self.figfile = figfile
            self.xdata = numpy.array([])
            self.ydata = numpy.array([])

            update_figfile = True

            #open file beacon phase
            path = '%s%03d' %(self.PREFIX, self.id)
            beacon_file = os.path.join(path,'%s.txt'%self.name)
            self.filename_phase = os.path.join(figpath,beacon_file)
            #self.save_phase(self.filename_phase)


        #store data beacon phase
        #self.save_data(self.filename_phase, phase_beacon, thisDatetime)

        self.setWinTitle(title)


        title = "Phase Plot %s" %(thisDatetime.strftime("%Y/%m/%d %H:%M:%S"))

        legendlabels = ["Pair (%d,%d)"%(pair[0], pair[1]) for pair in dataOut.pairsList]

        axes = self.axesList[0]

        # accumulate one (time, phases) column per call
        self.xdata = numpy.hstack((self.xdata, x[0:1]))

        if len(self.ydata)==0:
            self.ydata = phase_beacon.reshape(-1,1)
        else:
            self.ydata = numpy.hstack((self.ydata, phase_beacon.reshape(-1,1)))


        axes.pmultilineyaxis(x=self.xdata, y=self.ydata,
                             xmin=self.xmin, xmax=self.xmax, ymin=ymin, ymax=ymax,
                             xlabel=xlabel, ylabel=ylabel, title=title, legendlabels=legendlabels, marker='x', markersize=8, linestyle="solid",
                             XAxisAsTime=True, grid='both'
                             )

        self.draw()

        # once the time window is full, force a save and restart the config
        if dataOut.ltctime >= self.xmax:
            self.counter_imagwr = wr_period
            self.isConfig = False
            update_figfile = True

        self.save(figpath=figpath,
                  figfile=figfile,
                  save=save,
                  ftp=ftp,
                  wr_period=wr_period,
                  thisDatetime=thisDatetime,
                  update_figfile=update_figfile)

        return dataOut
@@ -1,1575 +1,1576
1 1 """
2 2 Created on Jul 2, 2014
3 3
4 4 @author: roj-idl71
5 5 """
6 6 import os
7 7 import sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import inspect
13 13 import time
14 14 import datetime
15 15 import zmq
16 16
17 17 from schainpy.model.proc.jroproc_base import Operation, MPDecorator
18 18 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
19 19 from schainpy.model.data.jroheaderIO import get_dtype_index, get_numpy_dtype, get_procflag_dtype, get_dtype_width
20 20 from schainpy.utils import log
21 21 import schainpy.admin
22 22
23 23 LOCALTIME = True
24 24 DT_DIRECTIVES = {
25 25 '%Y': 4,
26 26 '%y': 2,
27 27 '%m': 2,
28 28 '%d': 2,
29 29 '%j': 3,
30 30 '%H': 2,
31 31 '%M': 2,
32 32 '%S': 2,
33 33 '%f': 6
34 34 }
35 35
36 36
def isNumber(cad):
    """
    Check whether the string *cad* can be converted to a number.

    Input:
        cad : string to analyze

    Return:
        True  : the string is numeric
        False : it is not
    """
    try:
        float(cad)
    except:
        return False
    else:
        return True
55 55
56 56
def isFileInEpoch(filename, startUTSeconds, endUTSeconds):
    """
    Determine whether a Jicamarca data file contains data inside the given
    epoch range.

    Inputs:
        filename       : full path of the data file in Jicamarca format (.r)
        startUTSeconds : start of the selected range, in seconds counted
                         from 01/01/1970
        endUTSeconds   : end of the selected range, in seconds counted
                         from 01/01/1970

    Return:
        1 if the first basic-header timestamp falls inside
        [startUTSeconds, endUTSeconds), 0 otherwise — including when the
        file cannot be opened or its header is invalid.
    """
    basicHeaderObj = BasicHeader(LOCALTIME)

    try:
        fp = open(filename, 'rb')
    except IOError:
        print("The file %s can't be opened" % (filename))
        return 0

    sts = basicHeaderObj.read(fp)
    fp.close()

    if not(sts):
        print("Skipping the file %s because it has not a valid header" % (filename))
        return 0

    # only the timestamp of the first block is checked
    if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
        return 0

    return 1
97 97
98 98
def isTimeInRange(thisTime, startTime, endTime):
    """
    Return 1 when *thisTime* falls inside [startTime, endTime], else 0.

    When endTime < startTime the window is taken to wrap past midnight,
    i.e. the excluded interval is (endTime, startTime).
    """
    if endTime >= startTime:
        # plain, non-wrapping window
        return 1 if startTime <= thisTime <= endTime else 0
    # wrapped window: outside only when strictly between endTime and startTime
    return 0 if endTime < thisTime < startTime else 1
108 108
109 109
def isFileInTimeRange(filename, startDate, endDate, startTime, endTime):
    """
    Determine whether a Jicamarca data file contains data inside the given
    time-of-day range.

    Inputs:
        filename  : full path of the data file in Jicamarca format (.r)
        startDate : start of the selected range (datetime.date)
        endDate   : end of the selected range (datetime.date)
        startTime : start of the selected range (datetime.time)
        endTime   : end of the selected range (datetime.time)

    Return:
        The datetime of the file's first block when the file overlaps the
        requested range, None otherwise (also when the file cannot be
        opened, a header is invalid, or the file is truncated).
    """

    try:
        fp = open(filename, 'rb')
    except IOError:
        print("The file %s can't be opened" % (filename))
        return None

    firstBasicHeaderObj = BasicHeader(LOCALTIME)
    systemHeaderObj = SystemHeader()
    radarControllerHeaderObj = RadarControllerHeader()
    processingHeaderObj = ProcessingHeader()

    lastBasicHeaderObj = BasicHeader(LOCALTIME)

    sts = firstBasicHeaderObj.read(fp)

    if not(sts):
        print("[Reading] Skipping the file %s because it has not a valid header" % (filename))
        return None

    if not systemHeaderObj.read(fp):
        return None

    if not radarControllerHeaderObj.read(fp):
        return None

    if not processingHeaderObj.read(fp):
        return None

    filesize = os.path.getsize(filename)

    offset = processingHeaderObj.blockSize + 24  # header size

    if filesize <= offset:
        print("[Reading] %s: This file has not enough data" % filename)
        return None

    # seek to the last block to read its basic header (file end timestamp)
    fp.seek(-offset, 2)

    sts = lastBasicHeaderObj.read(fp)

    fp.close()

    thisDatetime = lastBasicHeaderObj.datatime
    thisTime_last_block = thisDatetime.time()

    thisDatetime = firstBasicHeaderObj.datatime
    thisDate = thisDatetime.date()
    thisTime_first_block = thisDatetime.time()

    # General case
    # o>>>>>>>>>>>>>><<<<<<<<<<<<<<o
    #-----------o----------------------------o-----------
    #        startTime                     endTime

    if endTime >= startTime:
        if (thisTime_last_block < startTime) or (thisTime_first_block > endTime):
            return None

        return thisDatetime

    # If endTime < startTime then endTime belongs to the next day

    #<<<<<<<<<<<o                            o>>>>>>>>>>>
    #-----------o----------------------------o-----------
    #         endTime                     startTime

    if (thisDate == startDate) and (thisTime_last_block < startTime):
        return None

    if (thisDate == endDate) and (thisTime_first_block > endTime):
        return None

    if (thisTime_last_block < startTime) and (thisTime_first_block > endTime):
        return None

    return thisDatetime
211 211
212 212
def isFolderInDateRange(folder, startDate=None, endDate=None):
    """
    Return 1 when a radar data folder falls inside the given date range.

    Inputs:
        folder    : full directory path; its basename should follow the
                    "?YYYYDDD" convention (YYYY = year, DDD = day of year)
        startDate : start of the selected range (datetime.date)
        endDate   : end of the selected range (datetime.date)

    Return:
        1 when the folder date is within [startDate, endDate] (or when no
        range is given), 0 when the name has the wrong format or the date
        is out of range.
    """
    basename = os.path.basename(folder)

    if not isRadarFolder(basename):
        print("The folder %s has not the rigth format" % folder)
        return 0

    if not (startDate and endDate):
        return 1

    thisDate = getDateFromRadarFolder(basename)
    return 1 if startDate <= thisDate <= endDate else 0
252 252
253 253
def isFileInDateRange(filename, startDate=None, endDate=None):
    """
    Return 1 when a Jicamarca data file falls inside the given date range.

    Inputs:
        filename  : full path of the data file (.r); its basename should
                    follow the "?YYYYDDDsss" convention (YYYY = year,
                    DDD = day of year, sss = set)
        startDate : start of the selected range (datetime.date)
        endDate   : end of the selected range (datetime.date)

    Return:
        1 when the file date is within [startDate, endDate] (or when no
        range is given), 0 when the name has the wrong format or the date
        is out of range.
    """
    basename = os.path.basename(filename)

    if not isRadarFile(basename):
        print("The filename %s has not the rigth format" % filename)
        return 0

    if not (startDate and endDate):
        return 1

    thisDate = getDateFromRadarFile(basename)
    return 1 if startDate <= thisDate <= endDate else 0
295 295
296 296
def getFileFromSet(path, ext, set):
    """
    Return the filename in *path* whose name encodes the given set number,
    following the "?YYYYDDDSSS.ext" radar convention.

    Inputs:
        path : directory to scan
        ext  : file extension to accept (case-insensitive)
        set  : set number (SSS) to look for

    Return:
        The matching filename; if no file matches the requested set, the
        last valid filename (sorted case-insensitively); None when the
        directory holds no file with a parseable name.
    """
    validFilelist = []
    fileList = os.listdir(path)

    # expected layout: 0:'H' 1-4:YYYY 5-7:DDD 8-10:SSS then .ext
    year = doy = None
    for thisFile in fileList:
        try:
            year = int(thisFile[1:5])
            doy = int(thisFile[5:8])
        except:
            continue

        if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
            continue

        validFilelist.append(thisFile)

    # Fix: previously `year`/`doy` were referenced unbound (NameError) when
    # no filename in the directory could be parsed at all
    if year is None:
        return None

    myfile = fnmatch.filter(
        validFilelist, '*%4.4d%3.3d%3.3d*' % (year, doy, set))

    if len(myfile) != 0:
        return myfile[0]
    else:
        filename = '*%4.4d%3.3d%3.3d%s' % (year, doy, set, ext.lower())
        print('the filename %s does not exist' % filename)
        print('...going to the last file: ')

        if validFilelist:
            validFilelist = sorted(validFilelist, key=str.lower)
            return validFilelist[-1]

    return None
331 331
332 332
def getlastFileFromPath(path, ext):
    """
    Filter the files in *path*, keeping only those that follow the
    "PYYYYDDDSSS.ext" convention, and return the last one.

    Input:
        path : folder containing the data files
        ext  : extension of the files in the folder

    Return:
        The (alphabetically) last matching filename, without path, or
        None if there is no match.
    """
    validFilelist = []
    fileList = os.listdir(path)

    # 0 1234 567 89A BCDE
    # H YYYY DDD SSS .ext

    for thisFile in fileList:
        # Parse year/doy with int() directly: consistent with
        # getFileFromSet() and robust against float-looking slices
        # (e.g. '1.55') that the old isNumber() check let through and
        # that then crashed the uncaught int() conversion below.
        try:
            int(thisFile[1:5])
            int(thisFile[5:8])
        except ValueError:
            continue

        if os.path.splitext(thisFile)[-1].lower() != ext.lower():
            continue

        validFilelist.append(thisFile)

    if validFilelist:
        return sorted(validFilelist, key=str.lower)[-1]

    return None
374 374
375 375
def isRadarFolder(folder):
    """Return 1 when *folder* looks like xYYYYDDD..., 0 otherwise."""
    try:
        int(folder[1:5])   # YYYY
        int(folder[5:8])   # DDD
    except:
        return 0

    return 1
384 384
385 385
def isRadarFile(file):
    """Return 1 when *file* looks like xYYYYDDDSSS.ext, 0 otherwise."""
    try:
        int(file[1:5])    # YYYY
        int(file[5:8])    # DDD
        int(file[8:11])   # SSS (set)
    except:
        return 0

    return 1
395 395
396 396
def getDateFromRadarFile(file):
    """Decode an xYYYYDDDSSS... filename into a datetime.date.

    Returns None when the name does not follow the convention.
    """
    try:
        year = int(file[1:5])
        doy = int(file[5:8])
        int(file[8:11])  # set number: validated only, not used
    except:
        return None

    return datetime.date(year, 1, 1) + datetime.timedelta(doy - 1)
407 407
408 408
def getDateFromRadarFolder(folder):
    """Decode an xYYYYDDD... folder name into a datetime.date.

    Returns None when the name does not follow the convention.
    """
    try:
        year = int(folder[1:5])
        doy = int(folder[5:8])
    except:
        return None

    return datetime.date(year, 1, 1) + datetime.timedelta(doy - 1)
418 418
def parse_format(s, fmt):
    """Expand each %-directive in *fmt* with the matching slice of *s*.

    The slice width of every directive comes from DT_DIRECTIVES, so the
    returned string lines up with *fmt* for a later strptime() call.
    """
    for _ in range(fmt.count('%')):
        pos = fmt.index('%')
        directive = fmt[pos:pos + 2]
        width = DT_DIRECTIVES[directive]
        fmt = fmt.replace(directive, s[pos:pos + width])
    return fmt
426 426
class Reader(object):
    """Base class for JRO data readers/writers.

    Holds the shared state (current file, search window, online options)
    and the generic folder/file search plus file-advance machinery used
    by the concrete implementations.
    """

    c = 3E8                       # speed of light [m/s]
    isConfig = False              # True once setup() has run
    dtype = None
    pathList = []
    filenameList = []
    datetimeList = []
    filename = None               # full path of the currently opened file
    ext = None                    # data-file extension, e.g. '.r' or '.pdata'
    flagIsNewFile = 1
    flagDiscontinuousBlock = 0
    flagIsNewBlock = 0
    flagNoMoreFiles = 0
    fp = None                     # file object of the current file
    firstHeaderSize = 0
    basicHeaderSize = 24          # bytes
    versionFile = 1103
    fileSize = None
    fileSizeByHeader = None
    fileIndex = -1                # -1 until the first file is opened
    profileIndex = None
    blockIndex = 0
    nTotalBlocks = 0
    maxTimeStep = 30              # seconds; bigger gaps mark a discontinuous block
    lastUTTime = None
    datablock = None
    dataOut = None
    getByBlock = False
    path = None
    startDate = None
    endDate = None
    startTime = datetime.time(0, 0, 0)
    endTime = datetime.time(23, 59, 59)
    set = None
    expLabel = ""
    online = False
    delay = 60                    # seconds to sleep between online retries
    nTries = 3  # quantity tries
    nFiles = 3  # number of files for searching
    walk = True                   # search inside date subfolders (doypath)
    getblock = False
    nTxs = 1
    realtime = False
    blocksize = 0
    blocktime = None
    warnings = True
    verbose = True
    server = None
    format = None
    oneDDict = None
    twoDDict = None
    independentParam = None
    filefmt = None
    folderfmt = None
    open_file = open              # callable used to open data files
    open_mode = 'rb'

    def run(self):
        # Entry point; must be implemented by subclasses.

        raise NotImplementedError

    def getAllowedArgs(self):
        # Prefer an explicit __attrs__ list; otherwise introspect run().
        # NOTE(review): inspect.getargspec is deprecated (removed in 3.11);
        # getfullargspec is the modern equivalent — confirm target Python.
        if hasattr(self, '__attrs__'):
            return self.__attrs__
        else:
            return inspect.getargspec(self.run).args

    def set_kwargs(self, **kwargs):
        """Copy every keyword argument onto the instance as an attribute."""

        for key, value in kwargs.items():
            setattr(self, key, value)

    def find_folders(self, path, startDate, endDate, folderfmt, last=False):
        """Yield folders under *path* whose name-encoded date is in range.

        With last=True only the (sorted) last folder is considered.
        """

        folders = [x for f in path.split(',')
                   for x in os.listdir(f) if os.path.isdir(os.path.join(f, x))]
        folders.sort()

        if last:
            folders = [folders[-1]]

        for folder in folders:
            try:
                dt = datetime.datetime.strptime(parse_format(folder, folderfmt), folderfmt).date()
                if dt >= startDate and dt <= endDate:
                    # NOTE(review): with a comma-separated multi-path,
                    # `path` here is still the full comma string; this join
                    # likely should use the folder's own parent — confirm.
                    yield os.path.join(path, folder)
                else:
                    log.log('Skiping folder {}'.format(folder), self.name)
            except Exception as e:
                # Unparseable folder names are skipped, not fatal.
                log.log('Skiping folder {}'.format(folder), self.name)
                continue
        return

    def find_files(self, folders, ext, filefmt, startDate=None, endDate=None,
                   expLabel='', last=False):
        """Yield full paths of data files (*ext*) inside *folders*.

        With last=True, yield only the last file of the first folder that
        has any (its date is not range-checked); otherwise yield every file
        whose name-encoded date is inside [startDate, endDate].
        """

        for path in folders:
            files = glob.glob1(path, '*{}'.format(ext))
            files.sort()
            if last:
                if files:
                    fo = files[-1]
                    try:
                        dt = datetime.datetime.strptime(parse_format(fo, filefmt), filefmt).date()
                        yield os.path.join(path, expLabel, fo)
                    except Exception as e:
                        pass
                    return
                else:
                    return

            for fo in files:
                try:
                    dt = datetime.datetime.strptime(parse_format(fo, filefmt), filefmt).date()
                    if dt >= startDate and dt <= endDate:
                        yield os.path.join(path, expLabel, fo)
                    else:
                        log.log('Skiping file {}'.format(fo), self.name)
                except Exception as e:
                    # Unparseable file names are skipped, not fatal.
                    log.log('Skiping file {}'.format(fo), self.name)
                    continue

    def searchFilesOffLine(self, path, startDate, endDate,
                           expLabel, ext, walk,
                           filefmt, folderfmt):
        """Search files in offline mode for the given arguments

        Return:
            Generator of files
        """

        if walk:
            folders = self.find_folders(
                path, startDate, endDate, folderfmt)
        else:
            folders = path.split(',')

        return self.find_files(
            folders, ext, filefmt, startDate, endDate, expLabel)

    def searchFilesOnLine(self, path, startDate, endDate,
                          expLabel, ext, walk,
                          filefmt, folderfmt):
        """Search for the last file of the last folder

        Arguments:
            path : folder that contains the data files
            expLabel : sub-experiment name (subfolder)
            ext : extension of the data files
            walk : when True, look for data inside date subfolders (doypath)

        Return:
            generator with the full path of last filename
        """

        if walk:
            folders = self.find_folders(
                path, startDate, endDate, folderfmt, last=True)
        else:
            folders = path.split(',')

        return self.find_files(
            folders, ext, filefmt, startDate, endDate, expLabel, last=True)

    def setNextFile(self):
        """Advance to the next data file, open it and parse its first header.

        Loops until a file passes verifyFile(); raises a Schain error or
        warning when no (more) files are available.
        """

        while True:
            if self.fp != None:
                self.fp.close()

            if self.online:
                newFile = self.setNextFileOnline()
            else:
                newFile = self.setNextFileOffline()

            if not(newFile):
                if self.online:
                    raise schainpy.admin.SchainError('Time to wait for new files reach')
                else:
                    if self.fileIndex == -1:
                        raise schainpy.admin.SchainWarning('No files found in the given path')
                    else:
                        raise schainpy.admin.SchainWarning('No more files to read')

            if self.verifyFile(self.filename):
                break

        log.log('Opening file: %s' % self.filename, self.name)

        self.readFirstHeader()
        self.nReadBlocks = 0

    def setNextFileOnline(self):
        """Check for the next file to be readed in online mode.

        Retries up to nTries times per candidate and up to nFiles
        candidates, then tries the next day, sleeping self.delay seconds
        between attempts.

        Set:
            self.filename
            self.fp
            self.filesize

        Return:
            boolean

        """
        nextFile = True
        nextDay = False

        for nFiles in range(self.nFiles+1):
            for nTries in range(self.nTries):
                fullfilename, filename = self.checkForRealPath(nextFile, nextDay)
                if fullfilename is not None:
                    break
                log.warning(
                    "Waiting %0.2f sec for the next file: \"%s\" , try %02d ..." % (self.delay, filename, nTries + 1),
                    self.name)
                time.sleep(self.delay)
                nextFile = False
                continue

            if fullfilename is not None:
                break

            # candidate not found: move on to the next set number
            self.nTries = 1
            nextFile = True

            if nFiles == (self.nFiles - 1):
                log.log('Trying with next day...', self.name)
                nextDay = True
                self.nTries = 3

        if fullfilename:
            self.fileSize = os.path.getsize(fullfilename)
            self.filename = fullfilename
            self.flagIsNewFile = 1
            if self.fp != None:
                self.fp.close()
            self.fp = self.open_file(fullfilename, self.open_mode)
            self.flagNoMoreFiles = 0
            self.fileIndex += 1
            return 1
        else:
            return 0

    def setNextFileOffline(self):
        """Open the next file to be readed in offline mode"""

        try:
            # self.filenameList is a generator here (see searchFilesOffLine)
            filename = next(self.filenameList)
            self.fileIndex +=1
        except StopIteration:
            self.flagNoMoreFiles = 1
            return 0

        self.filename = filename
        self.fileSize = os.path.getsize(filename)
        self.fp = self.open_file(filename, self.open_mode)
        self.flagIsNewFile = 1

        return 1

    @staticmethod
    def isDateTimeInRange(dt, startDate, endDate, startTime, endTime):
        """Check if the given datetime is in range"""
        startDateTime= datetime.datetime.combine(startDate,startTime)
        endDateTime = datetime.datetime.combine(endDate,endTime)
        if startDateTime <= dt <= endDateTime:
            return True
        return False

    def verifyFile(self, filename):
        """Check for a valid file

        Base implementation accepts every file; subclasses override.

        Arguments:
            filename -- full path filename

        Return:
            boolean
        """

        return True

    def checkForRealPath(self, nextFile, nextDay):
        """Check if the next file to be readed exists"""

        raise NotImplementedError

    def readFirstHeader(self):
        """Parse the file header"""

        pass

    def waitDataBlock(self, pointer_location, blocksize=None):
        """Wait (up to nTries * delay seconds) until the current file grows
        enough to contain a whole data block starting at pointer_location.

        Return: 1 when enough data is available, 0 on timeout.
        """

        currentPointer = pointer_location
        if blocksize is None:
            neededSize = self.processingHeaderObj.blockSize # + self.basicHeaderSize
        else:
            neededSize = blocksize

        for nTries in range(self.nTries):
            # Re-open to refresh the on-disk size while the writer appends.
            self.fp.close()
            self.fp = open(self.filename, 'rb')
            self.fp.seek(currentPointer)

            self.fileSize = os.path.getsize(self.filename)
            currentSize = self.fileSize - currentPointer

            if (currentSize >= neededSize):
                return 1

            log.warning(
                "Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries + 1),
                self.name
            )
            time.sleep(self.delay)

        return 0
748 748
class JRODataReader(Reader):
    """Reader for Jicamarca binary data files (.r voltage / .pdata spectra).

    Extends Reader with JRO header parsing, block-by-block reading and the
    online (growing-file) waiting logic.
    """

    utc = 0
    nReadBlocks = 0
    foldercounter = 0
    firstHeaderSize = 0
    basicHeaderSize = 24
    __isFirstTimeOnline = 1
    filefmt = "*%Y%j***"
    folderfmt = "*%Y%j"
    __attrs__ = ['path', 'startDate', 'endDate', 'startTime', 'endTime', 'online', 'delay', 'walk']

    def getDtypeWidth(self):
        """Return the width (bytes) of one sample component for self.dtype."""

        dtype_index = get_dtype_index(self.dtype)
        dtype_width = get_dtype_width(dtype_index)

        return dtype_width

    def checkForRealPath(self, nextFile, nextDay):
        """Check if the next file to be readed exists.

        Example:
            the correct file name is .../.../D2009307/P2009307367.ext

            so the function tries the following combinations
                .../.../y2009307367.ext
                .../.../Y2009307367.ext
                .../.../x2009307/y2009307367.ext
                .../.../x2009307/Y2009307367.ext
                .../.../X2009307/y2009307367.ext
                .../.../X2009307/Y2009307367.ext
            the last combination being, in this case, identical to the
            wanted file

        Return:
            tuple -- (fullpath or None, filename)
        """

        if nextFile:
            self.set += 1
        if nextDay:
            self.set = 0
            self.doy += 1
        foldercounter = 0
        prefixDirList = [None, 'd', 'D']
        if self.ext.lower() == ".r":  # voltage
            prefixFileList = ['d', 'D']
        elif self.ext.lower() == ".pdata":  # spectra
            prefixFileList = ['p', 'P']

        # sweep over every possible prefix combination
        for prefixDir in prefixDirList:
            thispath = self.path
            if prefixDir != None:
                # build the folder name xYYYYDDD (x='d' or x='D')
                if foldercounter == 0:
                    thispath = os.path.join(self.path, "%s%04d%03d" %
                                            (prefixDir, self.year, self.doy))
                else:
                    thispath = os.path.join(self.path, "%s%04d%03d_%02d" % (
                        prefixDir, self.year, self.doy, foldercounter))
            for prefixFile in prefixFileList:  # both capitalizations of the file prefix
                # build the file name xYYYYDDDSSS.ext
                filename = "%s%04d%03d%03d%s" % (prefixFile, self.year, self.doy, self.set, self.ext)
                fullfilename = os.path.join(
                    thispath, filename)

                if os.path.exists(fullfilename):
                    return fullfilename, filename

        return None, filename

    def __waitNewBlock(self):
        """
        Return 1 if a new data block was found, 0 otherwise.

        In offline mode this always returns 0.
        """
        if not self.online:
            return 0

        if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
            return 0

        currentPointer = self.fp.tell()

        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        for nTries in range(self.nTries):

            # re-open to refresh the file size while the writer appends
            self.fp.close()
            self.fp = open(self.filename, 'rb')
            self.fp.seek(currentPointer)

            self.fileSize = os.path.getsize(self.filename)
            currentSize = self.fileSize - currentPointer

            if (currentSize >= neededSize):
                self.basicHeaderObj.read(self.fp)
                return 1

            if self.fileSize == self.fileSizeByHeader:
                # self.flagEoF = True
                return 0

            print("[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries + 1))
            time.sleep(self.delay)

        return 0

    def __setNewBlock(self):
        """Position the file pointer on the next block's basic header.

        Return: 1 on success, 0 when no further block could be set.
        """

        if self.fp == None:
            return 0

        if self.flagIsNewFile:
            self.lastUTTime = self.basicHeaderObj.utc
            return 1

        if self.realtime:
            self.flagDiscontinuousBlock = 1
            if not(self.setNextFile()):
                return 0
            else:
                return 1

        currentSize = self.fileSize - self.fp.tell()
        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        if (currentSize >= neededSize):
            self.basicHeaderObj.read(self.fp)
            self.lastUTTime = self.basicHeaderObj.utc
            return 1

        if self.__waitNewBlock():
            self.lastUTTime = self.basicHeaderObj.utc
            return 1

        if not(self.setNextFile()):
            return 0

        deltaTime = self.basicHeaderObj.utc - self.lastUTTime
        self.lastUTTime = self.basicHeaderObj.utc

        self.flagDiscontinuousBlock = 0

        # a gap bigger than maxTimeStep marks a data discontinuity
        if deltaTime > self.maxTimeStep:
            self.flagDiscontinuousBlock = 1

        return 1

    def readNextBlock(self):
        """Read the next in-range data block into dataOut.

        Blocks outside [startDate/Time, endDate/Time] are skipped.
        Return: 1 on success, 0 when the block could not be read.
        """

        while True:
            if not(self.__setNewBlock()):
                continue

            if not(self.readBlock()):
                return 0

            self.getBasicHeader()

            if not self.isDateTimeInRange(self.dataOut.datatime, self.startDate, self.endDate, self.startTime, self.endTime):
                print("[Reading] Block No. %d/%d -> %s [Skipping]" % (self.nReadBlocks,
                                                                      self.processingHeaderObj.dataBlocksPerFile,
                                                                      self.dataOut.datatime.ctime()))
                continue

            break

        if self.verbose:
            print("[Reading] Block No. %d/%d -> %s" % (self.nReadBlocks,
                                                       self.processingHeaderObj.dataBlocksPerFile,
                                                       self.dataOut.datatime.ctime()))
        return 1

    def readFirstHeader(self):
        """Parse all four JRO headers and derive dtype and sizes."""

        self.basicHeaderObj.read(self.fp)
        self.systemHeaderObj.read(self.fp)
        self.radarControllerHeaderObj.read(self.fp)
        self.processingHeaderObj.read(self.fp)
        self.firstHeaderSize = self.basicHeaderObj.size

        datatype = int(numpy.log2((self.processingHeaderObj.processFlags &
                                   PROCFLAG.DATATYPE_MASK)) - numpy.log2(PROCFLAG.DATATYPE_CHAR))
        if datatype == 0:
            datatype_str = numpy.dtype([('real', '<i1'), ('imag', '<i1')])
        elif datatype == 1:
            datatype_str = numpy.dtype([('real', '<i2'), ('imag', '<i2')])
        elif datatype == 2:
            datatype_str = numpy.dtype([('real', '<i4'), ('imag', '<i4')])
        elif datatype == 3:
            datatype_str = numpy.dtype([('real', '<i8'), ('imag', '<i8')])
        elif datatype == 4:
            datatype_str = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
        elif datatype == 5:
            datatype_str = numpy.dtype([('real', '<f8'), ('imag', '<f8')])
        else:
            raise ValueError('Data type was not defined')

        self.dtype = datatype_str
        #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
        self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + \
            self.firstHeaderSize + self.basicHeaderSize * \
            (self.processingHeaderObj.dataBlocksPerFile - 1)
        # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
        # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
        self.getBlockDimension()

    def verifyFile(self, filename):
        """Validate headers (and, offline, the date range) of *filename*."""

        flag = True

        try:
            fp = open(filename, 'rb')
        except IOError:
            log.error("File {} can't be opened".format(filename), self.name)
            return False

        if self.online and self.waitDataBlock(0):
            pass

        basicHeaderObj = BasicHeader(LOCALTIME)
        systemHeaderObj = SystemHeader()
        radarControllerHeaderObj = RadarControllerHeader()
        processingHeaderObj = ProcessingHeader()

        if not(basicHeaderObj.read(fp)):
            flag = False
        if not(systemHeaderObj.read(fp)):
            flag = False
        if not(radarControllerHeaderObj.read(fp)):
            flag = False
        if not(processingHeaderObj.read(fp)):
            flag = False
        if not self.online:
            # offline: also check first/last block times against the range
            dt1 = basicHeaderObj.datatime
            pos = self.fileSize-processingHeaderObj.blockSize-24
            if pos<0:
                flag = False
                log.error('Invalid size for file: {}'.format(self.filename), self.name)
            else:
                fp.seek(pos)
                if not(basicHeaderObj.read(fp)):
                    flag = False
            dt2 = basicHeaderObj.datatime
            if not self.isDateTimeInRange(dt1, self.startDate, self.endDate, self.startTime, self.endTime) and not \
                self.isDateTimeInRange(dt2, self.startDate, self.endDate, self.startTime, self.endTime):
                flag = False

        fp.close()
        return flag

    def findDatafiles(self, path, startDate=None, endDate=None, expLabel='', ext='.r', walk=True, include_path=False):
        """List the dates (and optionally paths) that contain data files."""

        path_empty = True

        dateList = []
        pathList = []

        multi_path = path.split(',')

        if not walk:

            for single_path in multi_path:

                if not os.path.isdir(single_path):
                    continue

                fileList = glob.glob1(single_path, "*" + ext)

                if not fileList:
                    continue

                path_empty = False

                fileList.sort()

                for thisFile in fileList:

                    if not os.path.isfile(os.path.join(single_path, thisFile)):
                        continue

                    if not isRadarFile(thisFile):
                        continue

                    if not isFileInDateRange(thisFile, startDate, endDate):
                        continue

                    thisDate = getDateFromRadarFile(thisFile)

                    # NOTE(review): the `or` means a path is recorded at most
                    # once even for several dates — confirm this is intended.
                    if thisDate in dateList or single_path in pathList:
                        continue

                    dateList.append(thisDate)
                    pathList.append(single_path)

        else:
            for single_path in multi_path:

                if not os.path.isdir(single_path):
                    continue

                dirList = []

                for thisPath in os.listdir(single_path):

                    if not os.path.isdir(os.path.join(single_path, thisPath)):
                        continue

                    if not isRadarFolder(thisPath):
                        continue

                    if not isFolderInDateRange(thisPath, startDate, endDate):
                        continue

                    dirList.append(thisPath)

                if not dirList:
                    continue

                dirList.sort()

                for thisDir in dirList:

                    datapath = os.path.join(single_path, thisDir, expLabel)
                    fileList = glob.glob1(datapath, "*" + ext)

                    if not fileList:
                        continue

                    path_empty = False

                    thisDate = getDateFromRadarFolder(thisDir)

                    pathList.append(datapath)
                    dateList.append(thisDate)

        dateList.sort()

        if walk:
            pattern_path = os.path.join(multi_path[0], "[dYYYYDDD]", expLabel)
        else:
            pattern_path = multi_path[0]

        if path_empty:
            raise schainpy.admin.SchainError("[Reading] No *%s files in %s for %s to %s" % (ext, pattern_path, startDate, endDate))
        else:
            if not dateList:
                raise schainpy.admin.SchainError("[Reading] Date range selected invalid [%s - %s]: No *%s files in %s)" % (startDate, endDate, ext, path))

        if include_path:
            return dateList, pathList

        return dateList

    def setup(self, **kwargs):
        """Configure the reader from keyword args and open the first file."""

        self.set_kwargs(**kwargs)
        if not self.ext.startswith('.'):
            self.ext = '.{}'.format(self.ext)

        if self.server is not None:
            if 'tcp://' in self.server:
                # FIX: was `address = server` — a bare, undefined name that
                # raised NameError whenever a tcp:// address was configured.
                address = self.server
            else:
                address = 'ipc:///tmp/%s' % self.server
            self.server = address
            self.context = zmq.Context()
            self.receiver = self.context.socket(zmq.PULL)
            self.receiver.connect(self.server)
            time.sleep(0.5)
            print('[Starting] ReceiverData from {}'.format(self.server))
        else:
            self.server = None
        if self.path == None:
            raise ValueError("[Reading] The path is not valid")

        if self.online:
            log.log("[Reading] Searching files in online mode...", self.name)

            for nTries in range(self.nTries):
                fullpath = self.searchFilesOnLine(self.path, self.startDate,
                                                  self.endDate, self.expLabel, self.ext, self.walk,
                                                  self.filefmt, self.folderfmt)

                try:
                    fullpath = next(fullpath)
                except:
                    fullpath = None

                if fullpath:
                    break

                log.warning(
                    'Waiting {} sec for a valid file in {}: try {} ...'.format(
                        self.delay, self.path, nTries + 1),
                    self.name)
                time.sleep(self.delay)

            if not(fullpath):
                raise schainpy.admin.SchainError(
                    'There isn\'t any valid file in {}'.format(self.path))

            pathname, filename = os.path.split(fullpath)
            self.year = int(filename[1:5])
            self.doy = int(filename[5:8])
            self.set = int(filename[8:11]) - 1
        else:
            log.log("Searching files in {}".format(self.path), self.name)
            self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
                                                        self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt)

        self.setNextFile()

        return

    def getBasicHeader(self):
        """Copy per-block metadata from the headers into dataOut."""

        self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond / \
            1000. + self.profileIndex * self.radarControllerHeaderObj.ippSeconds

        self.dataOut.flagDiscontinuousBlock = self.flagDiscontinuousBlock

        self.dataOut.timeZone = self.basicHeaderObj.timeZone

        self.dataOut.dstFlag = self.basicHeaderObj.dstFlag

        self.dataOut.errorCount = self.basicHeaderObj.errorCount

        self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime

        self.dataOut.ippSeconds = self.radarControllerHeaderObj.ippSeconds / self.nTxs

    def getFirstHeader(self):
        """Abstract: copy first-header metadata into dataOut."""

        raise NotImplementedError

    def getData(self):
        """Abstract: read and return the next unit of data."""

        raise NotImplementedError

    def hasNotDataInBuffer(self):
        """Abstract: whether the internal buffer has been exhausted."""

        raise NotImplementedError

    def readBlock(self):
        """Abstract: read one data block from the current file."""

        raise NotImplementedError

    def isEndProcess(self):

        return self.flagNoMoreFiles

    def printReadBlocks(self):

        print("[Reading] Number of read blocks per file %04d" % self.nReadBlocks)

    def printTotalBlocks(self):

        print("[Reading] Number of read blocks %04d" % self.nTotalBlocks)

    def run(self, **kwargs):
        """

        Arguments:
            path :
            startDate :
            endDate :
            startTime :
            endTime :
            set :
            expLabel :
            ext :
            online :
            delay :
            walk :
            getblock :
            nTxs :
            realtime :
            blocksize :
            blocktime :
            skip :
            cursor :
            warnings :
            server :
            verbose :
            format :
            oneDDict :
            twoDDict :
            independentParam :
        """

        if not(self.isConfig):
            self.setup(**kwargs)
            self.isConfig = True
        if self.server is None:
            self.getData()
        else:
            self.getFromServer()
1251 1251
1252 1252
1253 1253 class JRODataWriter(Reader):
1254 1254
1255 1255 """
1256 1256 Esta clase permite escribir datos a archivos procesados (.r o ,pdata). La escritura
1257 1257 de los datos siempre se realiza por bloques.
1258 1258 """
1259 1259
1260 1260 setFile = None
1261 1261 profilesPerBlock = None
1262 1262 blocksPerFile = None
1263 1263 nWriteBlocks = 0
1264 1264 fileDate = None
1265 1265
    def __init__(self, dataOut=None):
        """Abstract constructor; concrete writers must implement it."""
        raise NotImplementedError
1268 1268
    def hasAllDataInBuffer(self):
        """Abstract: whether a whole block is buffered and ready to write."""
        raise NotImplementedError
1271 1271
    def setBlockDimension(self):
        """Abstract: define the shape of the data block to be written."""
        raise NotImplementedError
1274 1274
    def writeBlock(self):
        """Abstract: write one data block to the current file."""
        raise NotImplementedError
1277 1277
    def putData(self):
        """Abstract: accumulate incoming data and trigger block writes."""
        raise NotImplementedError
1280 1280
1281 1281 def getDtypeWidth(self):
1282 1282
1283 1283 dtype_index = get_dtype_index(self.dtype)
1284 1284 dtype_width = get_dtype_width(dtype_index)
1285 1285
1286 1286 return dtype_width
1287 1287
1288 1288 def getProcessFlags(self):
1289 1289
1290 1290 processFlags = 0
1291 1291
1292 1292 dtype_index = get_dtype_index(self.dtype)
1293 1293 procflag_dtype = get_procflag_dtype(dtype_index)
1294 1294
1295 1295 processFlags += procflag_dtype
1296 1296
1297 1297 if self.dataOut.flagDecodeData:
1298 1298 processFlags += PROCFLAG.DECODE_DATA
1299 1299
1300 1300 if self.dataOut.flagDeflipData:
1301 1301 processFlags += PROCFLAG.DEFLIP_DATA
1302 1302
1303 1303 if self.dataOut.code is not None:
1304 1304 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
1305 1305
1306 1306 if self.dataOut.nCohInt > 1:
1307 1307 processFlags += PROCFLAG.COHERENT_INTEGRATION
1308 1308
1309 1309 if self.dataOut.type == "Spectra":
1310 1310 if self.dataOut.nIncohInt > 1:
1311 1311 processFlags += PROCFLAG.INCOHERENT_INTEGRATION
1312 1312
1313 1313 if self.dataOut.data_dc is not None:
1314 1314 processFlags += PROCFLAG.SAVE_CHANNELS_DC
1315 1315
1316 1316 if self.dataOut.flagShiftFFT:
1317 1317 processFlags += PROCFLAG.SHIFT_FFT_DATA
1318 1318
1319 1319 return processFlags
1320 1320
1321 1321 def setBasicHeader(self):
1322 1322
1323 1323 self.basicHeaderObj.size = self.basicHeaderSize # bytes
1324 1324 self.basicHeaderObj.version = self.versionFile
1325 1325 self.basicHeaderObj.dataBlock = self.nTotalBlocks
1326 1326 utc = numpy.floor(self.dataOut.utctime)
1327 1327 milisecond = (self.dataOut.utctime - utc) * 1000.0
1328 1328 self.basicHeaderObj.utc = utc
1329 1329 self.basicHeaderObj.miliSecond = milisecond
1330 1330 self.basicHeaderObj.timeZone = self.dataOut.timeZone
1331 1331 self.basicHeaderObj.dstFlag = self.dataOut.dstFlag
1332 1332 self.basicHeaderObj.errorCount = self.dataOut.errorCount
1333 1333
    def setFirstHeader(self):
        """
        Obtain a copy of the First Header.

        Affected:

            self.basicHeaderObj
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.processingHeaderObj

        Return:
            None
        """

        raise NotImplementedError
1350 1350
    def __writeFirstHeader(self):
        """
        Write the file's first header, i.e. the Basic header followed by the
        long header (SystemHeader, RadarControllerHeader, ProcessingHeader).

        Affected:
            __dataType

        Return:
            None
        """

        # compute the total header size before writing

        sizeLongHeader = self.systemHeaderObj.size + \
            self.radarControllerHeaderObj.size + self.processingHeaderObj.size
        self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader

        self.basicHeaderObj.write(self.fp)
        self.systemHeaderObj.write(self.fp)
        self.radarControllerHeaderObj.write(self.fp)
        self.processingHeaderObj.write(self.fp)
1372 1372
1373 1373 def __setNewBlock(self):
1374 1374 """
1375 1375 Si es un nuevo file escribe el First Header caso contrario escribe solo el Basic Header
1376 1376
1377 1377 Return:
1378 1378 0 : si no pudo escribir nada
1379 1379 1 : Si escribio el Basic el First Header
1380 1380 """
1381 1381 if self.fp == None:
1382 1382 self.setNextFile()
1383 1383
1384 1384 if self.flagIsNewFile:
1385 1385 return 1
1386 1386
1387 1387 if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
1388 1388 self.basicHeaderObj.write(self.fp)
1389 1389 return 1
1390 1390
1391 1391 if not(self.setNextFile()):
1392 1392 return 0
1393 1393
1394 1394 return 1
1395 1395
1396 1396 def writeNextBlock(self):
1397 1397 """
1398 1398 Selecciona el bloque siguiente de datos y los escribe en un file
1399 1399
1400 1400 Return:
1401 1401 0 : Si no hizo pudo escribir el bloque de datos
1402 1402 1 : Si no pudo escribir el bloque de datos
1403 1403 """
1404 1404 if not(self.__setNewBlock()):
1405 1405 return 0
1406 1406
1407 1407 self.writeBlock()
1408 1408
1409 1409 print("[Writing] Block No. %d/%d" % (self.blockIndex,
1410 1410 self.processingHeaderObj.dataBlocksPerFile))
1411 1411
1412 1412 return 1
1413 1413
1414 1414 def setNextFile(self):
1415 1415 """Determina el siguiente file que sera escrito
1416 1416
1417 1417 Affected:
1418 1418 self.filename
1419 1419 self.subfolder
1420 1420 self.fp
1421 1421 self.setFile
1422 1422 self.flagIsNewFile
1423 1423
1424 1424 Return:
1425 1425 0 : Si el archivo no puede ser escrito
1426 1426 1 : Si el archivo esta listo para ser escrito
1427 1427 """
1428 1428 ext = self.ext
1429 1429 path = self.path
1430 1430
1431 1431 if self.fp != None:
1432 1432 self.fp.close()
1433 1433
1434
1434 1435 if not os.path.exists(path):
1435 1436 os.mkdir(path)
1436 1437
1437 1438 timeTuple = time.localtime(self.dataOut.utctime)
1438 1439 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year, timeTuple.tm_yday)
1439 1440
1440 1441 fullpath = os.path.join(path, subfolder)
1441 1442 setFile = self.setFile
1442 1443
1443 1444 if not(os.path.exists(fullpath)):
1444 1445 os.mkdir(fullpath)
1445 1446 setFile = -1 # inicializo mi contador de seteo
1446 1447 else:
1447 1448 filesList = os.listdir(fullpath)
1448 1449 if len(filesList) > 0:
1449 1450 filesList = sorted(filesList, key=str.lower)
1450 1451 filen = filesList[-1]
1451 1452 # el filename debera tener el siguiente formato
1452 1453 # 0 1234 567 89A BCDE (hex)
1453 1454 # x YYYY DDD SSS .ext
1454 1455 if isNumber(filen[8:11]):
1455 1456 # inicializo mi contador de seteo al seteo del ultimo file
1456 1457 setFile = int(filen[8:11])
1457 1458 else:
1458 1459 setFile = -1
1459 1460 else:
1460 1461 setFile = -1 # inicializo mi contador de seteo
1461 1462
1462 1463 setFile += 1
1463 1464
1464 1465 # If this is a new day it resets some values
1465 1466 if self.dataOut.datatime.date() > self.fileDate:
1466 1467 setFile = 0
1467 1468 self.nTotalBlocks = 0
1468 1469
1469 1470 filen = '{}{:04d}{:03d}{:03d}{}'.format(
1470 1471 self.optchar, timeTuple.tm_year, timeTuple.tm_yday, setFile, ext)
1471 1472
1472 1473 filename = os.path.join(path, subfolder, filen)
1473 1474
1474 1475 fp = open(filename, 'wb')
1475 1476
1476 1477 self.blockIndex = 0
1477 1478 self.filename = filename
1478 1479 self.subfolder = subfolder
1479 1480 self.fp = fp
1480 1481 self.setFile = setFile
1481 1482 self.flagIsNewFile = 1
1482 1483 self.fileDate = self.dataOut.datatime.date()
1483 1484 self.setFirstHeader()
1484 1485
1485 1486 print('[Writing] Opening file: %s' % self.filename)
1486 1487
1487 1488 self.__writeFirstHeader()
1488 1489
1489 1490 return 1
1490 1491
1491 1492 def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=64, set=None, ext=None, datatype=4):
1492 1493 """
1493 1494 Setea el tipo de formato en la cual sera guardada la data y escribe el First Header
1494 1495
1495 1496 Inputs:
1496 1497 path : directory where data will be saved
1497 1498 profilesPerBlock : number of profiles per block
1498 1499 set : initial file set
1499 1500 datatype : An integer number that defines data type:
1500 1501 0 : int8 (1 byte)
1501 1502 1 : int16 (2 bytes)
1502 1503 2 : int32 (4 bytes)
1503 1504 3 : int64 (8 bytes)
1504 1505 4 : float32 (4 bytes)
1505 1506 5 : double64 (8 bytes)
1506 1507
1507 1508 Return:
1508 1509 0 : Si no realizo un buen seteo
1509 1510 1 : Si realizo un buen seteo
1510 1511 """
1511 1512
1512 1513 if ext == None:
1513 1514 ext = self.ext
1514 1515
1515 1516 self.ext = ext.lower()
1516 1517
1517 1518 self.path = path
1518 1519
1519 1520 if set is None:
1520 1521 self.setFile = -1
1521 1522 else:
1522 1523 self.setFile = set - 1
1523 1524
1524 1525 self.blocksPerFile = blocksPerFile
1525 1526 self.profilesPerBlock = profilesPerBlock
1526 1527 self.dataOut = dataOut
1527 1528 self.fileDate = self.dataOut.datatime.date()
1528 1529 self.dtype = self.dataOut.dtype
1529 1530
1530 1531 if datatype is not None:
1531 1532 self.dtype = get_numpy_dtype(datatype)
1532 1533
1533 1534 if not(self.setNextFile()):
1534 1535 print("[Writing] There isn't a next file")
1535 1536 return 0
1536 1537
1537 1538 self.setBlockDimension()
1538 1539
1539 1540 return 1
1540 1541
1541 1542 def run(self, dataOut, path, blocksPerFile=100, profilesPerBlock=64, set=None, ext=None, datatype=4, **kwargs):
1542 1543
1543 1544 if not(self.isConfig):
1544 1545
1545 1546 self.setup(dataOut, path, blocksPerFile, profilesPerBlock=profilesPerBlock,
1546 1547 set=set, ext=ext, datatype=datatype, **kwargs)
1547 1548 self.isConfig = True
1548 1549
1549 1550 self.dataOut = dataOut
1550 1551 self.putData()
1551 1552 return self.dataOut
1552 1553
1553 1554 @MPDecorator
1554 1555 class printInfo(Operation):
1555 1556
1556 1557 def __init__(self):
1557 1558
1558 1559 Operation.__init__(self)
1559 1560 self.__printInfo = True
1560 1561
1561 1562 def run(self, dataOut, headers = ['systemHeaderObj', 'radarControllerHeaderObj', 'processingHeaderObj']):
1562 1563 if self.__printInfo == False:
1563 1564 return
1564 1565
1565 1566 for header in headers:
1566 1567 if hasattr(dataOut, header):
1567 1568 obj = getattr(dataOut, header)
1568 1569 if hasattr(obj, 'printInfo'):
1569 1570 obj.printInfo()
1570 1571 else:
1571 1572 print(obj)
1572 1573 else:
1573 1574 log.warning('Header {} Not found in object'.format(header))
1574 1575
1575 1576 self.__printInfo = False
@@ -1,661 +1,659
1 '''
1 ''''
2 2 Created on Set 9, 2015
3 3
4 4 @author: roj-idl71 Karim Kuyeng
5 5
6 6 @update: 2021, Joab Apaza
7 7 '''
8 8
9 9 import os
10 10 import sys
11 11 import glob
12 12 import fnmatch
13 13 import datetime
14 14 import time
15 15 import re
16 16 import h5py
17 17 import numpy
18 18
19 19 try:
20 20 from gevent import sleep
21 21 except:
22 22 from time import sleep
23 23
24 24 from schainpy.model.data.jroheaderIO import RadarControllerHeader, SystemHeader
25 25 from schainpy.model.data.jrodata import Voltage
26 26 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
27 27 from numpy import imag
28 28
29 29
30 30 class AMISRReader(ProcessingUnit):
31 31 '''
32 32 classdocs
33 33 '''
34 34
35 35 def __init__(self):
36 36 '''
37 37 Constructor
38 38 '''
39 39
40 40 ProcessingUnit.__init__(self)
41 41
42 42 self.set = None
43 43 self.subset = None
44 44 self.extension_file = '.h5'
45 45 self.dtc_str = 'dtc'
46 46 self.dtc_id = 0
47 47 self.status = True
48 48 self.isConfig = False
49 49 self.dirnameList = []
50 50 self.filenameList = []
51 51 self.fileIndex = None
52 52 self.flagNoMoreFiles = False
53 53 self.flagIsNewFile = 0
54 54 self.filename = ''
55 55 self.amisrFilePointer = None
56 56 self.realBeamCode = []
57 57 self.beamCodeMap = None
58 58 self.azimuthList = []
59 59 self.elevationList = []
60 60 self.dataShape = None
61 61
62 62
63 63
64 64 self.profileIndex = 0
65 65
66 66
67 67 self.beamCodeByFrame = None
68 68 self.radacTimeByFrame = None
69 69
70 70 self.dataset = None
71 71
72 72 self.__firstFile = True
73 73
74 74 self.buffer = None
75 75
76 76 self.timezone = 'ut'
77 77
78 78 self.__waitForNewFile = 20
79 79 self.__filename_online = None
80 80 #Is really necessary create the output object in the initializer
81 81 self.dataOut = Voltage()
82 82 self.dataOut.error=False
83 83
84 84
85 85 def setup(self,path=None,
86 86 startDate=None,
87 87 endDate=None,
88 88 startTime=None,
89 89 endTime=None,
90 90 walk=True,
91 91 timezone='ut',
92 92 all=0,
93 93 code = None,
94 94 nCode = 0,
95 95 nBaud = 0,
96 96 online=False):
97 97
98 98
99 99
100 100 self.timezone = timezone
101 101 self.all = all
102 102 self.online = online
103 103
104 104 self.code = code
105 105 self.nCode = int(nCode)
106 106 self.nBaud = int(nBaud)
107 107
108 108
109 109
110 110 #self.findFiles()
111 111 if not(online):
112 112 #Busqueda de archivos offline
113 113 self.searchFilesOffLine(path, startDate, endDate, startTime, endTime, walk)
114 114 else:
115 115 self.searchFilesOnLine(path, startDate, endDate, startTime,endTime,walk)
116 116
117 117 if not(self.filenameList):
118 118 print("There is no files into the folder: %s"%(path))
119 119 sys.exit()
120 120
121 121 self.fileIndex = 0
122 122
123 123 self.readNextFile(online)
124 124
125 125 '''
126 126 Add code
127 127 '''
128 128 self.isConfig = True
129 129 # print("Setup Done")
130 130 pass
131 131
132 132
133 133 def readAMISRHeader(self,fp):
134 134
135 135 if self.isConfig and (not self.flagNoMoreFiles):
136 136 newShape = fp.get('Raw11/Data/Samples/Data').shape[1:]
137 137 if self.dataShape != newShape and newShape != None:
138 138 print("\nNEW FILE HAS A DIFFERENT SHAPE")
139 139 print(self.dataShape,newShape,"\n")
140 140 return 0
141 141 else:
142 142 self.dataShape = fp.get('Raw11/Data/Samples/Data').shape[1:]
143 143
144 144
145 145 header = 'Raw11/Data/RadacHeader'
146 146 self.beamCodeByPulse = fp.get(header+'/BeamCode') # LIST OF BEAMS PER PROFILE, TO BE USED ON REARRANGE
147 147 if (self.startDate> datetime.date(2021, 7, 15)): #Se cambió la forma de extracción de Apuntes el 17
148 148 self.beamcodeFile = fp['Setup/Beamcodefile'][()].decode()
149 149 self.trueBeams = self.beamcodeFile.split("\n")
150 150 self.trueBeams.pop()#remove last
151 151 [self.realBeamCode.append(x) for x in self.trueBeams if x not in self.realBeamCode]
152 152 self.beamCode = [int(x, 16) for x in self.realBeamCode]
153 153 else:
154 154 _beamCode= fp.get('Raw11/Data/Beamcodes') #se usa la manera previa al cambio de apuntes
155 155 self.beamCode = _beamCode[0,:]
156 156
157 157 if self.beamCodeMap == None:
158 158 self.beamCodeMap = fp['Setup/BeamcodeMap']
159 159 for beam in self.beamCode:
160 160 beamAziElev = numpy.where(self.beamCodeMap[:,0]==beam)
161 161 beamAziElev = beamAziElev[0].squeeze()
162 162 self.azimuthList.append(self.beamCodeMap[beamAziElev,1])
163 163 self.elevationList.append(self.beamCodeMap[beamAziElev,2])
164 164 #print("Beamssss: ",self.beamCodeMap[beamAziElev,1],self.beamCodeMap[beamAziElev,2])
165 165 #print(self.beamCode)
166 166 #self.code = fp.get(header+'/Code') # NOT USE FOR THIS
167 167 self.frameCount = fp.get(header+'/FrameCount')# NOT USE FOR THIS
168 168 self.modeGroup = fp.get(header+'/ModeGroup')# NOT USE FOR THIS
169 169 self.nsamplesPulse = fp.get(header+'/NSamplesPulse')# TO GET NSA OR USING DATA FOR THAT
170 170 self.pulseCount = fp.get(header+'/PulseCount')# NOT USE FOR THIS
171 171 self.radacTime = fp.get(header+'/RadacTime')# 1st TIME ON FILE ANDE CALCULATE THE REST WITH IPP*nindexprofile
172 172 self.timeCount = fp.get(header+'/TimeCount')# NOT USE FOR THIS
173 173 self.timeStatus = fp.get(header+'/TimeStatus')# NOT USE FOR THIS
174 174 self.rangeFromFile = fp.get('Raw11/Data/Samples/Range')
175 175 self.frequency = fp.get('Rx/Frequency')
176 176 txAus = fp.get('Raw11/Data/Pulsewidth')
177 177
178 178
179 179 self.nblocks = self.pulseCount.shape[0] #nblocks
180 180
181 181 self.nprofiles = self.pulseCount.shape[1] #nprofile
182 182 self.nsa = self.nsamplesPulse[0,0] #ngates
183 183 self.nchannels = len(self.beamCode)
184 184 self.ippSeconds = (self.radacTime[0][1] -self.radacTime[0][0]) #Ipp in seconds
185 185 #self.__waitForNewFile = self.nblocks # wait depending on the number of blocks since each block is 1 sec
186 186 self.__waitForNewFile = self.nblocks * self.nprofiles * self.ippSeconds # wait until new file is created
187 187
188 188 #filling radar controller header parameters
189 189 self.__ippKm = self.ippSeconds *.15*1e6 # in km
190 190 self.__txA = (txAus.value)*.15 #(ipp[us]*.15km/1us) in km
191 191 self.__txB = 0
192 192 nWindows=1
193 193 self.__nSamples = self.nsa
194 194 self.__firstHeight = self.rangeFromFile[0][0]/1000 #in km
195 195 self.__deltaHeight = (self.rangeFromFile[0][1] - self.rangeFromFile[0][0])/1000
196 196
197 197 #for now until understand why the code saved is different (code included even though code not in tuf file)
198 198 #self.__codeType = 0
199 199 # self.__nCode = None
200 200 # self.__nBaud = None
201 201 self.__code = self.code
202 202 self.__codeType = 0
203 203 if self.code != None:
204 204 self.__codeType = 1
205 205 self.__nCode = self.nCode
206 206 self.__nBaud = self.nBaud
207 207 #self.__code = 0
208 208
209 209 #filling system header parameters
210 210 self.__nSamples = self.nsa
211 211 self.newProfiles = self.nprofiles/self.nchannels
212 212 self.__channelList = list(range(self.nchannels))
213 213
214 214 self.__frequency = self.frequency[0][0]
215 215
216 216
217 217 return 1
218 218
219 219
220 220 def createBuffers(self):
221 221
222 222 pass
223 223
224 224 def __setParameters(self,path='', startDate='',endDate='',startTime='', endTime='', walk=''):
225 225 self.path = path
226 226 self.startDate = startDate
227 227 self.endDate = endDate
228 228 self.startTime = startTime
229 229 self.endTime = endTime
230 230 self.walk = walk
231 231
232 232 def __checkPath(self):
233 233 if os.path.exists(self.path):
234 234 self.status = 1
235 235 else:
236 236 self.status = 0
237 237 print('Path:%s does not exists'%self.path)
238 238
239 239 return
240 240
241 241
242 242 def __selDates(self, amisr_dirname_format):
243 243 try:
244 244 year = int(amisr_dirname_format[0:4])
245 245 month = int(amisr_dirname_format[4:6])
246 246 dom = int(amisr_dirname_format[6:8])
247 247 thisDate = datetime.date(year,month,dom)
248
249 if (thisDate>=self.startDate and thisDate <= self.endDate):
248 #margen de un día extra, igual luego se filtra for fecha y hora
249 if (thisDate>=(self.startDate - datetime.timedelta(days=1)) and thisDate <= (self.endDate)+ datetime.timedelta(days=1)):
250 250 return amisr_dirname_format
251 251 except:
252 252 return None
253 253
254 254
255 255 def __findDataForDates(self,online=False):
256 256
257 257 if not(self.status):
258 258 return None
259 259
260 260 pat = '\d+.\d+'
261 261 dirnameList = [re.search(pat,x) for x in os.listdir(self.path)]
262 262 dirnameList = [x for x in dirnameList if x!=None]
263 263 dirnameList = [x.string for x in dirnameList]
264 264 if not(online):
265 265 dirnameList = [self.__selDates(x) for x in dirnameList]
266 266 dirnameList = [x for x in dirnameList if x!=None]
267 267 if len(dirnameList)>0:
268 268 self.status = 1
269 269 self.dirnameList = dirnameList
270 270 self.dirnameList.sort()
271 271 else:
272 272 self.status = 0
273 273 return None
274 274
275 275 def __getTimeFromData(self):
276 276 startDateTime_Reader = datetime.datetime.combine(self.startDate,self.startTime)
277 277 endDateTime_Reader = datetime.datetime.combine(self.endDate,self.endTime)
278 278
279 279 print('Filtering Files from %s to %s'%(startDateTime_Reader, endDateTime_Reader))
280 280 print('........................................')
281 281 filter_filenameList = []
282 282 self.filenameList.sort()
283 283 #for i in range(len(self.filenameList)-1):
284 284 for i in range(len(self.filenameList)):
285 285 filename = self.filenameList[i]
286 286 fp = h5py.File(filename,'r')
287 287 time_str = fp.get('Time/RadacTimeString')
288 288
289 289 startDateTimeStr_File = time_str[0][0].decode('UTF-8').split('.')[0]
290 290 #startDateTimeStr_File = "2019-12-16 09:21:11"
291 291 junk = time.strptime(startDateTimeStr_File, '%Y-%m-%d %H:%M:%S')
292 292 startDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec)
293 293
294 294 #endDateTimeStr_File = "2019-12-16 11:10:11"
295 295 endDateTimeStr_File = time_str[-1][-1].decode('UTF-8').split('.')[0]
296 296 junk = time.strptime(endDateTimeStr_File, '%Y-%m-%d %H:%M:%S')
297 297 endDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec)
298 298
299 299 fp.close()
300 300
301 301 #print("check time", startDateTime_File)
302 302 if self.timezone == 'lt':
303 303 startDateTime_File = startDateTime_File - datetime.timedelta(minutes = 300)
304 304 endDateTime_File = endDateTime_File - datetime.timedelta(minutes = 300)
305 if (endDateTime_File>=startDateTime_Reader and endDateTime_File<=endDateTime_Reader):
305 if (startDateTime_File >=startDateTime_Reader and endDateTime_File<=endDateTime_Reader):
306 306 filter_filenameList.append(filename)
307 307
308 if (endDateTime_File>endDateTime_Reader):
308 if (startDateTime_File>endDateTime_Reader):
309 309 break
310 310
311 311
312 312 filter_filenameList.sort()
313 313 self.filenameList = filter_filenameList
314
314 315 return 1
315 316
316 317 def __filterByGlob1(self, dirName):
317 318 filter_files = glob.glob1(dirName, '*.*%s'%self.extension_file)
318 319 filter_files.sort()
319 320 filterDict = {}
320 321 filterDict.setdefault(dirName)
321 322 filterDict[dirName] = filter_files
322 323 return filterDict
323 324
324 325 def __getFilenameList(self, fileListInKeys, dirList):
325 326 for value in fileListInKeys:
326 327 dirName = list(value.keys())[0]
327 328 for file in value[dirName]:
328 329 filename = os.path.join(dirName, file)
329 330 self.filenameList.append(filename)
330 331
331 332
332 333 def __selectDataForTimes(self, online=False):
333 334 #aun no esta implementado el filtro for tiempo
334 335 if not(self.status):
335 336 return None
336 337
337 338 dirList = [os.path.join(self.path,x) for x in self.dirnameList]
338
339 339 fileListInKeys = [self.__filterByGlob1(x) for x in dirList]
340
341 340 self.__getFilenameList(fileListInKeys, dirList)
342 341 if not(online):
343 342 #filtro por tiempo
344 343 if not(self.all):
345 344 self.__getTimeFromData()
346 345
347 346 if len(self.filenameList)>0:
348 347 self.status = 1
349 348 self.filenameList.sort()
350 349 else:
351 350 self.status = 0
352 351 return None
353 352
354 353 else:
355 354 #get the last file - 1
356 355 self.filenameList = [self.filenameList[-2]]
357 356 new_dirnameList = []
358 357 for dirname in self.dirnameList:
359 358 junk = numpy.array([dirname in x for x in self.filenameList])
360 359 junk_sum = junk.sum()
361 360 if junk_sum > 0:
362 361 new_dirnameList.append(dirname)
363 362 self.dirnameList = new_dirnameList
364 363 return 1
365 364
366 365 def searchFilesOnLine(self, path, startDate, endDate, startTime=datetime.time(0,0,0),
367 366 endTime=datetime.time(23,59,59),walk=True):
368 367
369 368 if endDate ==None:
370 369 startDate = datetime.datetime.utcnow().date()
371 370 endDate = datetime.datetime.utcnow().date()
372 371
373 372 self.__setParameters(path=path, startDate=startDate, endDate=endDate,startTime = startTime,endTime=endTime, walk=walk)
374 373
375 374 self.__checkPath()
376 375
377 376 self.__findDataForDates(online=True)
378 377
379 378 self.dirnameList = [self.dirnameList[-1]]
380 379
381 380 self.__selectDataForTimes(online=True)
382 381
383 382 return
384 383
385 384
386 385 def searchFilesOffLine(self,
387 386 path,
388 387 startDate,
389 388 endDate,
390 389 startTime=datetime.time(0,0,0),
391 390 endTime=datetime.time(23,59,59),
392 391 walk=True):
393 392
394 393 self.__setParameters(path, startDate, endDate, startTime, endTime, walk)
395 394
396 395 self.__checkPath()
397 396
398 397 self.__findDataForDates()
399 398
400 399 self.__selectDataForTimes()
401 400
402 401 for i in range(len(self.filenameList)):
403 402 print("%s" %(self.filenameList[i]))
404 403
405 404 return
406 405
407 406 def __setNextFileOffline(self):
408 407
409 408 try:
410 409 self.filename = self.filenameList[self.fileIndex]
411 410 self.amisrFilePointer = h5py.File(self.filename,'r')
412 411 self.fileIndex += 1
413 412 except:
414 413 self.flagNoMoreFiles = 1
415 414 print("No more Files")
416 415 return 0
417 416
418 417 self.flagIsNewFile = 1
419 418 print("Setting the file: %s"%self.filename)
420 419
421 420 return 1
422 421
423 422
424 423 def __setNextFileOnline(self):
425 424 filename = self.filenameList[0]
426 425 if self.__filename_online != None:
427 426 self.__selectDataForTimes(online=True)
428 427 filename = self.filenameList[0]
429 428 wait = 0
430 429 self.__waitForNewFile=300 ## DEBUG:
431 430 while self.__filename_online == filename:
432 431 print('waiting %d seconds to get a new file...'%(self.__waitForNewFile))
433 432 if wait == 5:
434 433 self.flagNoMoreFiles = 1
435 434 return 0
436 435 sleep(self.__waitForNewFile)
437 436 self.__selectDataForTimes(online=True)
438 437 filename = self.filenameList[0]
439 438 wait += 1
440 439
441 440 self.__filename_online = filename
442 441
443 442 self.amisrFilePointer = h5py.File(filename,'r')
444 443 self.flagIsNewFile = 1
445 444 self.filename = filename
446 445 print("Setting the file: %s"%self.filename)
447 446 return 1
448 447
449 448
450 449 def readData(self):
451 450 buffer = self.amisrFilePointer.get('Raw11/Data/Samples/Data')
452 451 re = buffer[:,:,:,0]
453 452 im = buffer[:,:,:,1]
454 453 dataset = re + im*1j
455 454
456 455 self.radacTime = self.amisrFilePointer.get('Raw11/Data/RadacHeader/RadacTime')
457 456 timeset = self.radacTime[:,0]
458 457
459 458 return dataset,timeset
460 459
461 460 def reshapeData(self):
462 461 #self.beamCodeByPulse, self.beamCode, self.nblocks, self.nprofiles, self.nsa,
463 462 channels = self.beamCodeByPulse[0,:]
464 463 nchan = self.nchannels
465 464 #self.newProfiles = self.nprofiles/nchan #must be defined on filljroheader
466 465 nblocks = self.nblocks
467 466 nsamples = self.nsa
468 467
469 468 #Dimensions : nChannels, nProfiles, nSamples
470 469 new_block = numpy.empty((nblocks, nchan, numpy.int_(self.newProfiles), nsamples), dtype="complex64")
471 470 ############################################
472 471
473 472 for thisChannel in range(nchan):
474 473 new_block[:,thisChannel,:,:] = self.dataset[:,numpy.where(channels==self.beamCode[thisChannel])[0],:]
475 474
476 475
477 476 new_block = numpy.transpose(new_block, (1,0,2,3))
478 477 new_block = numpy.reshape(new_block, (nchan,-1, nsamples))
479 478
480 479 return new_block
481 480
482 481 def updateIndexes(self):
483 482
484 483 pass
485 484
486 485 def fillJROHeader(self):
487 486
488 487 #fill radar controller header
489 488 self.dataOut.radarControllerHeaderObj = RadarControllerHeader(ipp=self.__ippKm,
490 489 txA=self.__txA,
491 490 txB=0,
492 491 nWindows=1,
493 492 nHeights=self.__nSamples,
494 493 firstHeight=self.__firstHeight,
495 494 deltaHeight=self.__deltaHeight,
496 495 codeType=self.__codeType,
497 496 nCode=self.__nCode, nBaud=self.__nBaud,
498 497 code = self.__code,
499 498 fClock=1)
500 499
501 500 #fill system header
502 501 self.dataOut.systemHeaderObj = SystemHeader(nSamples=self.__nSamples,
503 502 nProfiles=self.newProfiles,
504 503 nChannels=len(self.__channelList),
505 504 adcResolution=14,
506 505 pciDioBusWidth=32)
507 506
508 507 self.dataOut.type = "Voltage"
509 508 self.dataOut.data = None
510 509 self.dataOut.dtype = numpy.dtype([('real','<i8'),('imag','<i8')])
511 510 # self.dataOut.nChannels = 0
512 511
513 512 # self.dataOut.nHeights = 0
514 513
515 514 self.dataOut.nProfiles = self.newProfiles*self.nblocks
516 515 #self.dataOut.heightList = self.__firstHeigth + numpy.arange(self.__nSamples, dtype = numpy.float)*self.__deltaHeigth
517 516 ranges = numpy.reshape(self.rangeFromFile.value,(-1))
518 517 self.dataOut.heightList = ranges/1000.0 #km
519 518 self.dataOut.channelList = self.__channelList
520 519 self.dataOut.blocksize = self.dataOut.nChannels * self.dataOut.nHeights
521 520
522 521 # self.dataOut.channelIndexList = None
523 522
524 523
525 524 self.dataOut.azimuthList = numpy.array(self.azimuthList)
526 525 self.dataOut.elevationList = numpy.array(self.elevationList)
527 526 self.dataOut.codeList = numpy.array(self.beamCode)
528 527 #print(self.dataOut.elevationList)
529 528 self.dataOut.flagNoData = True
530 529
531 530 #Set to TRUE if the data is discontinuous
532 531 self.dataOut.flagDiscontinuousBlock = False
533 532
534 533 self.dataOut.utctime = None
535 534
536 535 #self.dataOut.timeZone = -5 #self.__timezone/60 #timezone like jroheader, difference in minutes between UTC and localtime
537 536 if self.timezone == 'lt':
538 537 self.dataOut.timeZone = time.timezone / 60. #get the timezone in minutes
539 538 else:
540 539 self.dataOut.timeZone = 0 #by default time is UTC
541 540
542 541 self.dataOut.dstFlag = 0
543 542 self.dataOut.errorCount = 0
544 543 self.dataOut.nCohInt = 1
545 544 self.dataOut.flagDecodeData = False #asumo que la data esta decodificada
546 545 self.dataOut.flagDeflipData = False #asumo que la data esta sin flip
547 546 self.dataOut.flagShiftFFT = False
548 547 self.dataOut.ippSeconds = self.ippSeconds
549 548
550 549 #Time interval between profiles
551 550 #self.dataOut.timeInterval = self.dataOut.ippSeconds * self.dataOut.nCohInt
552 551
553 552 self.dataOut.frequency = self.__frequency
554 553 self.dataOut.realtime = self.online
555 554 pass
556 555
557 556 def readNextFile(self,online=False):
558 557
559 558 if not(online):
560 559 newFile = self.__setNextFileOffline()
561 560 else:
562 561 newFile = self.__setNextFileOnline()
563 562
564 563 if not(newFile):
565 564 self.dataOut.error = True
566 565 return 0
567 566
568 567 if not self.readAMISRHeader(self.amisrFilePointer):
569 568 self.dataOut.error = True
570 569 return 0
571 570
572 571 self.createBuffers()
573 572 self.fillJROHeader()
574 573
575 574 #self.__firstFile = False
576 575
577 576
578 577
579 578 self.dataset,self.timeset = self.readData()
580 579
581 580 if self.endDate!=None:
582 581 endDateTime_Reader = datetime.datetime.combine(self.endDate,self.endTime)
583 582 time_str = self.amisrFilePointer.get('Time/RadacTimeString')
584 583 startDateTimeStr_File = time_str[0][0].decode('UTF-8').split('.')[0]
585 584 junk = time.strptime(startDateTimeStr_File, '%Y-%m-%d %H:%M:%S')
586 585 startDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec)
587 586 if self.timezone == 'lt':
588 587 startDateTime_File = startDateTime_File - datetime.timedelta(minutes = 300)
589 588 if (startDateTime_File>endDateTime_Reader):
590 589 return 0
591 590
592 591 self.jrodataset = self.reshapeData()
593 592 #----self.updateIndexes()
594 593 self.profileIndex = 0
595 594
596 595 return 1
597 596
598 597
599 598 def __hasNotDataInBuffer(self):
600 599 if self.profileIndex >= (self.newProfiles*self.nblocks):
601 600 return 1
602 601 return 0
603 602
604 603
605 604 def getData(self):
606 605
607 606 if self.flagNoMoreFiles:
608 607 self.dataOut.flagNoData = True
609 608 return 0
610 609
611 610 if self.__hasNotDataInBuffer():
612 611 if not (self.readNextFile(self.online)):
613 612 return 0
614 613
615 614
616 615 if self.dataset is None: # setear esta condicion cuando no hayan datos por leer
617 616 self.dataOut.flagNoData = True
618 617 return 0
619 618
620 619 #self.dataOut.data = numpy.reshape(self.jrodataset[self.profileIndex,:],(1,-1))
621 620
622 621 self.dataOut.data = self.jrodataset[:,self.profileIndex,:]
623 622
624 623 #print("R_t",self.timeset)
625 624
626 625 #self.dataOut.utctime = self.jrotimeset[self.profileIndex]
627 626 #verificar basic header de jro data y ver si es compatible con este valor
628 627 #self.dataOut.utctime = self.timeset + (self.profileIndex * self.ippSeconds * self.nchannels)
629 628 indexprof = numpy.mod(self.profileIndex, self.newProfiles)
630 629 indexblock = self.profileIndex/self.newProfiles
631 630 #print (indexblock, indexprof)
632 diffUTC = 1.8e4 #UTC diference from peru in seconds --Joab
633 631 diffUTC = 0
634 632 t_comp = (indexprof * self.ippSeconds * self.nchannels) + diffUTC #
635 633
636 634 #print("utc :",indexblock," __ ",t_comp)
637 635 #print(numpy.shape(self.timeset))
638 636 self.dataOut.utctime = self.timeset[numpy.int_(indexblock)] + t_comp
639 637 #self.dataOut.utctime = self.timeset[self.profileIndex] + t_comp
640 #print(self.dataOut.utctime)
638
641 639 self.dataOut.profileIndex = self.profileIndex
642 640 #print("N profile:",self.profileIndex,self.newProfiles,self.nblocks,self.dataOut.utctime)
643 641 self.dataOut.flagNoData = False
644 # if indexprof == 0:
645 # print self.dataOut.utctime
642 # if indexprof == 0:
643 # print("kamisr: ",self.dataOut.utctime)
646 644
647 645 self.profileIndex += 1
648 646
649 647 return self.dataOut.data
650 648
651 649
652 650 def run(self, **kwargs):
653 651 '''
654 652 This method will be called many times so here you should put all your code
655 653 '''
656 654 #print("running kamisr")
657 655 if not self.isConfig:
658 656 self.setup(**kwargs)
659 657 self.isConfig = True
660 658
661 659 self.getData()
@@ -1,651 +1,651
1 1 import os
2 2 import time
3 3 import datetime
4 4
5 5 import numpy
6 6 import h5py
7 7
8 8 import schainpy.admin
9 9 from schainpy.model.data.jrodata import *
10 10 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
11 11 from schainpy.model.io.jroIO_base import *
12 12 from schainpy.utils import log
13 13
14 14
15 15 class HDFReader(Reader, ProcessingUnit):
16 16 """Processing unit to read HDF5 format files
17 17
18 18 This unit reads HDF5 files created with `HDFWriter` operation contains
19 19 by default two groups Data and Metadata all variables would be saved as `dataOut`
20 20 attributes.
21 21 It is possible to read any HDF5 file by given the structure in the `description`
22 22 parameter, also you can add extra values to metadata with the parameter `extras`.
23 23
24 24 Parameters:
25 25 -----------
26 26 path : str
27 27 Path where files are located.
28 28 startDate : date
29 29 Start date of the files
30 30 endDate : list
31 31 End date of the files
32 32 startTime : time
33 33 Start time of the files
34 34 endTime : time
35 35 End time of the files
36 36 description : dict, optional
37 37 Dictionary with the description of the HDF5 file
38 38 extras : dict, optional
39 39 Dictionary with extra metadata to be be added to `dataOut`
40 40
41 41 Examples
42 42 --------
43 43
44 44 desc = {
45 45 'Data': {
46 46 'data_output': ['u', 'v', 'w'],
47 47 'utctime': 'timestamps',
48 48 } ,
49 49 'Metadata': {
50 50 'heightList': 'heights'
51 51 }
52 52 }
53 53
54 54 desc = {
55 55 'Data': {
56 56 'data_output': 'winds',
57 57 'utctime': 'timestamps'
58 58 },
59 59 'Metadata': {
60 60 'heightList': 'heights'
61 61 }
62 62 }
63 63
64 64 extras = {
65 65 'timeZone': 300
66 66 }
67 67
68 68 reader = project.addReadUnit(
69 69 name='HDFReader',
70 70 path='/path/to/files',
71 71 startDate='2019/01/01',
72 72 endDate='2019/01/31',
73 73 startTime='00:00:00',
74 74 endTime='23:59:59',
75 75 # description=json.dumps(desc),
76 76 # extras=json.dumps(extras),
77 77 )
78 78
79 79 """
80 80
81 81 __attrs__ = ['path', 'startDate', 'endDate', 'startTime', 'endTime', 'description', 'extras']
82 82
83 83 def __init__(self):
84 84 ProcessingUnit.__init__(self)
85 85 self.dataOut = Parameters()
86 86 self.ext = ".hdf5"
87 87 self.optchar = "D"
88 88 self.meta = {}
89 89 self.data = {}
90 90 self.open_file = h5py.File
91 91 self.open_mode = 'r'
92 92 self.description = {}
93 93 self.extras = {}
94 94 self.filefmt = "*%Y%j***"
95 95 self.folderfmt = "*%Y%j"
96 96 self.utcoffset = 0
97 97
98 98 def setup(self, **kwargs):
99 99
100 100 self.set_kwargs(**kwargs)
101 101 if not self.ext.startswith('.'):
102 102 self.ext = '.{}'.format(self.ext)
103 103
104 104 if self.online:
105 105 log.log("Searching files in online mode...", self.name)
106 106
107 107 for nTries in range(self.nTries):
108 108 fullpath = self.searchFilesOnLine(self.path, self.startDate,
109 109 self.endDate, self.expLabel, self.ext, self.walk,
110 110 self.filefmt, self.folderfmt)
111 111 pathname, filename = os.path.split(fullpath)
112 print(pathname,filename)
112 #print(pathname,filename)
113 113 try:
114 114 fullpath = next(fullpath)
115 115
116 116 except:
117 117 fullpath = None
118 118
119 119 if fullpath:
120 120 break
121 121
122 122 log.warning(
123 123 'Waiting {} sec for a valid file in {}: try {} ...'.format(
124 124 self.delay, self.path, nTries + 1),
125 125 self.name)
126 126 time.sleep(self.delay)
127 127
128 128 if not(fullpath):
129 129 raise schainpy.admin.SchainError(
130 130 'There isn\'t any valid file in {}'.format(self.path))
131 131
132 132 pathname, filename = os.path.split(fullpath)
133 133 self.year = int(filename[1:5])
134 134 self.doy = int(filename[5:8])
135 135 self.set = int(filename[8:11]) - 1
136 136 else:
137 137 log.log("Searching files in {}".format(self.path), self.name)
138 138 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
139 139 self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt)
140 140
141 141 self.setNextFile()
142 142
143 143 return
144 144
145 145
146 146 def readFirstHeader(self):
147 147 '''Read metadata and data'''
148 148
149 149 self.__readMetadata()
150 150 self.__readData()
151 151 self.__setBlockList()
152 152
153 153 if 'type' in self.meta:
154 154 self.dataOut = eval(self.meta['type'])()
155 155
156 156 for attr in self.meta:
157 157 print("attr: ", attr)
158 158 setattr(self.dataOut, attr, self.meta[attr])
159 159
160 160
161 161 self.blockIndex = 0
162 162
163 163 return
164 164
165 165 def __setBlockList(self):
166 166 '''
167 167 Selects the data within the times defined
168 168
169 169 self.fp
170 170 self.startTime
171 171 self.endTime
172 172 self.blockList
173 173 self.blocksPerFile
174 174
175 175 '''
176 176
177 177 startTime = self.startTime
178 178 endTime = self.endTime
179 179 thisUtcTime = self.data['utctime'] + self.utcoffset
180 180 self.interval = numpy.min(thisUtcTime[1:] - thisUtcTime[:-1])
181 181 thisDatetime = datetime.datetime.utcfromtimestamp(thisUtcTime[0])
182 182 self.startFileDatetime = thisDatetime
183 print("datee ",self.startFileDatetime)
184 183 thisDate = thisDatetime.date()
185 184 thisTime = thisDatetime.time()
186 185
187 186 startUtcTime = (datetime.datetime.combine(thisDate, startTime) - datetime.datetime(1970, 1, 1)).total_seconds()
188 187 endUtcTime = (datetime.datetime.combine(thisDate, endTime) - datetime.datetime(1970, 1, 1)).total_seconds()
189 188
190 189 ind = numpy.where(numpy.logical_and(thisUtcTime >= startUtcTime, thisUtcTime < endUtcTime))[0]
191 190
192 191 self.blockList = ind
193 192 self.blocksPerFile = len(ind)
194 193 self.blocksPerFile = len(thisUtcTime)
195 194 return
196 195
197 196 def __readMetadata(self):
198 197 '''
199 198 Reads Metadata
200 199 '''
201 200
202 201 meta = {}
203 202
204 203 if self.description:
205 204 for key, value in self.description['Metadata'].items():
206 205 meta[key] = self.fp[value][()]
207 206 else:
208 207 grp = self.fp['Metadata']
209 208 for name in grp:
210 209 meta[name] = grp[name][()]
211 210
212 211 if self.extras:
213 212 for key, value in self.extras.items():
214 213 meta[key] = value
215 214 self.meta = meta
216 215
217 216 return
218 217
219 218
220 219
221 220 def checkForRealPath(self, nextFile, nextDay):
222 221
223 222 # print("check FRP")
224 223 # dt = self.startFileDatetime + datetime.timedelta(1)
225 224 # filename = '{}.{}{}'.format(self.path, dt.strftime('%Y%m%d'), self.ext)
226 225 # fullfilename = os.path.join(self.path, filename)
227 226 # print("check Path ",fullfilename,filename)
228 227 # if os.path.exists(fullfilename):
229 228 # return fullfilename, filename
230 229 # return None, filename
231 230 return None,None
232 231
233 232 def __readData(self):
234 233
235 234 data = {}
236 235
237 236 if self.description:
238 237 for key, value in self.description['Data'].items():
239 238 if isinstance(value, str):
240 239 if isinstance(self.fp[value], h5py.Dataset):
241 240 data[key] = self.fp[value][()]
242 241 elif isinstance(self.fp[value], h5py.Group):
243 242 array = []
244 243 for ch in self.fp[value]:
245 244 array.append(self.fp[value][ch][()])
246 245 data[key] = numpy.array(array)
247 246 elif isinstance(value, list):
248 247 array = []
249 248 for ch in value:
250 249 array.append(self.fp[ch][()])
251 250 data[key] = numpy.array(array)
252 251 else:
253 252 grp = self.fp['Data']
254 253 for name in grp:
255 254 if isinstance(grp[name], h5py.Dataset):
256 255 array = grp[name][()]
257 256 elif isinstance(grp[name], h5py.Group):
258 257 array = []
259 258 for ch in grp[name]:
260 259 array.append(grp[name][ch][()])
261 260 array = numpy.array(array)
262 261 else:
263 262 log.warning('Unknown type: {}'.format(name))
264 263
265 264 if name in self.description:
266 265 key = self.description[name]
267 266 else:
268 267 key = name
269 268 data[key] = array
270 269
271 270 self.data = data
272 271 return
273 272
274 273 def getData(self):
275 274 if not self.isDateTimeInRange(self.startFileDatetime, self.startDate, self.endDate, self.startTime, self.endTime):
276 275 self.dataOut.flagNoData = True
277 276 self.dataOut.error = True
278 277 return
279 278 for attr in self.data:
280 279 if self.data[attr].ndim == 1:
281 280 setattr(self.dataOut, attr, self.data[attr][self.blockIndex])
282 281 else:
283 282 setattr(self.dataOut, attr, self.data[attr][:, self.blockIndex])
284 283
285 284 self.dataOut.flagNoData = False
286 285 self.blockIndex += 1
287 286
288 287 log.log("Block No. {}/{} -> {}".format(
289 288 self.blockIndex,
290 289 self.blocksPerFile,
291 290 self.dataOut.datatime.ctime()), self.name)
292 291
293 292 return
294 293
295 294 def run(self, **kwargs):
296 295
297 296 if not(self.isConfig):
298 297 self.setup(**kwargs)
299 298 self.isConfig = True
300 299
301 300 if self.blockIndex == self.blocksPerFile:
302 301 self.setNextFile()
303 302
304 303 self.getData()
305 304
306 305 return
307 306
308 307 @MPDecorator
309 308 class HDFWriter(Operation):
310 309 """Operation to write HDF5 files.
311 310
312 311 The HDF5 file contains by default two groups Data and Metadata where
313 312 you can save any `dataOut` attribute specified by `dataList` and `metadataList`
314 313 parameters, data attributes are normaly time dependent where the metadata
315 314 are not.
316 315 It is possible to customize the structure of the HDF5 file with the
317 316 optional description parameter see the examples.
318 317
319 318 Parameters:
320 319 -----------
321 320 path : str
322 321 Path where files will be saved.
323 322 blocksPerFile : int
324 323 Number of blocks per file
325 324 metadataList : list
326 325 List of the dataOut attributes that will be saved as metadata
327 326 dataList : int
328 327 List of the dataOut attributes that will be saved as data
329 328 setType : bool
330 329 If True the name of the files corresponds to the timestamp of the data
331 330 description : dict, optional
332 331 Dictionary with the desired description of the HDF5 file
333 332
334 333 Examples
335 334 --------
336 335
337 336 desc = {
338 337 'data_output': {'winds': ['z', 'w', 'v']},
339 338 'utctime': 'timestamps',
340 339 'heightList': 'heights'
341 340 }
342 341 desc = {
343 342 'data_output': ['z', 'w', 'v'],
344 343 'utctime': 'timestamps',
345 344 'heightList': 'heights'
346 345 }
347 346 desc = {
348 347 'Data': {
349 348 'data_output': 'winds',
350 349 'utctime': 'timestamps'
351 350 },
352 351 'Metadata': {
353 352 'heightList': 'heights'
354 353 }
355 354 }
356 355
357 356 writer = proc_unit.addOperation(name='HDFWriter')
358 357 writer.addParameter(name='path', value='/path/to/file')
359 358 writer.addParameter(name='blocksPerFile', value='32')
360 359 writer.addParameter(name='metadataList', value='heightList,timeZone')
361 360 writer.addParameter(name='dataList',value='data_output,utctime')
362 361 # writer.addParameter(name='description',value=json.dumps(desc))
363 362
364 363 """
365 364
366 365 ext = ".hdf5"
367 366 optchar = "D"
368 367 filename = None
369 368 path = None
370 369 setFile = None
371 370 fp = None
372 371 firsttime = True
373 372 #Configurations
374 373 blocksPerFile = None
375 374 blockIndex = None
376 375 dataOut = None
377 376 #Data Arrays
378 377 dataList = None
379 378 metadataList = None
380 379 currentDay = None
381 380 lastTime = None
382 381
383 382 def __init__(self):
384 383
385 384 Operation.__init__(self)
386 385 return
387 386
388 387 def setup(self, path=None, blocksPerFile=10, metadataList=None, dataList=None, setType=None, description=None):
389 388 self.path = path
390 389 self.blocksPerFile = blocksPerFile
391 390 self.metadataList = metadataList
392 391 self.dataList = [s.strip() for s in dataList]
393 392 self.setType = setType
394 393 self.description = description
395 394
396 395 if self.metadataList is None:
397 396 self.metadataList = self.dataOut.metadata_list
398 397
399 398 tableList = []
400 399 dsList = []
401 400
402 401 for i in range(len(self.dataList)):
403 402 dsDict = {}
404 403 if hasattr(self.dataOut, self.dataList[i]):
405 404 dataAux = getattr(self.dataOut, self.dataList[i])
406 405 dsDict['variable'] = self.dataList[i]
407 406 else:
408 407 log.warning('Attribute {} not found in dataOut', self.name)
409 408 continue
410 409
411 410 if dataAux is None:
412 411 continue
413 412 elif isinstance(dataAux, (int, float, numpy.integer, numpy.float)):
414 413 dsDict['nDim'] = 0
415 414 else:
416 415 dsDict['nDim'] = len(dataAux.shape)
417 416 dsDict['shape'] = dataAux.shape
418 417 dsDict['dsNumber'] = dataAux.shape[0]
419 418 dsDict['dtype'] = dataAux.dtype
420 419
421 420 dsList.append(dsDict)
422 421
423 422 self.dsList = dsList
424 423 self.currentDay = self.dataOut.datatime.date()
425 424
426 425 def timeFlag(self):
427 426 currentTime = self.dataOut.utctime
428 427 timeTuple = time.localtime(currentTime)
429 428 dataDay = timeTuple.tm_yday
430
429 #print("time UTC: ",currentTime, self.dataOut.datatime)
431 430 if self.lastTime is None:
432 431 self.lastTime = currentTime
433 432 self.currentDay = dataDay
434 433 return False
435 434
436 435 timeDiff = currentTime - self.lastTime
437 436
438 437 #Si el dia es diferente o si la diferencia entre un dato y otro supera la hora
439 438 if dataDay != self.currentDay:
440 439 self.currentDay = dataDay
441 440 return True
442 441 elif timeDiff > 3*60*60:
443 442 self.lastTime = currentTime
444 443 return True
445 444 else:
446 445 self.lastTime = currentTime
447 446 return False
448 447
449 448 def run(self, dataOut, path, blocksPerFile=10, metadataList=None,
450 449 dataList=[], setType=None, description={}):
451 450
452 451 self.dataOut = dataOut
453 452 if not(self.isConfig):
454 453 self.setup(path=path, blocksPerFile=blocksPerFile,
455 454 metadataList=metadataList, dataList=dataList,
456 455 setType=setType, description=description)
457 456
458 457 self.isConfig = True
459 458 self.setNextFile()
460 459
461 460 self.putData()
462 461 return
463 462
464 463 def setNextFile(self):
465 464
466 465 ext = self.ext
467 466 path = self.path
468 467 setFile = self.setFile
469 468
470 timeTuple = time.localtime(self.dataOut.utctime)
469 timeTuple = time.gmtime(self.dataOut.utctime)
470 #print("path: ",timeTuple)
471 471 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
472 472 fullpath = os.path.join(path, subfolder)
473 473
474 474 if os.path.exists(fullpath):
475 475 filesList = os.listdir(fullpath)
476 476 filesList = [k for k in filesList if k.startswith(self.optchar)]
477 477 if len( filesList ) > 0:
478 478 filesList = sorted(filesList, key=str.lower)
479 479 filen = filesList[-1]
480 480 # el filename debera tener el siguiente formato
481 481 # 0 1234 567 89A BCDE (hex)
482 482 # x YYYY DDD SSS .ext
483 483 if isNumber(filen[8:11]):
484 484 setFile = int(filen[8:11]) #inicializo mi contador de seteo al seteo del ultimo file
485 485 else:
486 486 setFile = -1
487 487 else:
488 488 setFile = -1 #inicializo mi contador de seteo
489 489 else:
490 490 os.makedirs(fullpath)
491 491 setFile = -1 #inicializo mi contador de seteo
492 492
493 493 if self.setType is None:
494 494 setFile += 1
495 495 file = '%s%4.4d%3.3d%03d%s' % (self.optchar,
496 496 timeTuple.tm_year,
497 497 timeTuple.tm_yday,
498 498 setFile,
499 499 ext )
500 500 else:
501 501 setFile = timeTuple.tm_hour*60+timeTuple.tm_min
502 502 file = '%s%4.4d%3.3d%04d%s' % (self.optchar,
503 503 timeTuple.tm_year,
504 504 timeTuple.tm_yday,
505 505 setFile,
506 506 ext )
507 507
508 508 self.filename = os.path.join( path, subfolder, file )
509 509
510 510 #Setting HDF5 File
511 511 self.fp = h5py.File(self.filename, 'w')
512 512 #write metadata
513 513 self.writeMetadata(self.fp)
514 514 #Write data
515 515 self.writeData(self.fp)
516 516
517 517 def getLabel(self, name, x=None):
518 518
519 519 if x is None:
520 520 if 'Data' in self.description:
521 521 data = self.description['Data']
522 522 if 'Metadata' in self.description:
523 523 data.update(self.description['Metadata'])
524 524 else:
525 525 data = self.description
526 526 if name in data:
527 527 if isinstance(data[name], str):
528 528 return data[name]
529 529 elif isinstance(data[name], list):
530 530 return None
531 531 elif isinstance(data[name], dict):
532 532 for key, value in data[name].items():
533 533 return key
534 534 return name
535 535 else:
536 536 if 'Metadata' in self.description:
537 537 meta = self.description['Metadata']
538 538 else:
539 539 meta = self.description
540 540 if name in meta:
541 541 if isinstance(meta[name], list):
542 542 return meta[name][x]
543 543 elif isinstance(meta[name], dict):
544 544 for key, value in meta[name].items():
545 545 return value[x]
546 546 if 'cspc' in name:
547 547 return 'pair{:02d}'.format(x)
548 548 else:
549 549 return 'channel{:02d}'.format(x)
550 550
551 551 def writeMetadata(self, fp):
552 552
553 553 if self.description:
554 554 if 'Metadata' in self.description:
555 555 grp = fp.create_group('Metadata')
556 556 else:
557 557 grp = fp
558 558 else:
559 559 grp = fp.create_group('Metadata')
560 560
561 561 for i in range(len(self.metadataList)):
562 562 if not hasattr(self.dataOut, self.metadataList[i]):
563 563 log.warning('Metadata: `{}` not found'.format(self.metadataList[i]), self.name)
564 564 continue
565 565 value = getattr(self.dataOut, self.metadataList[i])
566 566 if isinstance(value, bool):
567 567 if value is True:
568 568 value = 1
569 569 else:
570 570 value = 0
571 571 grp.create_dataset(self.getLabel(self.metadataList[i]), data=value)
572 572 return
573 573
574 574 def writeData(self, fp):
575 575
576 576 if self.description:
577 577 if 'Data' in self.description:
578 578 grp = fp.create_group('Data')
579 579 else:
580 580 grp = fp
581 581 else:
582 582 grp = fp.create_group('Data')
583 583
584 584 dtsets = []
585 585 data = []
586 586
587 587 for dsInfo in self.dsList:
588 588 if dsInfo['nDim'] == 0:
589 589 ds = grp.create_dataset(
590 590 self.getLabel(dsInfo['variable']),
591 591 (self.blocksPerFile, ),
592 592 chunks=True,
593 593 dtype=numpy.float64)
594 594 dtsets.append(ds)
595 595 data.append((dsInfo['variable'], -1))
596 596 else:
597 597 label = self.getLabel(dsInfo['variable'])
598 598 if label is not None:
599 599 sgrp = grp.create_group(label)
600 600 else:
601 601 sgrp = grp
602 602 for i in range(dsInfo['dsNumber']):
603 603 ds = sgrp.create_dataset(
604 604 self.getLabel(dsInfo['variable'], i),
605 605 (self.blocksPerFile, ) + dsInfo['shape'][1:],
606 606 chunks=True,
607 607 dtype=dsInfo['dtype'])
608 608 dtsets.append(ds)
609 609 data.append((dsInfo['variable'], i))
610 610 fp.flush()
611 611
612 612 log.log('Creating file: {}'.format(fp.filename), self.name)
613 613
614 614 self.ds = dtsets
615 615 self.data = data
616 616 self.firsttime = True
617 617 self.blockIndex = 0
618 618 return
619 619
620 620 def putData(self):
621 621
622 622 if (self.blockIndex == self.blocksPerFile) or self.timeFlag():
623 623 self.closeFile()
624 624 self.setNextFile()
625 625
626 626 for i, ds in enumerate(self.ds):
627 627 attr, ch = self.data[i]
628 628 if ch == -1:
629 629 ds[self.blockIndex] = getattr(self.dataOut, attr)
630 630 else:
631 631 ds[self.blockIndex] = getattr(self.dataOut, attr)[ch]
632 632
633 633 self.fp.flush()
634 634 self.blockIndex += 1
635 635 log.log('Block No. {}/{}'.format(self.blockIndex, self.blocksPerFile), self.name)
636 636
637 637 return
638 638
639 639 def closeFile(self):
640 640
641 641 if self.blockIndex != self.blocksPerFile:
642 642 for ds in self.ds:
643 643 ds.resize(self.blockIndex, axis=0)
644 644
645 645 if self.fp:
646 646 self.fp.flush()
647 647 self.fp.close()
648 648
649 649 def close(self):
650 650
651 651 self.closeFile()
@@ -1,3889 +1,3888
1 1 import numpy
2 2 import math
3 3 from scipy import optimize, interpolate, signal, stats, ndimage
4 4 import scipy
5 5 import re
6 6 import datetime
7 7 import copy
8 8 import sys
9 9 import importlib
10 10 import itertools
11 11 from multiprocessing import Pool, TimeoutError
12 12 from multiprocessing.pool import ThreadPool
13 13 import time
14 14
15 15 from scipy.optimize import fmin_l_bfgs_b #optimize with bounds on state papameters
16 16 from .jroproc_base import ProcessingUnit, Operation, MPDecorator
17 17 from schainpy.model.data.jrodata import Parameters, hildebrand_sekhon
18 18 from scipy import asarray as ar,exp
19 19 from scipy.optimize import curve_fit
20 20 from schainpy.utils import log
21 21 import warnings
22 22 from numpy import NaN
23 23 from scipy.optimize.optimize import OptimizeWarning
24 24 warnings.filterwarnings('ignore')
25 25
26 26 import matplotlib.pyplot as plt
27 27
28 28 SPEED_OF_LIGHT = 299792458
29 29
30 30 '''solving pickling issue'''
31 31
32 32 def _pickle_method(method):
33 33 func_name = method.__func__.__name__
34 34 obj = method.__self__
35 35 cls = method.__self__.__class__
36 36 return _unpickle_method, (func_name, obj, cls)
37 37
38 38 def _unpickle_method(func_name, obj, cls):
39 39 for cls in cls.mro():
40 40 try:
41 41 func = cls.__dict__[func_name]
42 42 except KeyError:
43 43 pass
44 44 else:
45 45 break
46 46 return func.__get__(obj, cls)
47 47
48 48
49 49 class ParametersProc(ProcessingUnit):
50 50
51 51 METHODS = {}
52 52 nSeconds = None
53 53
54 54 def __init__(self):
55 55 ProcessingUnit.__init__(self)
56 56
57 57 # self.objectDict = {}
58 58 self.buffer = None
59 59 self.firstdatatime = None
60 60 self.profIndex = 0
61 61 self.dataOut = Parameters()
62 62 self.setupReq = False #Agregar a todas las unidades de proc
63 print("INIT PROC")
63
64 64 def __updateObjFromInput(self):
65 65
66 66 self.dataOut.inputUnit = self.dataIn.type
67 67
68 68 self.dataOut.timeZone = self.dataIn.timeZone
69 69 self.dataOut.dstFlag = self.dataIn.dstFlag
70 70 self.dataOut.errorCount = self.dataIn.errorCount
71 71 self.dataOut.useLocalTime = self.dataIn.useLocalTime
72 72
73 73 self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()
74 74 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()
75 75 self.dataOut.channelList = self.dataIn.channelList
76 76 self.dataOut.heightList = self.dataIn.heightList
77 77 self.dataOut.dtype = numpy.dtype([('real','<f4'),('imag','<f4')])
78 78 # self.dataOut.nHeights = self.dataIn.nHeights
79 79 # self.dataOut.nChannels = self.dataIn.nChannels
80 80 # self.dataOut.nBaud = self.dataIn.nBaud
81 81 # self.dataOut.nCode = self.dataIn.nCode
82 82 # self.dataOut.code = self.dataIn.code
83 83 # self.dataOut.nProfiles = self.dataOut.nFFTPoints
84 84 self.dataOut.flagDiscontinuousBlock = self.dataIn.flagDiscontinuousBlock
85 85 # self.dataOut.utctime = self.firstdatatime
86 86 self.dataOut.utctime = self.dataIn.utctime
87 87 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData #asumo q la data esta decodificada
88 88 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData #asumo q la data esta sin flip
89 89 self.dataOut.nCohInt = self.dataIn.nCohInt
90 90 # self.dataOut.nIncohInt = 1
91 91 # self.dataOut.ippSeconds = self.dataIn.ippSeconds
92 92 # self.dataOut.windowOfFilter = self.dataIn.windowOfFilter
93 93 self.dataOut.timeInterval1 = self.dataIn.timeInterval
94 94 self.dataOut.heightList = self.dataIn.heightList
95 95 self.dataOut.frequency = self.dataIn.frequency
96 96 # self.dataOut.noise = self.dataIn.noise
97 97 self.dataOut.codeList = self.dataIn.codeList
98 98 self.dataOut.azimuthList = self.dataIn.azimuthList
99 99 self.dataOut.elevationList = self.dataIn.elevationList
100 100
101 101 def run(self):
102 print("run proc param")
103 102
104 103 #---------------------- Voltage Data ---------------------------
105 104
106 105 if self.dataIn.type == "Voltage":
107 106
108 107 self.__updateObjFromInput()
109 108 self.dataOut.data_pre = self.dataIn.data.copy()
110 109 self.dataOut.flagNoData = False
111 110 self.dataOut.utctimeInit = self.dataIn.utctime
112 111 self.dataOut.paramInterval = self.dataIn.nProfiles*self.dataIn.nCohInt*self.dataIn.ippSeconds
113 112 if hasattr(self.dataIn, 'dataPP_POW'):
114 113 self.dataOut.dataPP_POW = self.dataIn.dataPP_POW
115 114
116 115 if hasattr(self.dataIn, 'dataPP_POWER'):
117 116 self.dataOut.dataPP_POWER = self.dataIn.dataPP_POWER
118 117
119 118 if hasattr(self.dataIn, 'dataPP_DOP'):
120 119 self.dataOut.dataPP_DOP = self.dataIn.dataPP_DOP
121 120
122 121 if hasattr(self.dataIn, 'dataPP_SNR'):
123 122 self.dataOut.dataPP_SNR = self.dataIn.dataPP_SNR
124 123
125 124 if hasattr(self.dataIn, 'dataPP_WIDTH'):
126 125 self.dataOut.dataPP_WIDTH = self.dataIn.dataPP_WIDTH
127 126 return
128 127
129 128 #---------------------- Spectra Data ---------------------------
130 129
131 130 if self.dataIn.type == "Spectra":
132 131
133 132 self.dataOut.data_pre = [self.dataIn.data_spc, self.dataIn.data_cspc]
134 133 self.dataOut.data_spc = self.dataIn.data_spc
135 134 self.dataOut.data_cspc = self.dataIn.data_cspc
136 135 self.dataOut.nProfiles = self.dataIn.nProfiles
137 136 self.dataOut.nIncohInt = self.dataIn.nIncohInt
138 137 self.dataOut.nFFTPoints = self.dataIn.nFFTPoints
139 138 self.dataOut.ippFactor = self.dataIn.ippFactor
140 139 self.dataOut.abscissaList = self.dataIn.getVelRange(1)
141 140 self.dataOut.spc_noise = self.dataIn.getNoise()
142 141 self.dataOut.spc_range = (self.dataIn.getFreqRange(1) , self.dataIn.getAcfRange(1) , self.dataIn.getVelRange(1))
143 142 # self.dataOut.normFactor = self.dataIn.normFactor
144 143 self.dataOut.pairsList = self.dataIn.pairsList
145 144 self.dataOut.groupList = self.dataIn.pairsList
146 145 self.dataOut.flagNoData = False
147 146
148 147 if hasattr(self.dataIn, 'ChanDist'): #Distances of receiver channels
149 148 self.dataOut.ChanDist = self.dataIn.ChanDist
150 149 else: self.dataOut.ChanDist = None
151 150
152 151 #if hasattr(self.dataIn, 'VelRange'): #Velocities range
153 152 # self.dataOut.VelRange = self.dataIn.VelRange
154 153 #else: self.dataOut.VelRange = None
155 154
156 155 if hasattr(self.dataIn, 'RadarConst'): #Radar Constant
157 156 self.dataOut.RadarConst = self.dataIn.RadarConst
158 157
159 158 if hasattr(self.dataIn, 'NPW'): #NPW
160 159 self.dataOut.NPW = self.dataIn.NPW
161 160
162 161 if hasattr(self.dataIn, 'COFA'): #COFA
163 162 self.dataOut.COFA = self.dataIn.COFA
164 163
165 164
166 165
167 166 #---------------------- Correlation Data ---------------------------
168 167
169 168 if self.dataIn.type == "Correlation":
170 169 acf_ind, ccf_ind, acf_pairs, ccf_pairs, data_acf, data_ccf = self.dataIn.splitFunctions()
171 170
172 171 self.dataOut.data_pre = (self.dataIn.data_cf[acf_ind,:], self.dataIn.data_cf[ccf_ind,:,:])
173 172 self.dataOut.normFactor = (self.dataIn.normFactor[acf_ind,:], self.dataIn.normFactor[ccf_ind,:])
174 173 self.dataOut.groupList = (acf_pairs, ccf_pairs)
175 174
176 175 self.dataOut.abscissaList = self.dataIn.lagRange
177 176 self.dataOut.noise = self.dataIn.noise
178 177 self.dataOut.data_snr = self.dataIn.SNR
179 178 self.dataOut.flagNoData = False
180 179 self.dataOut.nAvg = self.dataIn.nAvg
181 180
182 181 #---------------------- Parameters Data ---------------------------
183 182
184 183 if self.dataIn.type == "Parameters":
185 184 self.dataOut.copy(self.dataIn)
186 185 self.dataOut.flagNoData = False
187 186 self.prin("DAta In")
188 187 return True
189 188
190 189 self.__updateObjFromInput()
191 190 self.dataOut.utctimeInit = self.dataIn.utctime
192 191 self.dataOut.paramInterval = self.dataIn.timeInterval
193 192
194 193 return
195 194
196 195
197 196 def target(tups):
198 197
199 198 obj, args = tups
200 199
201 200 return obj.FitGau(args)
202 201
203 202 class RemoveWideGC(Operation):
204 203 ''' This class remove the wide clutter and replace it with a simple interpolation points
205 204 This mainly applies to CLAIRE radar
206 205
207 206 ClutterWidth : Width to look for the clutter peak
208 207
209 208 Input:
210 209
211 210 self.dataOut.data_pre : SPC and CSPC
212 211 self.dataOut.spc_range : To select wind and rainfall velocities
213 212
214 213 Affected:
215 214
216 215 self.dataOut.data_pre : It is used for the new SPC and CSPC ranges of wind
217 216
218 217 Written by D. Scipión 25.02.2021
219 218 '''
220 219 def __init__(self):
221 220 Operation.__init__(self)
222 221 self.i = 0
223 222 self.ich = 0
224 223 self.ir = 0
225 224
226 225 def run(self, dataOut, ClutterWidth=2.5):
227 226 # print ('Entering RemoveWideGC ... ')
228 227
229 228 self.spc = dataOut.data_pre[0].copy()
230 229 self.spc_out = dataOut.data_pre[0].copy()
231 230 self.Num_Chn = self.spc.shape[0]
232 231 self.Num_Hei = self.spc.shape[2]
233 232 VelRange = dataOut.spc_range[2][:-1]
234 233 dv = VelRange[1]-VelRange[0]
235 234
236 235 # Find the velocities that corresponds to zero
237 236 gc_values = numpy.squeeze(numpy.where(numpy.abs(VelRange) <= ClutterWidth))
238 237
239 238 # Removing novalid data from the spectra
240 239 for ich in range(self.Num_Chn) :
241 240 for ir in range(self.Num_Hei) :
242 241 # Estimate the noise at each range
243 242 HSn = hildebrand_sekhon(self.spc[ich,:,ir],dataOut.nIncohInt)
244 243
245 244 # Removing the noise floor at each range
246 245 novalid = numpy.where(self.spc[ich,:,ir] < HSn)
247 246 self.spc[ich,novalid,ir] = HSn
248 247
249 248 junk = numpy.append(numpy.insert(numpy.squeeze(self.spc[ich,gc_values,ir]),0,HSn),HSn)
250 249 j1index = numpy.squeeze(numpy.where(numpy.diff(junk)>0))
251 250 j2index = numpy.squeeze(numpy.where(numpy.diff(junk)<0))
252 251 if ((numpy.size(j1index)<=1) | (numpy.size(j2index)<=1)) :
253 252 continue
254 253 junk3 = numpy.squeeze(numpy.diff(j1index))
255 254 junk4 = numpy.squeeze(numpy.diff(j2index))
256 255
257 256 valleyindex = j2index[numpy.where(junk4>1)]
258 257 peakindex = j1index[numpy.where(junk3>1)]
259 258
260 259 isvalid = numpy.squeeze(numpy.where(numpy.abs(VelRange[gc_values[peakindex]]) <= 2.5*dv))
261 260 if numpy.size(isvalid) == 0 :
262 261 continue
263 262 if numpy.size(isvalid) >1 :
264 263 vindex = numpy.argmax(self.spc[ich,gc_values[peakindex[isvalid]],ir])
265 264 isvalid = isvalid[vindex]
266 265
267 266 # clutter peak
268 267 gcpeak = peakindex[isvalid]
269 268 vl = numpy.where(valleyindex < gcpeak)
270 269 if numpy.size(vl) == 0:
271 270 continue
272 271 gcvl = valleyindex[vl[0][-1]]
273 272 vr = numpy.where(valleyindex > gcpeak)
274 273 if numpy.size(vr) == 0:
275 274 continue
276 275 gcvr = valleyindex[vr[0][0]]
277 276
278 277 # Removing the clutter
279 278 interpindex = numpy.array([gc_values[gcvl], gc_values[gcvr]])
280 279 gcindex = gc_values[gcvl+1:gcvr-1]
281 280 self.spc_out[ich,gcindex,ir] = numpy.interp(VelRange[gcindex],VelRange[interpindex],self.spc[ich,interpindex,ir])
282 281
283 282 dataOut.data_pre[0] = self.spc_out
284 283 #print ('Leaving RemoveWideGC ... ')
285 284 return dataOut
286 285
287 286 class SpectralFilters(Operation):
288 287 ''' This class allows to replace the novalid values with noise for each channel
289 288 This applies to CLAIRE RADAR
290 289
291 290 PositiveLimit : RightLimit of novalid data
292 291 NegativeLimit : LeftLimit of novalid data
293 292
294 293 Input:
295 294
296 295 self.dataOut.data_pre : SPC and CSPC
297 296 self.dataOut.spc_range : To select wind and rainfall velocities
298 297
299 298 Affected:
300 299
301 300 self.dataOut.data_pre : It is used for the new SPC and CSPC ranges of wind
302 301
303 302 Written by D. Scipión 29.01.2021
304 303 '''
305 304 def __init__(self):
306 305 Operation.__init__(self)
307 306 self.i = 0
308 307
309 308 def run(self, dataOut, ):
310 309
311 310 self.spc = dataOut.data_pre[0].copy()
312 311 self.Num_Chn = self.spc.shape[0]
313 312 VelRange = dataOut.spc_range[2]
314 313
315 314 # novalid corresponds to data within the Negative and PositiveLimit
316 315
317 316
318 317 # Removing novalid data from the spectra
319 318 for i in range(self.Num_Chn):
320 319 self.spc[i,novalid,:] = dataOut.noise[i]
321 320 dataOut.data_pre[0] = self.spc
322 321 return dataOut
323 322
class GaussianFit(Operation):

    '''
    Fit one or two generalized Gaussians (gg) to each measured power
    spectrum. The "power band" to fit is identified from a cumsum of
    the measured spectrum minus noise.

    Input:
        self.dataOut.data_pre : SelfSpectra

    Output:
        self.dataOut.DGauFitParams : per channel [noise, amplitude, shift,
                                     width, power] for both Gaussians
        self.dataOut.GaussFit0/1   : reconstructed Gaussian curves
    '''
    def __init__(self):
        Operation.__init__(self)
        self.i = 0

    def run(self, dataOut, SNRdBlimit=-9, method='generalized'):
        """Find a couple of generalized Gaussians for each power spectrum.

        Parameters
        ----------
        SNRdBlimit : float
            Heights whose SNR (dB) is below this limit are skipped.
        method : str
            'squared' fixes the Gaussian exponent p to 2; any other value
            (default 'generalized') uses the fitted exponent.

        Returns dataOut with DGauFitParams, GaussFit0 and GaussFit1 set.
        """
        print('Entering ', method, ' double Gaussian fit')
        self.spc = dataOut.data_pre[0].copy()
        self.Num_Hei = self.spc.shape[2]
        self.Num_Bin = self.spc.shape[1]
        self.Num_Chn = self.spc.shape[0]

        # One worker per channel: `target` (module-level helper) unpacks
        # (self, args) and runs FitGau. The `with` block guarantees the
        # worker processes are released (the original never closed the pool).
        args = [(dataOut.spc_range[2], ich, dataOut.spc_noise[ich], dataOut.nIncohInt, SNRdBlimit) for ich in range(self.Num_Chn)]
        objs = [self for __ in range(self.Num_Chn)]
        attrs = list(zip(objs, args))
        with Pool(processes=self.Num_Chn) as pool:
            DGauFitParam = pool.map(target, attrs)
        # Parameters:
        # 0. Noise, 1. Amplitude, 2. Shift, 3. Width 4. Power
        dataOut.DGauFitParams = numpy.asarray(DGauFitParam)

        # Double Gaussian Curves rebuilt from the fitted parameters
        gau0 = numpy.zeros([self.Num_Chn, self.Num_Bin, self.Num_Hei])
        gau0[:] = numpy.NaN
        gau1 = numpy.zeros([self.Num_Chn, self.Num_Bin, self.Num_Hei])
        gau1[:] = numpy.NaN
        x_mtr = numpy.transpose(numpy.tile(dataOut.getVelRange(1)[:-1], (self.Num_Hei, 1)))
        for iCh in range(self.Num_Chn):
            # replicate each fitted parameter over the velocity axis
            N0 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][0, :, 0]] * self.Num_Bin))
            N1 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][0, :, 1]] * self.Num_Bin))
            A0 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][1, :, 0]] * self.Num_Bin))
            A1 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][1, :, 1]] * self.Num_Bin))
            v0 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][2, :, 0]] * self.Num_Bin))
            v1 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][2, :, 1]] * self.Num_Bin))
            s0 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][3, :, 0]] * self.Num_Bin))
            s1 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][3, :, 1]] * self.Num_Bin))
            if method == 'squared':
                p0 = 2.
                p1 = 2.
            else:
                # BUGFIX: the original compared against the misspelled
                # 'genealized', so the default method='generalized' matched
                # neither branch and left p0/p1 undefined (NameError).
                p0 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][4, :, 0]] * self.Num_Bin))
                p1 = numpy.transpose(numpy.transpose([dataOut.DGauFitParams[iCh][4, :, 1]] * self.Num_Bin))
            gau0[iCh] = A0 * numpy.exp(-0.5 * numpy.abs((x_mtr - v0) / s0)**p0) + N0
            gau1[iCh] = A1 * numpy.exp(-0.5 * numpy.abs((x_mtr - v1) / s1)**p1) + N1
        dataOut.GaussFit0 = gau0
        dataOut.GaussFit1 = gau1

        print('Leaving ', method, ' double Gaussian fit')
        return dataOut

    def FitGau(self, X):
        """Fit the double (generalized) Gaussian for one channel.

        X : tuple (Vrange, ch, wnoise, num_intg, SNRlimit)
        Returns DGauFitParam[5, Num_Hei, 2]: noise, amplitude, shift (as
        velocity), width (as velocity), power for both Gaussians; NaN where
        no fit was possible.
        """
        Vrange, ch, wnoise, num_intg, SNRlimit = X
        # Noise Limits used to bound the fitted noise level
        noisebl = wnoise * 0.9
        noisebh = wnoise * 1.1
        # Radar (aliasing) Velocity and bin resolution
        Va = max(Vrange)
        deltav = Vrange[1] - Vrange[0]
        x = numpy.arange(self.Num_Bin)

        # 5 parameters, 2 Gaussians
        DGauFitParam = numpy.zeros([5, self.Num_Hei, 2])
        DGauFitParam[:] = numpy.NaN

        for ht in range(self.Num_Hei):
            # Spectra at each range
            spc = numpy.asarray(self.spc)[ch, :, ht]
            snr = (spc.mean() - wnoise) / wnoise
            snrdB = 10. * numpy.log10(snr)
            if snrdB < SNRlimit:
                # SNR below threshold: leave this height as NaN
                continue

            fatspectra = 1.0
            spc = spc - wnoise  # signal only

            # Rotate so the spectral minimum sits at bin 0, then find the band
            # holding the central part of the cumulative energy.
            minx = numpy.argmin(spc)
            spcs = numpy.roll(spc, -minx)
            cum = numpy.cumsum(spcs)
            cummax = max(cum)
            epsi = 0.08 * fatspectra  # cumsum margin to narrow down the energy region
            cumlo = cummax * epsi
            cumhi = cummax * (1 - epsi)
            powerindex = numpy.array(numpy.where(numpy.logical_and(cum > cumlo, cum < cumhi))[0])

            if len(powerindex) < 1:  # case for powerindex 0
                continue
            powerlo = powerindex[0]
            powerhi = powerindex[-1]
            powerwidth = powerhi - powerlo
            if powerwidth <= 1:
                continue

            # Initial guesses inside the energy band
            firstpeak = powerlo + powerwidth / 10.   # first gaussian energy location
            secondpeak = powerhi - powerwidth / 10.  # second gaussian energy location
            midpeak = (firstpeak + secondpeak) / 2.
            firstamp = spcs[int(firstpeak)]
            secondamp = spcs[int(secondpeak)]
            midamp = spcs[int(midpeak)]

            y_data = spc + wnoise

            ''' single Gaussian '''
            shift0 = numpy.mod(midpeak + minx, self.Num_Bin)
            width0 = powerwidth / 4.  # Initialization: entire power of spectrum divided by 4
            power0 = 2.
            amplitude0 = midamp
            state0 = [shift0, width0, amplitude0, power0, wnoise]
            bnds = ((0, self.Num_Bin - 1), (1, powerwidth), (0, None), (0.5, 3.), (noisebl, noisebh))
            lsq1 = fmin_l_bfgs_b(self.misfit1, state0, args=(y_data, x, num_intg), bounds=bnds, approx_grad=True)
            chiSq1 = lsq1[1]

            if fatspectra < 1.0 and powerwidth < 4:
                choice = 0
                Amplitude0 = lsq1[0][2]
                shift0 = lsq1[0][0]
                width0 = lsq1[0][1]
                p0 = lsq1[0][3]
                Amplitude1 = 0.
                shift1 = 0.
                width1 = 0.
                p1 = 0.
                noise = lsq1[0][4]

            ''' two Gaussians '''
            shift0 = numpy.mod(firstpeak + minx, self.Num_Bin)
            shift1 = numpy.mod(secondpeak + minx, self.Num_Bin)
            width0 = powerwidth / 6.
            width1 = width0
            power0 = 2.
            power1 = power0
            amplitude0 = firstamp
            amplitude1 = secondamp
            state0 = [shift0, width0, amplitude0, power0, shift1, width1, amplitude1, power1, wnoise]
            bnds = ((0, self.Num_Bin - 1), (1, powerwidth / 2.), (0, None), (0.5, 3.),
                    (0, self.Num_Bin - 1), (1, powerwidth / 2.), (0, None), (0.5, 3.), (noisebl, noisebh))
            lsq2 = fmin_l_bfgs_b(self.misfit2, state0, args=(y_data, x, num_intg), bounds=bnds, approx_grad=True)
            chiSq2 = lsq2[1]

            # Is one Gaussian enough? (good chi^2 and both fitted centers close)
            oneG = (chiSq1 < 5 and chiSq1 / chiSq2 < 2.0) and (abs(lsq2[0][0] - lsq2[0][4]) < (lsq2[0][1] + lsq2[0][5]) / 3. or abs(lsq2[0][0] - lsq2[0][4]) < 10)

            if snrdB > -12:  # when SNR is strong pick the peak with least shift (LOS velocity) error
                if oneG:
                    choice = 0
                else:
                    w1 = lsq2[0][1]; w2 = lsq2[0][5]
                    a1 = lsq2[0][2]; a2 = lsq2[0][6]
                    p1 = lsq2[0][3]; p2 = lsq2[0][7]
                    s1 = (2**(1 + 1. / p1)) * scipy.special.gamma(1. / p1) / p1
                    s2 = (2**(1 + 1. / p2)) * scipy.special.gamma(1. / p2) / p2
                    gp1 = a1 * w1 * s1; gp2 = a2 * w2 * s2  # power content of each gaussian with proper p scaling

                    if gp1 > gp2:
                        if a1 > 0.7 * a2:
                            choice = 1
                        else:
                            choice = 2
                    elif gp2 > gp1:
                        if a2 > 0.7 * a1:
                            choice = 2
                        else:
                            choice = 1
                    else:
                        choice = numpy.argmax([a1, a2]) + 1
            else:  # with low SNR go to the most energetic peak
                choice = numpy.argmax([lsq1[0][2] * lsq1[0][1], lsq2[0][2] * lsq2[0][1], lsq2[0][6] * lsq2[0][5]])

            # Order the two fitted Gaussians: peak 0 is the one whose velocity
            # lies inside the unambiguous range (-Va, Va).
            shift0 = lsq2[0][0]
            vel0 = Vrange[0] + shift0 * deltav
            shift1 = lsq2[0][4]
            if vel0 > -Va and vel0 < Va:  # first peak is in the correct range
                shift0 = lsq2[0][0]
                width0 = lsq2[0][1]
                Amplitude0 = lsq2[0][2]
                p0 = lsq2[0][3]

                shift1 = lsq2[0][4]
                width1 = lsq2[0][5]
                Amplitude1 = lsq2[0][6]
                p1 = lsq2[0][7]
                noise = lsq2[0][8]
            else:
                shift1 = lsq2[0][0]
                width1 = lsq2[0][1]
                Amplitude1 = lsq2[0][2]
                p1 = lsq2[0][3]

                shift0 = lsq2[0][4]
                width0 = lsq2[0][5]
                Amplitude0 = lsq2[0][6]
                p0 = lsq2[0][7]
                noise = lsq2[0][8]

            if Amplitude0 < 0.05:  # in case the peak is noise
                shift0, width0, Amplitude0, p0 = 4 * [numpy.NaN]
            if Amplitude1 < 0.05:
                shift1, width1, Amplitude1, p1 = 4 * [numpy.NaN]

            DGauFitParam[0, ht, 0] = noise
            DGauFitParam[0, ht, 1] = noise
            DGauFitParam[1, ht, 0] = Amplitude0
            DGauFitParam[1, ht, 1] = Amplitude1
            DGauFitParam[2, ht, 0] = Vrange[0] + shift0 * deltav
            DGauFitParam[2, ht, 1] = Vrange[0] + shift1 * deltav
            DGauFitParam[3, ht, 0] = width0 * deltav
            DGauFitParam[3, ht, 1] = width1 * deltav
            DGauFitParam[4, ht, 0] = p0
            DGauFitParam[4, ht, 1] = p1

        return DGauFitParam

    def y_model1(self, x, state):
        """Single generalized Gaussian plus its two Nyquist-aliased copies and noise."""
        shift0, width0, amplitude0, power0, noise = state
        model0 = amplitude0 * numpy.exp(-0.5 * abs((x - shift0) / width0)**power0)
        model0u = amplitude0 * numpy.exp(-0.5 * abs((x - shift0 - self.Num_Bin) / width0)**power0)
        model0d = amplitude0 * numpy.exp(-0.5 * abs((x - shift0 + self.Num_Bin) / width0)**power0)
        return model0 + model0u + model0d + noise

    def y_model2(self, x, state):
        """Two generalized Gaussians plus their Nyquist-aliased copies and noise."""
        shift0, width0, amplitude0, power0, shift1, width1, amplitude1, power1, noise = state
        model0 = amplitude0 * numpy.exp(-0.5 * abs((x - shift0) / width0)**power0)
        model0u = amplitude0 * numpy.exp(-0.5 * abs((x - shift0 - self.Num_Bin) / width0)**power0)
        model0d = amplitude0 * numpy.exp(-0.5 * abs((x - shift0 + self.Num_Bin) / width0)**power0)

        model1 = amplitude1 * numpy.exp(-0.5 * abs((x - shift1) / width1)**power1)
        model1u = amplitude1 * numpy.exp(-0.5 * abs((x - shift1 - self.Num_Bin) / width1)**power1)
        model1d = amplitude1 * numpy.exp(-0.5 * abs((x - shift1 + self.Num_Bin) / width1)**power1)
        return model0 + model0u + model0d + model1 + model1u + model1d + noise

    def misfit1(self, state, y_data, x, num_intg):
        """Log-domain chi-square misfit of the 1-Gaussian model vs. the data."""
        return num_intg * sum((numpy.log(y_data) - numpy.log(self.y_model1(x, state)))**2)  # /(64-5.) can be commented

    def misfit2(self, state, y_data, x, num_intg):
        """Log-domain chi-square misfit of the 2-Gaussian model vs. the data."""
        return num_intg * sum((numpy.log(y_data) - numpy.log(self.y_model2(x, state)))**2)  # /(64-9.)
679 678
680 679
681 680
class PrecipitationProc(Operation):

    '''
    Operator that estimates Reflectivity factor (Z), and estimates rainfall Rate (R)

    Input:
        self.dataOut.data_pre : SelfSpectra

    Output:

        self.dataOut.data_output : rainfall rate at a fixed gate index
        self.dataOut.data_param  : [dBZe, -W (mean fall speed), RR] per height


    Parameters affected:
    '''

    def __init__(self):
        Operation.__init__(self)
        self.i = 0

    def run(self, dataOut, radar=None, Pt=5000, Gt=295.1209, Gr=70.7945, Lambda=0.6741, aL=2.5118,
            tauW=4e-06, ThetaT=0.1656317, ThetaR=0.36774087, Km2=0.93, Altitude=3350, SNRdBlimit=-30):
        """Estimate Ze and rainfall rate from the self-spectra.

        Parameters are the radar constants (power, gains, wavelength, losses,
        pulse width, beam angles), |K|^2 (Km2), the site Altitude in m, and
        the SNR censoring threshold in dB.
        """
        if radar == "MIRA35C":

            self.spc = dataOut.data_pre[0].copy()
            self.Num_Hei = self.spc.shape[2]
            self.Num_Bin = self.spc.shape[1]
            self.Num_Chn = self.spc.shape[0]
            Ze = self.dBZeMODE2(dataOut)

        else:

            self.spc = dataOut.data_pre[0].copy()

            # NOTA SE DEBE REMOVER EL RANGO DEL PULSO TX (blank the Tx pulse gates)
            self.spc[:, :, 0:7] = numpy.NaN

            self.Num_Hei = self.spc.shape[2]
            self.Num_Bin = self.spc.shape[1]
            self.Num_Chn = self.spc.shape[0]

            VelRange = dataOut.spc_range[2]

            ''' Se obtiene la constante del RADAR '''

            self.Pt = Pt
            self.Gt = Gt
            self.Gr = Gr
            self.Lambda = Lambda
            self.aL = aL
            self.tauW = tauW
            self.ThetaT = ThetaT
            self.ThetaR = ThetaR
            self.GSys = 10**(36.63 / 10)  # Ganancia de los LNA 36.63 dB
            self.lt = 10**(1.67 / 10)     # Perdida en cables Tx 1.67 dB
            self.lr = 10**(5.73 / 10)     # Perdida en cables Rx 5.73 dB

            Numerator = ((4 * numpy.pi)**3 * aL**2 * 16 * numpy.log(2))
            Denominator = (Pt * Gt * Gr * Lambda**2 * SPEED_OF_LIGHT * tauW * numpy.pi * ThetaT * ThetaR)
            RadarConstant = 10e-26 * Numerator / Denominator
            ExpConstant = 10**(40 / 10)  # Constante Experimental

            # Per-channel signal power (noise removed, floored at 1e-20)
            SignalPower = numpy.zeros([self.Num_Chn, self.Num_Bin, self.Num_Hei])
            for i in range(self.Num_Chn):
                SignalPower[i, :, :] = self.spc[i, :, :] - dataOut.noise[i]
            SignalPower[numpy.where(SignalPower < 0)] = 1e-20

            SPCmean = numpy.mean(SignalPower, 0)
            Pr = SPCmean[:, :] / dataOut.normFactor

            # replicate the heightList to obtain a matrix [Num_Bin, Num_Hei]; range in m
            rMtrx = numpy.transpose(numpy.transpose([dataOut.heightList * 1000.] * self.Num_Bin))
            zMtrx = rMtrx + Altitude
            # replicate the VelRange to obtain a matrix [Num_Bin, Num_Hei]
            VelMtrx = numpy.transpose(numpy.tile(VelRange[:-1], (self.Num_Hei, 1)))

            # height dependence to air density Foote and Du Toit (1969)
            delv_z = 1 + 3.68e-5 * zMtrx + 1.71e-9 * zMtrx**2
            VMtrx = VelMtrx / delv_z  # Normalized velocity
            VMtrx[numpy.where(VMtrx > 9.6)] = numpy.NaN
            # Diameter is related to the fall speed of falling drops
            D_Vz = -1.667 * numpy.log(0.9369 - 0.097087 * VMtrx)  # D in [mm]
            # Only valid for D >= 0.16 mm
            D_Vz[numpy.where(D_Vz < 0.16)] = numpy.NaN

            # Calculate Radar Reflectivity ETAn.
            # NOTE: the dead locals ETAd / Ze_eqn / Range of the previous
            # revision were removed; ETAd also called an unimported `exp`.
            ETAn = (RadarConstant * ExpConstant) * Pr * rMtrx**2  # Reflectivity (ETA)
            # Radar Cross Section
            sigmaD = Km2 * (D_Vz * 1e-3)**6 * numpy.pi**5 / Lambda**4
            # Drop Size Distribution
            DSD = ETAn / sigmaD
            # Equivalent Reflectivity
            Ze_org = numpy.nansum(ETAn * Lambda**4, axis=0) / (1e-18 * numpy.pi**5 * Km2)  # [mm^6 /m^3]
            # RainFall Rate
            RR = 0.0006 * numpy.pi * numpy.nansum(D_Vz**3 * DSD * VelMtrx, 0)  # mm/hr

            # Censoring the data
            # Removing data with SNRth < 0dB se debe considerar el SNR por canal
            SNRth = 10**(SNRdBlimit / 10)  # -30dB
            novalid = numpy.where((dataOut.data_snr[0, :] < SNRth) | (dataOut.data_snr[1, :] < SNRth) | (dataOut.data_snr[2, :] < SNRth))  # AND condition. Maybe OR condition better
            W = numpy.nanmean(dataOut.data_dop, 0)
            W[novalid] = numpy.NaN
            Ze_org[novalid] = numpy.NaN
            RR[novalid] = numpy.NaN

            # NOTE(review): gate index 8 appears hard-coded — confirm it is the
            # intended reference height for the scalar output.
            dataOut.data_output = RR[8]
            dataOut.data_param = numpy.ones([3, self.Num_Hei])
            dataOut.channelList = [0, 1, 2]

            dataOut.data_param[0] = 10 * numpy.log10(Ze_org)
            dataOut.data_param[1] = -W
            dataOut.data_param[2] = RR

        return dataOut

    def dBZeMODE2(self, dataOut):  # Processing for MIRA35C
        """Compute Ze for MIRA35C data from NPW/COFA calibration values."""
        NPW = dataOut.NPW
        COFA = dataOut.COFA

        SNR = numpy.array([self.spc[0, :, :] / NPW[0]])  # , self.spc[1,:,:] / NPW[1]])
        RadarConst = dataOut.RadarConst

        ETA = numpy.zeros(([self.Num_Chn, self.Num_Hei]))
        data_output = numpy.ones([self.Num_Chn, self.Num_Hei]) * numpy.NaN

        ETA = numpy.sum(SNR, 1)
        ETA = numpy.where(ETA != 0., ETA, numpy.NaN)

        Ze = numpy.ones([self.Num_Chn, self.Num_Hei])

        for r in range(self.Num_Hei):
            # range-squared correction referenced to 5 km
            Ze[0, r] = (ETA[0, r]) * COFA[0, r][0] * RadarConst * ((r / 5000.)**2)
            # Ze[1,r] = ( ETA[1,r] ) * COFA[1,r][0] * RadarConst * ((r/5000.)**2)

        return Ze
851 850
852 851
853 852
854 853 class FullSpectralAnalysis(Operation):
855 854
856 855 """
857 856 Function that implements Full Spectral Analysis technique.
858 857
859 858 Input:
860 859 self.dataOut.data_pre : SelfSpectra and CrossSpectra data
861 860 self.dataOut.groupList : Pairlist of channels
862 861 self.dataOut.ChanDist : Physical distance between receivers
863 862
864 863
865 864 Output:
866 865
867 866 self.dataOut.data_output : Zonal wind, Meridional wind, and Vertical wind
868 867
869 868
870 869 Parameters affected: Winds, height range, SNR
871 870
872 871 """
    def run(self, dataOut, Xi01=None, Xi02=None, Xi12=None, Eta01=None, Eta02=None, Eta12=None, SNRdBlimit=-30,
            minheight=None, maxheight=None, NegativeLimit=None, PositiveLimit=None):
        """Estimate zonal, meridional and vertical winds for every height.

        Parameters
        ----------
        dataOut : data object providing data_pre (spc, cspc), groupList,
            ChanDist, noise, spc_range, data_snr and frequency.
        Xi01..Eta12 : float, optional
            Receiver separations; used only when dataOut.ChanDist is None.
        SNRdBlimit : float
            SNR (dB) below which the reported SNR is censored to NaN.
        minheight, maxheight : float, optional
            Height limits (km), converted to gate indices with an empirical
            linear mapping (see inline comments).
        NegativeLimit, PositiveLimit : float, optional
            Velocity window forwarded to WindEstimation.

        Sets dataOut.data_param = [Vzon, -Vmer, Vver, SNR(dB)] per height.
        """

        spc = dataOut.data_pre[0].copy()
        cspc = dataOut.data_pre[1]
        nHeights = spc.shape[2]

        # first_height = 0.75 #km (ref: data header 20170822)
        # resolution_height = 0.075 #km
        '''
        finding height range. check this when radar parameters are changed!
        '''
        if maxheight is not None:
            # range_max = math.ceil((maxheight - first_height) / resolution_height) # theoretical
            range_max = math.ceil(13.26 * maxheight - 3) # empirical, works better
        else:
            range_max = nHeights
        if minheight is not None:
            # range_min = int((minheight - first_height) / resolution_height) # theoretical
            range_min = int(13.26 * minheight - 5) # empirical, works better
            if range_min < 0:
                range_min = 0
        else:
            range_min = 0

        pairsList = dataOut.groupList
        if dataOut.ChanDist is not None :
            ChanDist = dataOut.ChanDist
        else:
            ChanDist = numpy.array([[Xi01, Eta01],[Xi02,Eta02],[Xi12,Eta12]])

        # 4 variables: zonal, meridional, vertical, and average SNR
        data_param = numpy.zeros([4,nHeights]) * numpy.NaN
        velocityX = numpy.zeros([nHeights]) * numpy.NaN
        velocityY = numpy.zeros([nHeights]) * numpy.NaN
        velocityZ = numpy.zeros([nHeights]) * numpy.NaN

        # channel-averaged SNR in dB for each height
        dbSNR = 10*numpy.log10(numpy.average(dataOut.data_snr,0))

        '''***********************************************WIND ESTIMATION**************************************'''
        for Height in range(nHeights):

            if Height >= range_min and Height < range_max:
                # error_code will be useful in future analysis
                [Vzon,Vmer,Vver, error_code] = self.WindEstimation(spc[:,:,Height], cspc[:,:,Height], pairsList,
                    ChanDist, Height, dataOut.noise, dataOut.spc_range, dbSNR[Height], SNRdBlimit, NegativeLimit, PositiveLimit,dataOut.frequency)

                # |V| >= 100 m/s is treated as a failed estimate; height stays NaN
                if abs(Vzon) < 100. and abs(Vmer) < 100.:
                    velocityX[Height] = Vzon
                    velocityY[Height] = -Vmer  # sign convention: meridional wind negated
                    velocityZ[Height] = Vver

        # Censoring data with SNR threshold
        dbSNR [dbSNR < SNRdBlimit] = numpy.NaN

        data_param[0] = velocityX
        data_param[1] = velocityY
        data_param[2] = velocityZ
        data_param[3] = dbSNR
        dataOut.data_param = data_param
        return dataOut
934 933
935 934 def moving_average(self,x, N=2):
936 935 """ convolution for smoothenig data. note that last N-1 values are convolution with zeroes """
937 936 return numpy.convolve(x, numpy.ones((N,))/N)[(N-1):]
938 937
939 938 def gaus(self,xSamples,Amp,Mu,Sigma):
940 939 return Amp * numpy.exp(-0.5*((xSamples - Mu)/Sigma)**2)
941 940
942 941 def Moments(self, ySamples, xSamples):
943 942 Power = numpy.nanmean(ySamples) # Power, 0th Moment
944 943 yNorm = ySamples / numpy.nansum(ySamples)
945 944 RadVel = numpy.nansum(xSamples * yNorm) # Radial Velocity, 1st Moment
946 945 Sigma2 = numpy.nansum(yNorm * (xSamples - RadVel)**2) # Spectral Width, 2nd Moment
947 946 StdDev = numpy.sqrt(numpy.abs(Sigma2)) # Desv. Estandar, Ancho espectral
948 947 return numpy.array([Power,RadVel,StdDev])
949 948
950 949 def StopWindEstimation(self, error_code):
951 950 Vzon = numpy.NaN
952 951 Vmer = numpy.NaN
953 952 Vver = numpy.NaN
954 953 return Vzon, Vmer, Vver, error_code
955 954
956 955 def AntiAliasing(self, interval, maxstep):
957 956 """
958 957 function to prevent errors from aliased values when computing phaseslope
959 958 """
960 959 antialiased = numpy.zeros(len(interval))
961 960 copyinterval = interval.copy()
962 961
963 962 antialiased[0] = copyinterval[0]
964 963
965 964 for i in range(1,len(antialiased)):
966 965 step = interval[i] - interval[i-1]
967 966 if step > maxstep:
968 967 copyinterval -= 2*numpy.pi
969 968 antialiased[i] = copyinterval[i]
970 969 elif step < maxstep*(-1):
971 970 copyinterval += 2*numpy.pi
972 971 antialiased[i] = copyinterval[i]
973 972 else:
974 973 antialiased[i] = copyinterval[i].copy()
975 974
976 975 return antialiased
977 976
978 977 def WindEstimation(self, spc, cspc, pairsList, ChanDist, Height, noise, AbbsisaRange, dbSNR, SNRlimit, NegativeLimit, PositiveLimit, radfreq):
979 978 """
980 979 Function that Calculates Zonal, Meridional and Vertical wind velocities.
981 980 Initial Version by E. Bocanegra updated by J. Zibell until Nov. 2019.
982 981
983 982 Input:
984 983 spc, cspc : self spectra and cross spectra data. In Briggs notation something like S_i*(S_i)_conj, (S_j)_conj respectively.
985 984 pairsList : Pairlist of channels
986 985 ChanDist : array of xi_ij and eta_ij
987 986 Height : height at which data is processed
988 987 noise : noise in [channels] format for specific height
989 988 Abbsisarange : range of the frequencies or velocities
990 989 dbSNR, SNRlimit : signal to noise ratio in db, lower limit
991 990
992 991 Output:
993 992 Vzon, Vmer, Vver : wind velocities
994 993 error_code : int that states where code is terminated
995 994
996 995 0 : no error detected
997 996 1 : Gaussian of mean spc exceeds widthlimit
998 997 2 : no Gaussian of mean spc found
999 998 3 : SNR to low or velocity to high -> prec. e.g.
1000 999 4 : at least one Gaussian of cspc exceeds widthlimit
1001 1000 5 : zero out of three cspc Gaussian fits converged
1002 1001 6 : phase slope fit could not be found
1003 1002 7 : arrays used to fit phase have different length
1004 1003 8 : frequency range is either too short (len <= 5) or very long (> 30% of cspc)
1005 1004
1006 1005 """
1007 1006
1008 1007 error_code = 0
1009 1008
1010 1009 nChan = spc.shape[0]
1011 1010 nProf = spc.shape[1]
1012 1011 nPair = cspc.shape[0]
1013 1012
1014 1013 SPC_Samples = numpy.zeros([nChan, nProf]) # for normalized spc values for one height
1015 1014 CSPC_Samples = numpy.zeros([nPair, nProf], dtype=numpy.complex_) # for normalized cspc values
1016 1015 phase = numpy.zeros([nPair, nProf]) # phase between channels
1017 1016 PhaseSlope = numpy.zeros(nPair) # slope of the phases, channelwise
1018 1017 PhaseInter = numpy.zeros(nPair) # intercept to the slope of the phases, channelwise
1019 1018 xFrec = AbbsisaRange[0][:-1] # frequency range
1020 1019 xVel = AbbsisaRange[2][:-1] # velocity range
1021 1020 xSamples = xFrec # the frequency range is taken
1022 1021 delta_x = xSamples[1] - xSamples[0] # delta_f or delta_x
1023 1022
1024 1023 # only consider velocities with in NegativeLimit and PositiveLimit
1025 1024 if (NegativeLimit is None):
1026 1025 NegativeLimit = numpy.min(xVel)
1027 1026 if (PositiveLimit is None):
1028 1027 PositiveLimit = numpy.max(xVel)
1029 1028 xvalid = numpy.where((xVel > NegativeLimit) & (xVel < PositiveLimit))
1030 1029 xSamples_zoom = xSamples[xvalid]
1031 1030
1032 1031 '''Getting Eij and Nij'''
1033 1032 Xi01, Xi02, Xi12 = ChanDist[:,0]
1034 1033 Eta01, Eta02, Eta12 = ChanDist[:,1]
1035 1034
1036 1035 # spwd limit - updated by D. Scipión 30.03.2021
1037 1036 widthlimit = 10
1038 1037 '''************************* SPC is normalized ********************************'''
1039 1038 spc_norm = spc.copy()
1040 1039 # For each channel
1041 1040 for i in range(nChan):
1042 1041 spc_sub = spc_norm[i,:] - noise[i] # only the signal power
1043 1042 SPC_Samples[i] = spc_sub / (numpy.nansum(spc_sub) * delta_x)
1044 1043
1045 1044 '''********************** FITTING MEAN SPC GAUSSIAN **********************'''
1046 1045
1047 1046 """ the gaussian of the mean: first subtract noise, then normalize. this is legal because
1048 1047 you only fit the curve and don't need the absolute value of height for calculation,
1049 1048 only for estimation of width. for normalization of cross spectra, you need initial,
1050 1049 unnormalized self-spectra With noise.
1051 1050
1052 1051 Technically, you don't even need to normalize the self-spectra, as you only need the
1053 1052 width of the peak. However, it was left this way. Note that the normalization has a flaw:
1054 1053 due to subtraction of the noise, some values are below zero. Raw "spc" values should be
1055 1054 >= 0, as it is the modulus squared of the signals (complex * it's conjugate)
1056 1055 """
1057 1056 # initial conditions
1058 1057 popt = [1e-10,0,1e-10]
1059 1058 # Spectra average
1060 1059 SPCMean = numpy.average(SPC_Samples,0)
1061 1060 # Moments in frequency
1062 1061 SPCMoments = self.Moments(SPCMean[xvalid], xSamples_zoom)
1063 1062
1064 1063 # Gauss Fit SPC in frequency domain
1065 1064 if dbSNR > SNRlimit: # only if SNR > SNRth
1066 1065 try:
1067 1066 popt,pcov = curve_fit(self.gaus,xSamples_zoom,SPCMean[xvalid],p0=SPCMoments)
1068 1067 if popt[2] <= 0 or popt[2] > widthlimit: # CONDITION
1069 1068 return self.StopWindEstimation(error_code = 1)
1070 1069 FitGauss = self.gaus(xSamples_zoom,*popt)
1071 1070 except :#RuntimeError:
1072 1071 return self.StopWindEstimation(error_code = 2)
1073 1072 else:
1074 1073 return self.StopWindEstimation(error_code = 3)
1075 1074
1076 1075 '''***************************** CSPC Normalization *************************
1077 1076 The Spc spectra are used to normalize the crossspectra. Peaks from precipitation
1078 1077 influence the norm which is not desired. First, a range is identified where the
1079 1078 wind peak is estimated -> sum_wind is sum of those frequencies. Next, the area
1080 1079 around it gets cut off and values replaced by mean determined by the boundary
1081 1080 data -> sum_noise (spc is not normalized here, thats why the noise is important)
1082 1081
1083 1082 The sums are then added and multiplied by range/datapoints, because you need
1084 1083 an integral and not a sum for normalization.
1085 1084
1086 1085 A norm is found according to Briggs 92.
1087 1086 '''
1088 1087 # for each pair
1089 1088 for i in range(nPair):
1090 1089 cspc_norm = cspc[i,:].copy()
1091 1090 chan_index0 = pairsList[i][0]
1092 1091 chan_index1 = pairsList[i][1]
1093 1092 CSPC_Samples[i] = cspc_norm / (numpy.sqrt(numpy.nansum(spc_norm[chan_index0])*numpy.nansum(spc_norm[chan_index1])) * delta_x)
1094 1093 phase[i] = numpy.arctan2(CSPC_Samples[i].imag, CSPC_Samples[i].real)
1095 1094
1096 1095 CSPCmoments = numpy.vstack([self.Moments(numpy.abs(CSPC_Samples[0,xvalid]), xSamples_zoom),
1097 1096 self.Moments(numpy.abs(CSPC_Samples[1,xvalid]), xSamples_zoom),
1098 1097 self.Moments(numpy.abs(CSPC_Samples[2,xvalid]), xSamples_zoom)])
1099 1098
1100 1099 popt01, popt02, popt12 = [1e-10,0,1e-10], [1e-10,0,1e-10] ,[1e-10,0,1e-10]
1101 1100 FitGauss01, FitGauss02, FitGauss12 = numpy.zeros(len(xSamples)), numpy.zeros(len(xSamples)), numpy.zeros(len(xSamples))
1102 1101
1103 1102 '''*******************************FIT GAUSS CSPC************************************'''
1104 1103 try:
1105 1104 popt01,pcov = curve_fit(self.gaus,xSamples_zoom,numpy.abs(CSPC_Samples[0][xvalid]),p0=CSPCmoments[0])
1106 1105 if popt01[2] > widthlimit: # CONDITION
1107 1106 return self.StopWindEstimation(error_code = 4)
1108 1107 popt02,pcov = curve_fit(self.gaus,xSamples_zoom,numpy.abs(CSPC_Samples[1][xvalid]),p0=CSPCmoments[1])
1109 1108 if popt02[2] > widthlimit: # CONDITION
1110 1109 return self.StopWindEstimation(error_code = 4)
1111 1110 popt12,pcov = curve_fit(self.gaus,xSamples_zoom,numpy.abs(CSPC_Samples[2][xvalid]),p0=CSPCmoments[2])
1112 1111 if popt12[2] > widthlimit: # CONDITION
1113 1112 return self.StopWindEstimation(error_code = 4)
1114 1113
1115 1114 FitGauss01 = self.gaus(xSamples_zoom, *popt01)
1116 1115 FitGauss02 = self.gaus(xSamples_zoom, *popt02)
1117 1116 FitGauss12 = self.gaus(xSamples_zoom, *popt12)
1118 1117 except:
1119 1118 return self.StopWindEstimation(error_code = 5)
1120 1119
1121 1120
1122 1121 '''************* Getting Fij ***************'''
1123 1122 # x-axis point of the gaussian where the center is located from GaussFit of spectra
1124 1123 GaussCenter = popt[1]
1125 1124 ClosestCenter = xSamples_zoom[numpy.abs(xSamples_zoom-GaussCenter).argmin()]
1126 1125 PointGauCenter = numpy.where(xSamples_zoom==ClosestCenter)[0][0]
1127 1126
1128 1127 # Point where e^-1 is located in the gaussian
1129 1128 PeMinus1 = numpy.max(FitGauss) * numpy.exp(-1)
1130 1129 FijClosest = FitGauss[numpy.abs(FitGauss-PeMinus1).argmin()] # The closest point to"Peminus1" in "FitGauss"
1131 1130 PointFij = numpy.where(FitGauss==FijClosest)[0][0]
1132 1131 Fij = numpy.abs(xSamples_zoom[PointFij] - xSamples_zoom[PointGauCenter])
1133 1132
1134 1133 '''********** Taking frequency ranges from mean SPCs **********'''
1135 1134 GauWidth = popt[2] * 3/2 # Bandwidth of Gau01
1136 1135 Range = numpy.empty(2)
1137 1136 Range[0] = GaussCenter - GauWidth
1138 1137 Range[1] = GaussCenter + GauWidth
1139 1138 # Point in x-axis where the bandwidth is located (min:max)
1140 1139 ClosRangeMin = xSamples_zoom[numpy.abs(xSamples_zoom-Range[0]).argmin()]
1141 1140 ClosRangeMax = xSamples_zoom[numpy.abs(xSamples_zoom-Range[1]).argmin()]
1142 1141 PointRangeMin = numpy.where(xSamples_zoom==ClosRangeMin)[0][0]
1143 1142 PointRangeMax = numpy.where(xSamples_zoom==ClosRangeMax)[0][0]
1144 1143 Range = numpy.array([ PointRangeMin, PointRangeMax ])
1145 1144 FrecRange = xSamples_zoom[ Range[0] : Range[1] ]
1146 1145
1147 1146 '''************************** Getting Phase Slope ***************************'''
1148 1147 for i in range(nPair):
1149 1148 if len(FrecRange) > 5:
1150 1149 PhaseRange = phase[i, xvalid[0][Range[0]:Range[1]]].copy()
1151 1150 mask = ~numpy.isnan(FrecRange) & ~numpy.isnan(PhaseRange)
1152 1151 if len(FrecRange) == len(PhaseRange):
1153 1152 try:
1154 1153 slope, intercept, _, _, _ = stats.linregress(FrecRange[mask], self.AntiAliasing(PhaseRange[mask], 4.5))
1155 1154 PhaseSlope[i] = slope
1156 1155 PhaseInter[i] = intercept
1157 1156 except:
1158 1157 return self.StopWindEstimation(error_code = 6)
1159 1158 else:
1160 1159 return self.StopWindEstimation(error_code = 7)
1161 1160 else:
1162 1161 return self.StopWindEstimation(error_code = 8)
1163 1162
1164 1163 '''*** Constants A-H correspond to the convention as in Briggs and Vincent 1992 ***'''
1165 1164
1166 1165 '''Getting constant C'''
1167 1166 cC=(Fij*numpy.pi)**2
1168 1167
1169 1168 '''****** Getting constants F and G ******'''
1170 1169 MijEijNij = numpy.array([[Xi02,Eta02], [Xi12,Eta12]])
1171 1170 # MijEijNij = numpy.array([[Xi01,Eta01], [Xi02,Eta02], [Xi12,Eta12]])
1172 1171 # MijResult0 = (-PhaseSlope[0] * cC) / (2*numpy.pi)
1173 1172 MijResult1 = (-PhaseSlope[1] * cC) / (2*numpy.pi)
1174 1173 MijResult2 = (-PhaseSlope[2] * cC) / (2*numpy.pi)
1175 1174 # MijResults = numpy.array([MijResult0, MijResult1, MijResult2])
1176 1175 MijResults = numpy.array([MijResult1, MijResult2])
1177 1176 (cF,cG) = numpy.linalg.solve(MijEijNij, MijResults)
1178 1177
1179 1178 '''****** Getting constants A, B and H ******'''
1180 1179 W01 = numpy.nanmax( FitGauss01 )
1181 1180 W02 = numpy.nanmax( FitGauss02 )
1182 1181 W12 = numpy.nanmax( FitGauss12 )
1183 1182
1184 1183 WijResult01 = ((cF * Xi01 + cG * Eta01)**2)/cC - numpy.log(W01 / numpy.sqrt(numpy.pi / cC))
1185 1184 WijResult02 = ((cF * Xi02 + cG * Eta02)**2)/cC - numpy.log(W02 / numpy.sqrt(numpy.pi / cC))
1186 1185 WijResult12 = ((cF * Xi12 + cG * Eta12)**2)/cC - numpy.log(W12 / numpy.sqrt(numpy.pi / cC))
1187 1186 WijResults = numpy.array([WijResult01, WijResult02, WijResult12])
1188 1187
1189 1188 WijEijNij = numpy.array([ [Xi01**2, Eta01**2, 2*Xi01*Eta01] , [Xi02**2, Eta02**2, 2*Xi02*Eta02] , [Xi12**2, Eta12**2, 2*Xi12*Eta12] ])
1190 1189 (cA,cB,cH) = numpy.linalg.solve(WijEijNij, WijResults)
1191 1190
1192 1191 VxVy = numpy.array([[cA,cH],[cH,cB]])
1193 1192 VxVyResults = numpy.array([-cF,-cG])
1194 1193 (Vmer,Vzon) = numpy.linalg.solve(VxVy, VxVyResults)
1195 1194 Vver = -SPCMoments[1]*SPEED_OF_LIGHT/(2*radfreq)
1196 1195 error_code = 0
1197 1196
1198 1197 return Vzon, Vmer, Vver, error_code
1199 1198
class SpectralMoments(Operation):

    '''
    Function SpectralMoments()

    Calculates moments (power, mean, standard deviation) and SNR of the signal

    Type of dataIn: Spectra

    Configuration Parameters:

        dirCosx : Cosine director in X axis
        dirCosy : Cosine director in Y axis

        elevation :
        azimuth :

    Input:
        channelList : simple channel list to select e.g. [2,3,7]
        self.dataOut.data_pre : Spectral data
        self.dataOut.abscissaList : List of frequencies
        self.dataOut.noise : Noise level per channel

    Affected:
        self.dataOut.moments : Parameters per channel
        self.dataOut.data_snr : SNR per channel

    '''

    def run(self, dataOut):
        """Compute per-channel SNR, power, Doppler shift and spectral width.

        Parameters
        ----------
        dataOut : Spectra-like container; read and updated in place.

        Returns
        -------
        dataOut with ``moments``, ``data_snr``, ``data_pow``, ``data_dop``
        and ``data_width`` filled.
        """
        data = dataOut.data_pre[0]          # spectra: (nChannels, nFFT, nHeights)
        absc = dataOut.abscissaList[:-1]    # frequency/velocity axis
        noise = dataOut.noise               # noise level per channel
        nChannel = data.shape[0]
        data_param = numpy.zeros((nChannel, 4, data.shape[2]))

        for ind in range(nChannel):
            data_param[ind, :, :] = self.__calculateMoments(data[ind, :, :], absc, noise[ind])

        # Row 0 holds SNR; rows 1..3 hold power, Doppler and width.
        dataOut.moments = data_param[:, 1:, :]
        dataOut.data_snr = data_param[:, 0]
        dataOut.data_pow = data_param[:, 1]
        dataOut.data_dop = data_param[:, 2]
        dataOut.data_width = data_param[:, 3]

        return dataOut

    def __calculateMoments(self, oldspec, oldfreq, n0,
                           nicoh = None, graph = None, smooth = None, type1 = None, fwindow = None, snrth = None, dc = None, aliasing = None, oldfd = None, wwauto = None):
        """Return stacked (snr, power, doppler, width) profiles for one channel.

        oldspec : spectrum array (nFFT, nHeights)
        oldfreq : abscissa (frequency or velocity) per FFT bin
        n0      : noise level for this channel
        Remaining keyword arguments keep their legacy meaning; several
        (graph, type1, dc, aliasing, oldfd, wwauto) are accepted but unused.
        """
        if (nicoh is None): nicoh = 1
        if (graph is None): graph = 0
        if (smooth is None): smooth = 0
        # BUG FIX: this previously read `self.smooth`, an attribute that is
        # never defined on this class, so any call passing `smooth` raised
        # AttributeError. The local parameter is what must be checked.
        elif (smooth < 3): smooth = 0

        if (type1 is None): type1 = 0
        if (fwindow is None): fwindow = numpy.zeros(oldfreq.size) + 1
        if (snrth is None): snrth = -3
        if (dc is None): dc = 0
        if (aliasing is None): aliasing = 0
        if (oldfd is None): oldfd = 0
        if (wwauto is None): wwauto = 0

        if (n0 < 1.e-20): n0 = 1.e-20   # guard against division by ~zero noise

        freq = oldfreq
        vec_power = numpy.zeros(oldspec.shape[1])
        vec_fd = numpy.zeros(oldspec.shape[1])
        vec_w = numpy.zeros(oldspec.shape[1])
        vec_snr = numpy.zeros(oldspec.shape[1])

        # oldspec = numpy.ma.masked_invalid(oldspec)

        for ind in range(oldspec.shape[1]):

            spec = oldspec[:, ind]
            aux = spec * fwindow
            max_spec = aux.max()
            m = aux.tolist().index(max_spec)   # index of the windowed spectral peak

            # Smooth
            if (smooth == 0):
                spec2 = spec
            else:
                spec2 = scipy.ndimage.filters.uniform_filter1d(spec, size=smooth)

            # Moments Estimation: find where the spectrum drops below the
            # noise level on each side of the peak.
            bb = spec2[numpy.arange(m, spec2.size)]
            bb = (bb < n0).nonzero()
            bb = bb[0]

            ss = spec2[numpy.arange(0, m + 1)]
            ss = (ss < n0).nonzero()
            ss = ss[0]

            if (bb.size == 0):
                bb0 = spec.size - 1 - m
            else:
                bb0 = bb[0] - 1
                if (bb0 < 0):
                    bb0 = 0

            if (ss.size == 0):
                ss1 = 1
            else:
                ss1 = max(ss) + 1

            if (ss1 > m):
                ss1 = m

            # Indices of the signal region around the peak
            valid = numpy.arange(int(m + bb0 - ss1 + 1)) + ss1

            signal_power = ((spec2[valid] - n0) * fwindow[valid]).mean()  # D. Scipión added with correct definition
            total_power = (spec2[valid] * fwindow[valid]).mean()          # D. Scipión added with correct definition
            power = ((spec2[valid] - n0) * fwindow[valid]).sum()
            fd = ((spec2[valid] - n0) * freq[valid] * fwindow[valid]).sum() / power
            w = numpy.sqrt(((spec2[valid] - n0) * fwindow[valid] * (freq[valid] - fd) ** 2).sum() / power)
            snr = (spec2.mean() - n0) / n0
            if (snr < 1.e-20):
                snr = 1.e-20

            # vec_power[ind] = power  # D. Scipión replaced with the line below
            vec_power[ind] = total_power
            vec_fd[ind] = fd
            vec_w[ind] = w
            vec_snr[ind] = snr

        return numpy.vstack((vec_snr, vec_power, vec_fd, vec_w))

    # ------------------ Get SA Parameters --------------------------

    def GetSAParameters(self):
        """Spaced-antenna parameters in the frequency domain.

        NOTE(review): this looks like dead/legacy code — `num_chan` and
        `cspc_par` are never defined and `__calculateMoments` is called
        unqualified, so executing this would raise NameError. Left
        untouched pending confirmation; do not call as-is.
        """
        # SA in frequency
        pairslist = self.dataOut.groupList
        num_pairs = len(pairslist)

        vel = self.dataOut.abscissaList
        spectra = self.dataOut.data_pre
        cspectra = self.dataIn.data_cspc
        delta_v = vel[1] - vel[0]

        # Calculating the power spectrum
        spc_pow = numpy.sum(spectra, 3) * delta_v
        # Normalizing Spectra
        norm_spectra = spectra / spc_pow
        # Calculating the norm_spectra at peak
        max_spectra = numpy.max(norm_spectra, 3)

        # Normalizing Cross Spectra
        norm_cspectra = numpy.zeros(cspectra.shape)

        for i in range(num_chan):  # FIXME(review): `num_chan` is undefined
            norm_cspectra[i, :, :] = cspectra[i, :, :] / numpy.sqrt(spc_pow[pairslist[i][0], :] * spc_pow[pairslist[i][1], :])

        max_cspectra = numpy.max(norm_cspectra, 2)
        max_cspectra_index = numpy.argmax(norm_cspectra, 2)

        for i in range(num_pairs):
            cspc_par[i, :, :] = __calculateMoments(norm_cspectra)  # FIXME(review): `cspc_par` and the unqualified call are undefined
1360 1359 #------------------- Get Lags ----------------------------------
1361 1360
class SALags(Operation):
    '''
    Estimates correlation-time lags (tau) and the lag-1 phase from
    spaced-antenna auto- and cross-correlation functions.

    Input:
        self.dataOut.data_pre      : (acf, ccf) arrays
        self.dataOut.abscissaList  : lag-time axis
        self.dataOut.noise
        self.dataOut.normFactor    : (acf norms, ccf norms)
        self.dataOut.data_snr
        self.dataOut.groupList     : (acf pairs, ccf pairs)
        self.dataOut.nChannels

    Affected:
        self.dataOut.data_param    : stacked [tau_ccf | tau_acf | lag-1 phase]
    '''

    def run(self, dataOut):
        """Normalize ACF/CCF data and fill dataOut.data_param with lags."""
        data_acf = dataOut.data_pre[0]
        data_ccf = dataOut.data_pre[1]
        normFactor_acf = dataOut.normFactor[0]
        normFactor_ccf = dataOut.normFactor[1]
        pairs_acf = dataOut.groupList[0]
        pairs_ccf = dataOut.groupList[1]

        nHeights = dataOut.nHeights
        absc = dataOut.abscissaList
        noise = dataOut.noise
        SNR = dataOut.data_snr
        nChannels = dataOut.nChannels

        # Normalize each correlation function in place, pair by pair.
        for l in range(len(pairs_acf)):
            data_acf[l, :, :] = data_acf[l, :, :] / normFactor_acf[l, :]

        for l in range(len(pairs_ccf)):
            data_ccf[l, :, :] = data_ccf[l, :, :] / normFactor_ccf[l, :]

        # One tau_ccf + one tau_acf row per CCF pair, plus one phase row.
        dataOut.data_param = numpy.zeros((len(pairs_ccf) * 2 + 1, nHeights))
        dataOut.data_param[:-1, :] = self.__calculateTaus(data_acf, data_ccf, absc)
        dataOut.data_param[-1, :] = self.__calculateLag1Phase(data_acf, absc)
        return

    def __calculateTaus(self, data_acf, data_ccf, lagRange):
        """Return cross- and auto-correlation time lags stacked vertically."""
        # BUG FIX: use floor division. Under Python 3, `shape[1] / 2` is a
        # float and cannot index `data_ccf[:, lag0, :]` below (TypeError).
        lag0 = data_acf.shape[1] // 2
        # Mean autocorrelation function across pairs.
        # NOTE(review): scipy.stats.nanmean was removed in modern SciPy;
        # numpy.nanmean is the drop-in replacement — confirm pinned version.
        mean_acf = stats.nanmean(data_acf, axis=0)

        # Index of TauCross (CCF maximum per pair/height)
        ind_ccf = data_ccf.argmax(axis=1)
        # Index of TauAuto: where the mean ACF matches the CCF value at lag 0
        ind_acf = numpy.zeros(ind_ccf.shape, dtype='int')
        ccf_lag0 = data_ccf[:, lag0, :]

        for i in range(ccf_lag0.shape[0]):
            ind_acf[i, :] = numpy.abs(mean_acf - ccf_lag0[i, :]).argmin(axis=0)

        # Convert indices to actual lag times
        tau_ccf = lagRange[ind_ccf]
        tau_acf = lagRange[ind_acf]

        # Lags pinned to the first sample are unreliable -> NaN
        Nan1, Nan2 = numpy.where(tau_ccf == lagRange[0])

        tau_ccf[Nan1, Nan2] = numpy.nan
        tau_acf[Nan1, Nan2] = numpy.nan
        tau = numpy.vstack((tau_ccf, tau_acf))

        return tau

    def __calculateLag1Phase(self, data, lagTRange):
        """Phase of the mean ACF at the first lag after zero."""
        data1 = stats.nanmean(data, axis=0)
        lag1 = numpy.where(lagTRange == 0)[0][0] + 1

        phase = numpy.angle(data1[lag1, :])

        return phase
1459 1458
class SpectralFitting(Operation):
    '''
    Least-squares fit of an externally supplied spectral model to measured
    auto- and cross-spectra, per channel group and height.

    Input: see run().
    Output: self.dataOut.data_param / self.dataOut.data_error
    '''

    def run(self, dataOut, getSNR = True, path=None, file=None, groupList=None):
        """Fit the model library's spectrum to the data.

        Parameters
        ----------
        dataOut   : pipeline data container (signature kept for compatibility).
        getSNR    : when True, also compute self.dataOut.data_snr.
        path, file: location and module name of the model library to import.
        groupList : channel groups, e.g. [[0,1],[2,3]].
        """
        if path != None:
            sys.path.append(path)
            self.dataOut.library = importlib.import_module(file)

        # To be inserted as a parameter
        groupArray = numpy.array(groupList)
        self.dataOut.groupList = groupArray

        nGroups = groupArray.shape[0]
        nChannels = self.dataIn.nChannels
        nHeights = self.dataIn.heightList.size

        # Parameters array (allocated lazily once p0.size is known)
        self.dataOut.data_param = None

        # Set constants
        constants = self.dataOut.library.setConstants(self.dataIn)
        self.dataOut.constants = constants
        M = self.dataIn.normFactor
        N = self.dataIn.nFFTPoints
        ippSeconds = self.dataIn.ippSeconds
        K = self.dataIn.nIncohInt
        pairsArray = numpy.array(self.dataIn.pairsList)

        # List of in-group channel combinations.
        # BUG FIX: itertools.combinations returns a one-shot iterator; the
        # original code exhausted it via len(list(listComb)), so the
        # `for pairs in listComb` loops below never executed. Materialize
        # the list once and reuse it.
        listComb = list(itertools.combinations(numpy.arange(groupArray.shape[1]), 2))
        indCross = numpy.zeros(len(listComb), dtype = 'int')

        if getSNR:
            listChannels = groupArray.reshape((groupArray.size))
            listChannels.sort()
            noise = self.dataIn.getNoise()
            self.dataOut.data_snr = self.__getSNR(self.dataIn.data_spc[listChannels, :, :], noise[listChannels])

        for i in range(nGroups):
            coord = groupArray[i, :]

            # Input data array
            data = self.dataIn.data_spc[coord, :, :] / (M * N)
            data = data.reshape((data.shape[0] * data.shape[1], data.shape[2]))

            # Cross Spectra data array for Covariance Matrixes
            ind = 0
            for pairs in listComb:
                # BUG FIX: `x` and `y` were referenced here before being
                # assigned; the in-group indices are carried by `pairs`.
                pairsSel = numpy.array([coord[pairs[0]], coord[pairs[1]]])
                indCross[ind] = int(numpy.where(numpy.all(pairsArray == pairsSel, axis = 1))[0][0])
                ind += 1
            dataCross = self.dataIn.data_cspc[indCross, :, :] / (M * N)
            dataCross = dataCross ** 2 / K

            for h in range(nHeights):

                # Input
                d = data[:, h]

                # Covariance Matrix
                D = numpy.diag(d ** 2 / K)
                ind = 0
                for pairs in listComb:
                    # Coordinates in Covariance Matrix
                    x = pairs[0]
                    y = pairs[1]
                    # Channel Index
                    S12 = dataCross[ind, :, h]
                    D12 = numpy.diag(S12)
                    # Completing Covariance Matrix with Cross Spectras
                    D[x * N:(x + 1) * N, y * N:(y + 1) * N] = D12
                    D[y * N:(y + 1) * N, x * N:(x + 1) * N] = D12
                    ind += 1
                # Whitening transform from the Cholesky factor of D^-1
                Dinv = numpy.linalg.inv(D)
                L = numpy.linalg.cholesky(Dinv)
                LT = L.T

                dp = numpy.dot(LT, d)

                # Initial values: reuse the previous height's solution when
                # its Jacobian error was acceptable, else ask the library.
                data_spc = self.dataIn.data_spc[coord, :, h]

                if (h > 0) and (error1[3] < 5):
                    p0 = self.dataOut.data_param[i, :, h - 1]
                else:
                    p0 = numpy.array(self.dataOut.library.initialValuesFunction(data_spc, constants, i))

                try:
                    # Least Squares
                    minp, covp, infodict, mesg, ier = optimize.leastsq(self.__residFunction, p0, args=(dp, LT, constants), full_output=True)
                    # Chi square error
                    error0 = numpy.sum(infodict['fvec'] ** 2) / (2 * N)
                    # Error with Jacobian
                    error1 = self.dataOut.library.errorFunction(minp, constants, LT)
                except:
                    minp = p0 * numpy.nan
                    error0 = numpy.nan
                    error1 = p0 * numpy.nan

                # Save
                if self.dataOut.data_param is None:
                    self.dataOut.data_param = numpy.zeros((nGroups, p0.size, nHeights)) * numpy.nan
                    self.dataOut.data_error = numpy.zeros((nGroups, p0.size + 1, nHeights)) * numpy.nan

                self.dataOut.data_error[i, :, h] = numpy.hstack((error0, error1))
                self.dataOut.data_param[i, :, h] = minp
        return

    def __residFunction(self, p, dp, LT, constants):
        """Residual between whitened data and the whitened model spectrum."""
        fm = self.dataOut.library.modelFunction(p, constants)
        fmp = numpy.dot(LT, fm)

        return dp - fmp

    def __getSNR(self, z, noise):
        """SNR per channel/height: (mean spectrum - noise) / noise."""
        avg = numpy.average(z, axis=1)
        SNR = (avg.T - noise) / noise
        SNR = SNR.T
        return SNR

    def __chisq(p, chindex, hindex):
        """Chi-square of the fit.

        NOTE(review): legacy code — `setupLTdfm` is not defined anywhere
        visible and the method is missing `self`; likely dead. Kept as-is.
        """
        # similar to Resid but calculates CHI**2
        [LT, d, fm] = setupLTdfm(p, chindex, hindex)
        dp = numpy.dot(LT, d)
        fmp = numpy.dot(LT, fm)
        chisq = numpy.dot((dp - fmp).T, (dp - fmp))
        return chisq
1599 1598
class WindProfiler(Operation):

    # Internal state used to integrate data across successive calls.
    __isConfig = False          # set once the operation has been configured

    __initime = None            # start time of the current integration window
    __lastdatatime = None       # timestamp of the last processed block
    __integrationtime = None    # configured output interval

    __buffer = None             # data accumulated within the window

    __dataReady = False         # True once a full output interval elapsed

    __firstdata = None          # first data block of the current window

    n = None                    # number of accumulated blocks
1615 1614
1616 1615 def __init__(self):
1617 1616 Operation.__init__(self)
1618 1617
1619 1618 def __calculateCosDir(self, elev, azim):
1620 1619 zen = (90 - elev)*numpy.pi/180
1621 1620 azim = azim*numpy.pi/180
1622 1621 cosDirX = numpy.sqrt((1-numpy.cos(zen)**2)/((1+numpy.tan(azim)**2)))
1623 1622 cosDirY = numpy.sqrt(1-numpy.cos(zen)**2-cosDirX**2)
1624 1623
1625 1624 signX = numpy.sign(numpy.cos(azim))
1626 1625 signY = numpy.sign(numpy.sin(azim))
1627 1626
1628 1627 cosDirX = numpy.copysign(cosDirX, signX)
1629 1628 cosDirY = numpy.copysign(cosDirY, signY)
1630 1629 return cosDirX, cosDirY
1631 1630
1632 1631 def __calculateAngles(self, theta_x, theta_y, azimuth):
1633 1632
1634 1633 dir_cosw = numpy.sqrt(1-theta_x**2-theta_y**2)
1635 1634 zenith_arr = numpy.arccos(dir_cosw)
1636 1635 azimuth_arr = numpy.arctan2(theta_x,theta_y) + azimuth*math.pi/180
1637 1636
1638 1637 dir_cosu = numpy.sin(azimuth_arr)*numpy.sin(zenith_arr)
1639 1638 dir_cosv = numpy.cos(azimuth_arr)*numpy.sin(zenith_arr)
1640 1639
1641 1640 return azimuth_arr, zenith_arr, dir_cosu, dir_cosv, dir_cosw
1642 1641
1643 1642 def __calculateMatA(self, dir_cosu, dir_cosv, dir_cosw, horOnly):
1644 1643
1645 1644 #
1646 1645 if horOnly:
1647 1646 A = numpy.c_[dir_cosu,dir_cosv]
1648 1647 else:
1649 1648 A = numpy.c_[dir_cosu,dir_cosv,dir_cosw]
1650 1649 A = numpy.asmatrix(A)
1651 1650 A1 = numpy.linalg.inv(A.transpose()*A)*A.transpose()
1652 1651
1653 1652 return A1
1654 1653
1655 1654 def __correctValues(self, heiRang, phi, velRadial, SNR):
1656 1655 listPhi = phi.tolist()
1657 1656 maxid = listPhi.index(max(listPhi))
1658 1657 minid = listPhi.index(min(listPhi))
1659 1658
1660 1659 rango = list(range(len(phi)))
1661 1660 # rango = numpy.delete(rango,maxid)
1662 1661
1663 1662 heiRang1 = heiRang*math.cos(phi[maxid])
1664 1663 heiRangAux = heiRang*math.cos(phi[minid])
1665 1664 indOut = (heiRang1 < heiRangAux[0]).nonzero()
1666 1665 heiRang1 = numpy.delete(heiRang1,indOut)
1667 1666
1668 1667 velRadial1 = numpy.zeros([len(phi),len(heiRang1)])
1669 1668 SNR1 = numpy.zeros([len(phi),len(heiRang1)])
1670 1669
1671 1670 for i in rango:
1672 1671 x = heiRang*math.cos(phi[i])
1673 1672 y1 = velRadial[i,:]
1674 1673 f1 = interpolate.interp1d(x,y1,kind = 'cubic')
1675 1674
1676 1675 x1 = heiRang1
1677 1676 y11 = f1(x1)
1678 1677
1679 1678 y2 = SNR[i,:]
1680 1679 f2 = interpolate.interp1d(x,y2,kind = 'cubic')
1681 1680 y21 = f2(x1)
1682 1681
1683 1682 velRadial1[i,:] = y11
1684 1683 SNR1[i,:] = y21
1685 1684
1686 1685 return heiRang1, velRadial1, SNR1
1687 1686
1688 1687 def __calculateVelUVW(self, A, velRadial):
1689 1688
1690 1689 #Operacion Matricial
1691 1690 # velUVW = numpy.zeros((velRadial.shape[1],3))
1692 1691 # for ind in range(velRadial.shape[1]):
1693 1692 # velUVW[ind,:] = numpy.dot(A,velRadial[:,ind])
1694 1693 # velUVW = velUVW.transpose()
1695 1694 velUVW = numpy.zeros((A.shape[0],velRadial.shape[1]))
1696 1695 velUVW[:,:] = numpy.dot(A,velRadial)
1697 1696
1698 1697
1699 1698 return velUVW
1700 1699
1701 1700 # def techniqueDBS(self, velRadial0, dirCosx, disrCosy, azimuth, correct, horizontalOnly, heiRang, SNR0):
1702 1701
    def techniqueDBS(self, kwargs):
        """
        Function that implements Doppler Beam Swinging (DBS) technique.

        Input: Radial velocities, Direction cosines (x and y) of the Beam, Antenna azimuth,
        Direction correction (if necessary), Ranges and SNR

        Output: Winds estimation (Zonal, Meridional and Vertical)

        Parameters affected: Winds, height range, SNR
        """
        velRadial0 = kwargs['velRadial']
        heiRang = kwargs['heightList']
        SNR0 = kwargs['SNR']

        # Direction cosines may be given directly, or derived from
        # elevation/azimuth angles in degrees.
        if 'dirCosx' in kwargs and 'dirCosy' in kwargs:
            theta_x = numpy.array(kwargs['dirCosx'])
            theta_y = numpy.array(kwargs['dirCosy'])
        else:
            elev = numpy.array(kwargs['elevation'])
            azim = numpy.array(kwargs['azimuth'])
            theta_x, theta_y = self.__calculateCosDir(elev, azim)
        azimuth = kwargs['correctAzimuth']
        # Optional settings with defaults
        if 'horizontalOnly' in kwargs:
            horizontalOnly = kwargs['horizontalOnly']
        else: horizontalOnly = False
        if 'correctFactor' in kwargs:
            correctFactor = kwargs['correctFactor']
        else: correctFactor = 1
        if 'channelList' in kwargs:
            channelList = kwargs['channelList']
            if len(channelList) == 2:
                horizontalOnly = True
            arrayChannel = numpy.array(channelList)
            # FIXME(review): `param` is undefined in this scope, so this
            # branch raises NameError whenever 'channelList' is supplied;
            # it probably should subset `velRadial0` — confirm with callers.
            param = param[arrayChannel,:,:]
            theta_x = theta_x[arrayChannel]
            theta_y = theta_y[arrayChannel]

        # Beam geometry, common-height interpolation and LSQ matrix
        azimuth_arr, zenith_arr, dir_cosu, dir_cosv, dir_cosw = self.__calculateAngles(theta_x, theta_y, azimuth)
        heiRang1, velRadial1, SNR1 = self.__correctValues(heiRang, zenith_arr, correctFactor*velRadial0, SNR0)
        A = self.__calculateMatA(dir_cosu, dir_cosv, dir_cosw, horizontalOnly)

        # Wind velocity components from the DBS linear system
        winds = self.__calculateVelUVW(A,velRadial1)

        return winds, heiRang1, SNR1
1749 1748
1750 1749 def __calculateDistance(self, posx, posy, pairs_ccf, azimuth = None):
1751 1750
1752 1751 nPairs = len(pairs_ccf)
1753 1752 posx = numpy.asarray(posx)
1754 1753 posy = numpy.asarray(posy)
1755 1754
1756 1755 #Rotacion Inversa para alinear con el azimuth
1757 1756 if azimuth!= None:
1758 1757 azimuth = azimuth*math.pi/180
1759 1758 posx1 = posx*math.cos(azimuth) + posy*math.sin(azimuth)
1760 1759 posy1 = -posx*math.sin(azimuth) + posy*math.cos(azimuth)
1761 1760 else:
1762 1761 posx1 = posx
1763 1762 posy1 = posy
1764 1763
1765 1764 #Calculo de Distancias
1766 1765 distx = numpy.zeros(nPairs)
1767 1766 disty = numpy.zeros(nPairs)
1768 1767 dist = numpy.zeros(nPairs)
1769 1768 ang = numpy.zeros(nPairs)
1770 1769
1771 1770 for i in range(nPairs):
1772 1771 distx[i] = posx1[pairs_ccf[i][1]] - posx1[pairs_ccf[i][0]]
1773 1772 disty[i] = posy1[pairs_ccf[i][1]] - posy1[pairs_ccf[i][0]]
1774 1773 dist[i] = numpy.sqrt(distx[i]**2 + disty[i]**2)
1775 1774 ang[i] = numpy.arctan2(disty[i],distx[i])
1776 1775
1777 1776 return distx, disty, dist, ang
1778 1777 #Calculo de Matrices
1779 1778 # nPairs = len(pairs)
1780 1779 # ang1 = numpy.zeros((nPairs, 2, 1))
1781 1780 # dist1 = numpy.zeros((nPairs, 2, 1))
1782 1781 #
1783 1782 # for j in range(nPairs):
1784 1783 # dist1[j,0,0] = dist[pairs[j][0]]
1785 1784 # dist1[j,1,0] = dist[pairs[j][1]]
1786 1785 # ang1[j,0,0] = ang[pairs[j][0]]
1787 1786 # ang1[j,1,0] = ang[pairs[j][1]]
1788 1787 #
1789 1788 # return distx,disty, dist1,ang1
1790 1789
1791 1790
1792 1791 def __calculateVelVer(self, phase, lagTRange, _lambda):
1793 1792
1794 1793 Ts = lagTRange[1] - lagTRange[0]
1795 1794 velW = -_lambda*phase/(4*math.pi*Ts)
1796 1795
1797 1796 return velW
1798 1797
    def __calculateVelHorDir(self, dist, tau1, tau2, ang):
        """Horizontal wind components per pair (direct spaced-antenna method).

        dist, ang  : baseline length and orientation per pair
        tau1, tau2 : lag arrays per (pair, height) — presumably cross and
                     auto lags from SALags; TODO confirm ordering.
        Returns vel of shape (nPairs, 3, nHeights); only [:,0,:] and
        [:,1,:] are filled here — the caller fills the vertical row.
        """
        nPairs = tau1.shape[0]
        nHeights = tau1.shape[1]
        vel = numpy.zeros((nPairs,3,nHeights))
        dist1 = numpy.reshape(dist, (dist.size,1))   # column vector for broadcasting

        angCos = numpy.cos(ang)
        angSin = numpy.sin(ang)

        # Velocity magnitude per pair/height, then projected onto each
        # baseline direction.
        # NOTE(review): angCos/angSin have shape (nPairs,) while vel0 is
        # (nPairs, nHeights), and .sum(axis=1) collapses the height axis
        # before assigning into a (nPairs, nHeights) slice — the shapes only
        # agree in special cases; verify the intended axis/broadcasting.
        vel0 = dist1*tau1/(2*tau2**2)
        vel[:,0,:] = (vel0*angCos).sum(axis = 1)
        vel[:,1,:] = (vel0*angSin).sum(axis = 1)

        # Infinities (tau2 == 0) are invalid estimates -> NaN
        ind = numpy.where(numpy.isinf(vel))
        vel[ind] = numpy.nan

        return vel
1816 1815
1817 1816 # def __getPairsAutoCorr(self, pairsList, nChannels):
1818 1817 #
1819 1818 # pairsAutoCorr = numpy.zeros(nChannels, dtype = 'int')*numpy.nan
1820 1819 #
1821 1820 # for l in range(len(pairsList)):
1822 1821 # firstChannel = pairsList[l][0]
1823 1822 # secondChannel = pairsList[l][1]
1824 1823 #
1825 1824 # #Obteniendo pares de Autocorrelacion
1826 1825 # if firstChannel == secondChannel:
1827 1826 # pairsAutoCorr[firstChannel] = int(l)
1828 1827 #
1829 1828 # pairsAutoCorr = pairsAutoCorr.astype(int)
1830 1829 #
1831 1830 # pairsCrossCorr = range(len(pairsList))
1832 1831 # pairsCrossCorr = numpy.delete(pairsCrossCorr,pairsAutoCorr)
1833 1832 #
1834 1833 # return pairsAutoCorr, pairsCrossCorr
1835 1834
1836 1835 # def techniqueSA(self, pairsSelected, pairsList, nChannels, tau, azimuth, _lambda, position_x, position_y, lagTRange, correctFactor):
1837 1836 def techniqueSA(self, kwargs):
1838 1837
1839 1838 """
1840 1839 Function that implements Spaced Antenna (SA) technique.
1841 1840
1842 1841 Input: Radial velocities, Direction cosines (x and y) of the Beam, Antenna azimuth,
1843 1842 Direction correction (if necessary), Ranges and SNR
1844 1843
1845 1844 Output: Winds estimation (Zonal, Meridional and Vertical)
1846 1845
1847 1846 Parameters affected: Winds
1848 1847 """
1849 1848 position_x = kwargs['positionX']
1850 1849 position_y = kwargs['positionY']
1851 1850 azimuth = kwargs['azimuth']
1852 1851
1853 1852 if 'correctFactor' in kwargs:
1854 1853 correctFactor = kwargs['correctFactor']
1855 1854 else:
1856 1855 correctFactor = 1
1857 1856
1858 1857 groupList = kwargs['groupList']
1859 1858 pairs_ccf = groupList[1]
1860 1859 tau = kwargs['tau']
1861 1860 _lambda = kwargs['_lambda']
1862 1861
1863 1862 #Cross Correlation pairs obtained
1864 1863 # pairsAutoCorr, pairsCrossCorr = self.__getPairsAutoCorr(pairssList, nChannels)
1865 1864 # pairsArray = numpy.array(pairsList)[pairsCrossCorr]
1866 1865 # pairsSelArray = numpy.array(pairsSelected)
1867 1866 # pairs = []
1868 1867 #
1869 1868 # #Wind estimation pairs obtained
1870 1869 # for i in range(pairsSelArray.shape[0]/2):
1871 1870 # ind1 = numpy.where(numpy.all(pairsArray == pairsSelArray[2*i], axis = 1))[0][0]
1872 1871 # ind2 = numpy.where(numpy.all(pairsArray == pairsSelArray[2*i + 1], axis = 1))[0][0]
1873 1872 # pairs.append((ind1,ind2))
1874 1873
1875 1874 indtau = tau.shape[0]/2
1876 1875 tau1 = tau[:indtau,:]
1877 1876 tau2 = tau[indtau:-1,:]
1878 1877 # tau1 = tau1[pairs,:]
1879 1878 # tau2 = tau2[pairs,:]
1880 1879 phase1 = tau[-1,:]
1881 1880
1882 1881 #---------------------------------------------------------------------
1883 1882 #Metodo Directo
1884 1883 distx, disty, dist, ang = self.__calculateDistance(position_x, position_y, pairs_ccf,azimuth)
1885 1884 winds = self.__calculateVelHorDir(dist, tau1, tau2, ang)
1886 1885 winds = stats.nanmean(winds, axis=0)
1887 1886 #---------------------------------------------------------------------
1888 1887 #Metodo General
1889 1888 # distx, disty, dist = self.calculateDistance(position_x,position_y,pairsCrossCorr, pairsList, azimuth)
1890 1889 # #Calculo Coeficientes de Funcion de Correlacion
1891 1890 # F,G,A,B,H = self.calculateCoef(tau1,tau2,distx,disty,n)
1892 1891 # #Calculo de Velocidades
1893 1892 # winds = self.calculateVelUV(F,G,A,B,H)
1894 1893
1895 1894 #---------------------------------------------------------------------
1896 1895 winds[2,:] = self.__calculateVelVer(phase1, lagTRange, _lambda)
1897 1896 winds = correctFactor*winds
1898 1897 return winds
1899 1898
1900 1899 def __checkTime(self, currentTime, paramInterval, outputInterval):
1901 1900
1902 1901 dataTime = currentTime + paramInterval
1903 1902 deltaTime = dataTime - self.__initime
1904 1903
1905 1904 if deltaTime >= outputInterval or deltaTime < 0:
1906 1905 self.__dataReady = True
1907 1906 return
1908 1907
    def techniqueMeteors(self, arrayMeteor, meteorThresh, heightMin, heightMax):
        '''
        Function that implements winds estimation technique with detected meteors.

        Input: Detected meteors, Minimum meteor quantity to wind estimation

        Output: Winds estimation (Zonal and Meridional)

        Parameters affected: Winds
        '''
        # Settings: one wind estimate per 2-km height bin
        nInt = (heightMax - heightMin)/2
        nInt = int(nInt)
        winds = numpy.zeros((2,nInt))*numpy.nan

        # Filter errors: keep only meteors whose last column (error flag) is 0
        error = numpy.where(arrayMeteor[:,-1] == 0)[0]
        finalMeteor = arrayMeteor[error,:]

        # Meteor histogram over height
        # NOTE(review): column 2 is assumed to be height — confirm against
        # the meteor-array layout produced upstream.
        finalHeights = finalMeteor[:,2]
        hist = numpy.histogram(finalHeights, bins = nInt, range = (heightMin,heightMax))
        nMeteorsPerI = hist[0]
        heightPerI = hist[1]

        # Sort meteors by height so each bin is a contiguous slice below
        indSort = finalHeights.argsort()
        finalMeteor2 = finalMeteor[indSort,:]

        # Calculating winds: walk the sorted array bin by bin using the
        # histogram counts as slice lengths.
        ind1 = 0
        ind2 = 0

        for i in range(nInt):
            nMet = nMeteorsPerI[i]
            ind1 = ind2
            ind2 = ind1 + nMet

            meteorAux = finalMeteor2[ind1:ind2,:]

            # Only estimate when the bin has enough meteors
            if meteorAux.shape[0] >= meteorThresh:
                # Columns 6/4/3 presumably hold radial velocity, zenith and
                # azimuth (degrees) — TODO confirm layout.
                vel = meteorAux[:, 6]
                zen = meteorAux[:, 4]*numpy.pi/180
                azim = meteorAux[:, 3]*numpy.pi/180

                # Direction cosines of each echo
                n = numpy.cos(zen)
                # m = (1 - n**2)/(1 - numpy.tan(azim)**2)
                # l = m*numpy.tan(azim)
                l = numpy.sin(zen)*numpy.sin(azim)
                m = numpy.sin(zen)*numpy.cos(azim)

                # Least-squares solution of vel = A · [u, v]
                A = numpy.vstack((l, m)).transpose()
                A1 = numpy.dot(numpy.linalg.inv( numpy.dot(A.transpose(),A) ),A.transpose())
                windsAux = numpy.dot(A1, vel)

                winds[0,i] = windsAux[0]   # zonal
                winds[1,i] = windsAux[1]   # meridional

        return winds, heightPerI[:-1]
1968 1967
1969 1968 def techniqueNSM_SA(self, **kwargs):
1970 1969 metArray = kwargs['metArray']
1971 1970 heightList = kwargs['heightList']
1972 1971 timeList = kwargs['timeList']
1973 1972
1974 1973 rx_location = kwargs['rx_location']
1975 1974 groupList = kwargs['groupList']
1976 1975 azimuth = kwargs['azimuth']
1977 1976 dfactor = kwargs['dfactor']
1978 1977 k = kwargs['k']
1979 1978
1980 1979 azimuth1, dist = self.__calculateAzimuth1(rx_location, groupList, azimuth)
1981 1980 d = dist*dfactor
1982 1981 #Phase calculation
1983 1982 metArray1 = self.__getPhaseSlope(metArray, heightList, timeList)
1984 1983
1985 1984 metArray1[:,-2] = metArray1[:,-2]*metArray1[:,2]*1000/(k*d[metArray1[:,1].astype(int)]) #angles into velocities
1986 1985
1987 1986 velEst = numpy.zeros((heightList.size,2))*numpy.nan
1988 1987 azimuth1 = azimuth1*numpy.pi/180
1989 1988
1990 1989 for i in range(heightList.size):
1991 1990 h = heightList[i]
1992 1991 indH = numpy.where((metArray1[:,2] == h)&(numpy.abs(metArray1[:,-2]) < 100))[0]
1993 1992 metHeight = metArray1[indH,:]
1994 1993 if metHeight.shape[0] >= 2:
1995 1994 velAux = numpy.asmatrix(metHeight[:,-2]).T #Radial Velocities
1996 1995 iazim = metHeight[:,1].astype(int)
1997 1996 azimAux = numpy.asmatrix(azimuth1[iazim]).T #Azimuths
1998 1997 A = numpy.hstack((numpy.cos(azimAux),numpy.sin(azimAux)))
1999 1998 A = numpy.asmatrix(A)
2000 1999 A1 = numpy.linalg.pinv(A.transpose()*A)*A.transpose()
2001 2000 velHor = numpy.dot(A1,velAux)
2002 2001
2003 2002 velEst[i,:] = numpy.squeeze(velHor)
2004 2003 return velEst
2005 2004
2006 2005 def __getPhaseSlope(self, metArray, heightList, timeList):
2007 2006 meteorList = []
2008 2007 #utctime sec1 height SNR velRad ph0 ph1 ph2 coh0 coh1 coh2
2009 2008 #Putting back together the meteor matrix
2010 2009 utctime = metArray[:,0]
2011 2010 uniqueTime = numpy.unique(utctime)
2012 2011
2013 2012 phaseDerThresh = 0.5
2014 2013 ippSeconds = timeList[1] - timeList[0]
2015 2014 sec = numpy.where(timeList>1)[0][0]
2016 2015 nPairs = metArray.shape[1] - 6
2017 2016 nHeights = len(heightList)
2018 2017
2019 2018 for t in uniqueTime:
2020 2019 metArray1 = metArray[utctime==t,:]
2021 2020 # phaseDerThresh = numpy.pi/4 #reducir Phase thresh
2022 2021 tmet = metArray1[:,1].astype(int)
2023 2022 hmet = metArray1[:,2].astype(int)
2024 2023
2025 2024 metPhase = numpy.zeros((nPairs, heightList.size, timeList.size - 1))
2026 2025 metPhase[:,:] = numpy.nan
2027 2026 metPhase[:,hmet,tmet] = metArray1[:,6:].T
2028 2027
2029 2028 #Delete short trails
2030 2029 metBool = ~numpy.isnan(metPhase[0,:,:])
2031 2030 heightVect = numpy.sum(metBool, axis = 1)
2032 2031 metBool[heightVect<sec,:] = False
2033 2032 metPhase[:,heightVect<sec,:] = numpy.nan
2034 2033
2035 2034 #Derivative
2036 2035 metDer = numpy.abs(metPhase[:,:,1:] - metPhase[:,:,:-1])
2037 2036 phDerAux = numpy.dstack((numpy.full((nPairs,nHeights,1), False, dtype=bool),metDer > phaseDerThresh))
2038 2037 metPhase[phDerAux] = numpy.nan
2039 2038
2040 2039 #--------------------------METEOR DETECTION -----------------------------------------
2041 2040 indMet = numpy.where(numpy.any(metBool,axis=1))[0]
2042 2041
2043 2042 for p in numpy.arange(nPairs):
2044 2043 phase = metPhase[p,:,:]
2045 2044 phDer = metDer[p,:,:]
2046 2045
2047 2046 for h in indMet:
2048 2047 height = heightList[h]
2049 2048 phase1 = phase[h,:] #82
2050 2049 phDer1 = phDer[h,:]
2051 2050
2052 2051 phase1[~numpy.isnan(phase1)] = numpy.unwrap(phase1[~numpy.isnan(phase1)]) #Unwrap
2053 2052
2054 2053 indValid = numpy.where(~numpy.isnan(phase1))[0]
2055 2054 initMet = indValid[0]
2056 2055 endMet = 0
2057 2056
2058 2057 for i in range(len(indValid)-1):
2059 2058
2060 2059 #Time difference
2061 2060 inow = indValid[i]
2062 2061 inext = indValid[i+1]
2063 2062 idiff = inext - inow
2064 2063 #Phase difference
2065 2064 phDiff = numpy.abs(phase1[inext] - phase1[inow])
2066 2065
2067 2066 if idiff>sec or phDiff>numpy.pi/4 or inext==indValid[-1]: #End of Meteor
2068 2067 sizeTrail = inow - initMet + 1
2069 2068 if sizeTrail>3*sec: #Too short meteors
2070 2069 x = numpy.arange(initMet,inow+1)*ippSeconds
2071 2070 y = phase1[initMet:inow+1]
2072 2071 ynnan = ~numpy.isnan(y)
2073 2072 x = x[ynnan]
2074 2073 y = y[ynnan]
2075 2074 slope, intercept, r_value, p_value, std_err = stats.linregress(x,y)
2076 2075 ylin = x*slope + intercept
2077 2076 rsq = r_value**2
2078 2077 if rsq > 0.5:
2079 2078 vel = slope#*height*1000/(k*d)
2080 2079 estAux = numpy.array([utctime,p,height, vel, rsq])
2081 2080 meteorList.append(estAux)
2082 2081 initMet = inext
2083 2082 metArray2 = numpy.array(meteorList)
2084 2083
2085 2084 return metArray2
2086 2085
2087 2086 def __calculateAzimuth1(self, rx_location, pairslist, azimuth0):
2088 2087
2089 2088 azimuth1 = numpy.zeros(len(pairslist))
2090 2089 dist = numpy.zeros(len(pairslist))
2091 2090
2092 2091 for i in range(len(rx_location)):
2093 2092 ch0 = pairslist[i][0]
2094 2093 ch1 = pairslist[i][1]
2095 2094
2096 2095 diffX = rx_location[ch0][0] - rx_location[ch1][0]
2097 2096 diffY = rx_location[ch0][1] - rx_location[ch1][1]
2098 2097 azimuth1[i] = numpy.arctan2(diffY,diffX)*180/numpy.pi
2099 2098 dist[i] = numpy.sqrt(diffX**2 + diffY**2)
2100 2099
2101 2100 azimuth1 -= azimuth0
2102 2101 return azimuth1, dist
2103 2102
2104 2103 def techniqueNSM_DBS(self, **kwargs):
2105 2104 metArray = kwargs['metArray']
2106 2105 heightList = kwargs['heightList']
2107 2106 timeList = kwargs['timeList']
2108 2107 azimuth = kwargs['azimuth']
2109 2108 theta_x = numpy.array(kwargs['theta_x'])
2110 2109 theta_y = numpy.array(kwargs['theta_y'])
2111 2110
2112 2111 utctime = metArray[:,0]
2113 2112 cmet = metArray[:,1].astype(int)
2114 2113 hmet = metArray[:,3].astype(int)
2115 2114 SNRmet = metArray[:,4]
2116 2115 vmet = metArray[:,5]
2117 2116 spcmet = metArray[:,6]
2118 2117
2119 2118 nChan = numpy.max(cmet) + 1
2120 2119 nHeights = len(heightList)
2121 2120
2122 2121 azimuth_arr, zenith_arr, dir_cosu, dir_cosv, dir_cosw = self.__calculateAngles(theta_x, theta_y, azimuth)
2123 2122 hmet = heightList[hmet]
2124 2123 h1met = hmet*numpy.cos(zenith_arr[cmet]) #Corrected heights
2125 2124
2126 2125 velEst = numpy.zeros((heightList.size,2))*numpy.nan
2127 2126
2128 2127 for i in range(nHeights - 1):
2129 2128 hmin = heightList[i]
2130 2129 hmax = heightList[i + 1]
2131 2130
2132 2131 thisH = (h1met>=hmin) & (h1met<hmax) & (cmet!=2) & (SNRmet>8) & (vmet<50) & (spcmet<10)
2133 2132 indthisH = numpy.where(thisH)
2134 2133
2135 2134 if numpy.size(indthisH) > 3:
2136 2135
2137 2136 vel_aux = vmet[thisH]
2138 2137 chan_aux = cmet[thisH]
2139 2138 cosu_aux = dir_cosu[chan_aux]
2140 2139 cosv_aux = dir_cosv[chan_aux]
2141 2140 cosw_aux = dir_cosw[chan_aux]
2142 2141
2143 2142 nch = numpy.size(numpy.unique(chan_aux))
2144 2143 if nch > 1:
2145 2144 A = self.__calculateMatA(cosu_aux, cosv_aux, cosw_aux, True)
2146 2145 velEst[i,:] = numpy.dot(A,vel_aux)
2147 2146
2148 2147 return velEst
2149 2148
    def run(self, dataOut, technique, nHours=1, hmin=70, hmax=110, **kwargs):
        """Dispatch the selected wind-estimation technique.

        technique -- one of 'DBS', 'SA', 'Meteors', 'Meteors1'. The meteor
        techniques accumulate ``dataOut.data_param`` in an internal buffer
        (``self.__buffer``) across calls and only produce output once
        ``self.__checkTime`` flags the interval as complete; until then
        ``dataOut.flagNoData`` stays True.

        Extra per-technique options are taken from ``kwargs`` (e.g. nHours,
        meteorsPerBin, hmin/hmax for 'Meteors'; nMins, rx_location, azimuth,
        dfactor, mode, theta_x, theta_y for 'Meteors1').
        """

        param = dataOut.data_param
        # Only compute the abscissa slice when the list has non-zero entries.
        if dataOut.abscissaList.any():
        #if dataOut.abscissaList != None:
            absc = dataOut.abscissaList[:-1]
        # noise = dataOut.noise
        heightList = dataOut.heightList
        SNR = dataOut.data_snr

        if technique == 'DBS':

            kwargs['velRadial'] = param[:,1,:] #Radial velocity
            kwargs['heightList'] = heightList
            kwargs['SNR'] = SNR

            dataOut.data_output, dataOut.heightList, dataOut.data_snr = self.techniqueDBS(kwargs) #DBS Function
            dataOut.utctimeInit = dataOut.utctime
            dataOut.outputInterval = dataOut.paramInterval

        elif technique == 'SA':

            #Parameters
#            position_x = kwargs['positionX']
#            position_y = kwargs['positionY']
#            azimuth = kwargs['azimuth']
#
#            if kwargs.has_key('crosspairsList'):
#                pairs = kwargs['crosspairsList']
#            else:
#                pairs = None
#
#            if kwargs.has_key('correctFactor'):
#                correctFactor = kwargs['correctFactor']
#            else:
#                correctFactor = 1

#            tau = dataOut.data_param
#            _lambda = dataOut.C/dataOut.frequency
#            pairsList = dataOut.groupList
#            nChannels = dataOut.nChannels

            kwargs['groupList'] = dataOut.groupList
            kwargs['tau'] = dataOut.data_param
            kwargs['_lambda'] = dataOut.C/dataOut.frequency
#            dataOut.data_output = self.techniqueSA(pairs, pairsList, nChannels, tau, azimuth, _lambda, position_x, position_y, absc, correctFactor)
            dataOut.data_output = self.techniqueSA(kwargs)
            dataOut.utctimeInit = dataOut.utctime
            dataOut.outputInterval = dataOut.timeInterval

        elif technique == 'Meteors':
            # Buffered technique: no output until a full interval is gathered.
            dataOut.flagNoData = True
            self.__dataReady = False

            if 'nHours' in kwargs:
                nHours = kwargs['nHours']
            else:
                nHours = 1

            if 'meteorsPerBin' in kwargs:
                meteorThresh = kwargs['meteorsPerBin']
            else:
                meteorThresh = 6

            if 'hmin' in kwargs:
                hmin = kwargs['hmin']
            else: hmin = 70
            if 'hmax' in kwargs:
                hmax = kwargs['hmax']
            else: hmax = 110

            dataOut.outputInterval = nHours*3600

            if self.__isConfig == False:
#                self.__initime = dataOut.datatime.replace(minute = 0, second = 0, microsecond = 03)
                #Get Initial LTC time
                self.__initime = datetime.datetime.utcfromtimestamp(dataOut.utctime)
                # Align the interval start to the top of the hour (epoch secs).
                self.__initime = (self.__initime.replace(minute = 0, second = 0, microsecond = 0) - datetime.datetime(1970, 1, 1)).total_seconds()

                self.__isConfig = True

            # Accumulate meteor parameters until the interval is complete.
            if self.__buffer is None:
                self.__buffer = dataOut.data_param
                self.__firstdata = copy.copy(dataOut)

            else:
                self.__buffer = numpy.vstack((self.__buffer, dataOut.data_param))

            self.__checkTime(dataOut.utctime, dataOut.paramInterval, dataOut.outputInterval) #Check if the buffer is ready

            if self.__dataReady:
                dataOut.utctimeInit = self.__initime

                self.__initime += dataOut.outputInterval #to erase time offset

                dataOut.data_output, dataOut.heightList = self.techniqueMeteors(self.__buffer, meteorThresh, hmin, hmax)
                dataOut.flagNoData = False
                self.__buffer = None

        elif technique == 'Meteors1':
            # Buffered non-specular meteor technique (SA or DBS sub-mode).
            dataOut.flagNoData = True
            self.__dataReady = False

            if 'nMins' in kwargs:
                nMins = kwargs['nMins']
            else: nMins = 20
            if 'rx_location' in kwargs:
                rx_location = kwargs['rx_location']
            else: rx_location = [(0,1),(1,1),(1,0)]
            if 'azimuth' in kwargs:
                azimuth = kwargs['azimuth']
            else: azimuth = 51.06
            if 'dfactor' in kwargs:
                dfactor = kwargs['dfactor']
            if 'mode' in kwargs:
                mode = kwargs['mode']
            if 'theta_x' in kwargs:
                theta_x = kwargs['theta_x']
            if 'theta_y' in kwargs:
                theta_y = kwargs['theta_y']
            # NOTE(review): this "else" attaches to the 'theta_y' check above,
            # so mode defaults to 'SA' only when theta_y is absent; it looks
            # like it was meant to pair with the 'mode' check — confirm.
            else: mode = 'SA'

            #TODO: remove this later ("Borrar luego esto")
            if dataOut.groupList is None:
                dataOut.groupList = [(0,1),(0,2),(1,2)]
            groupList = dataOut.groupList
            # Hard-coded 50 MHz radar wavenumber.
            C = 3e8
            freq = 50e6
            lamb = C/freq
            k = 2*numpy.pi/lamb

            timeList = dataOut.abscissaList
            heightList = dataOut.heightList

            if self.__isConfig == False:
                dataOut.outputInterval = nMins*60
#                self.__initime = dataOut.datatime.replace(minute = 0, second = 0, microsecond = 03)
                #Get Initial LTC time
                initime = datetime.datetime.utcfromtimestamp(dataOut.utctime)
                minuteAux = initime.minute
                # Align the interval start to an nMins boundary (epoch secs).
                minuteNew = int(numpy.floor(minuteAux/nMins)*nMins)
                self.__initime = (initime.replace(minute = minuteNew, second = 0, microsecond = 0) - datetime.datetime(1970, 1, 1)).total_seconds()

                self.__isConfig = True

            # Accumulate meteor parameters until the interval is complete.
            if self.__buffer is None:
                self.__buffer = dataOut.data_param
                self.__firstdata = copy.copy(dataOut)

            else:
                self.__buffer = numpy.vstack((self.__buffer, dataOut.data_param))

            self.__checkTime(dataOut.utctime, dataOut.paramInterval, dataOut.outputInterval) #Check if the buffer is ready

            if self.__dataReady:
                dataOut.utctimeInit = self.__initime
                self.__initime += dataOut.outputInterval #to erase time offset

                metArray = self.__buffer
                if mode == 'SA':
                    dataOut.data_output = self.techniqueNSM_SA(rx_location=rx_location, groupList=groupList, azimuth=azimuth, dfactor=dfactor, k=k,metArray=metArray, heightList=heightList,timeList=timeList)
                elif mode == 'DBS':
                    dataOut.data_output = self.techniqueNSM_DBS(metArray=metArray,heightList=heightList,timeList=timeList, azimuth=azimuth, theta_x=theta_x, theta_y=theta_y)
                dataOut.data_output = dataOut.data_output.T
                dataOut.flagNoData = False
                self.__buffer = None

        return
2318 2317
class EWDriftsEstimation(Operation):
    """Estimate vertical (w) and zonal (u) drifts from the radial velocities
    measured by two oblique beams."""

    def __init__(self):
        Operation.__init__(self)

    def __correctValues(self, heiRang, phi, velRadial, SNR):
        """Interpolate each beam's radial velocity and SNR onto a common
        zenith-corrected height grid.

        The common grid is the projected-height grid of the most oblique
        beam, clipped to the range covered by the least oblique one.
        """
        phiList = phi.tolist()
        maxid = phiList.index(max(phiList))
        minid = phiList.index(min(phiList))

        rango = list(range(len(phi)))

        # Build the common height grid.
        heiRang1 = heiRang * math.cos(phi[maxid])
        heiRangAux = heiRang * math.cos(phi[minid])
        indOut = (heiRang1 < heiRangAux[0]).nonzero()
        heiRang1 = numpy.delete(heiRang1, indOut)

        velRadial1 = numpy.zeros([len(phi), len(heiRang1)])
        SNR1 = numpy.zeros([len(phi), len(heiRang1)])

        for i in rango:
            # Heights of beam i projected onto the vertical.
            x = heiRang * math.cos(phi[i])
            velInterp = interpolate.interp1d(x, velRadial[i, :], kind='cubic')
            velRadial1[i, :] = velInterp(heiRang1)

            snrInterp = interpolate.interp1d(x, SNR[i, :], kind='cubic')
            SNR1[i, :] = snrInterp(heiRang1)

        return heiRang1, velRadial1, SNR1

    def run(self, dataOut, zenith, zenithCorrection):
        """Compute u and w from two-beam radial velocities and store them in
        ``dataOut.data_output`` (rows: [u, w])."""
        heiRang = dataOut.heightList
        velRadial = dataOut.data_param[:, 3, :]
        SNR = dataOut.data_snr

        # Corrected beam zenith angles, in radians.
        zenith = numpy.array(zenith)
        zenith -= zenithCorrection
        zenith *= numpy.pi / 180

        heiRang1, velRadial1, SNR1 = self.__correctValues(heiRang, numpy.abs(zenith), velRadial, SNR)

        alp, bet = zenith[0], zenith[1]
        w_w = velRadial1[0, :]
        w_e = velRadial1[1, :]

        # Solve the 2x2 projection system for the wind components.
        denomW = numpy.cos(alp) * numpy.sin(bet) - numpy.cos(bet) * numpy.sin(alp)
        denomU = numpy.sin(alp) * numpy.cos(bet) - numpy.sin(bet) * numpy.cos(alp)
        w = (w_w * numpy.sin(bet) - w_e * numpy.sin(alp)) / denomW
        u = (w_w * numpy.cos(bet) - w_e * numpy.cos(alp)) / denomU

        dataOut.heightList = heiRang1
        dataOut.data_output = numpy.vstack((u, w))
        dataOut.data_snr = SNR1

        dataOut.utctimeInit = dataOut.utctime
        dataOut.outputInterval = dataOut.timeInterval
        return
2386 2385
2387 2386 #--------------- Non Specular Meteor ----------------
2388 2387
class NonSpecularMeteorDetection(Operation):

    def run(self, dataOut, mode, SNRthresh=8, phaseDerThresh=0.5, cohThresh=0.8, allData = False):
        """Detect non-specular meteor echoes from ACF/CCF data.

        mode -- 'SA' (spaced antenna: pair coherence/phase based masks) or
        'DBS' (Doppler beam swinging: per-channel SNR/velocity/width masks).
        With allData=True the masks are bypassed and every finite sample is
        kept.  Produces one row per detected sample in ``dataOut.data_param``
        (sets ``dataOut.flagNoData`` when nothing was detected).
        """
        data_acf = dataOut.data_pre[0]
        data_ccf = dataOut.data_pre[1]
        pairsList = dataOut.groupList[1]

        lamb = dataOut.C/dataOut.frequency
        tSamp = dataOut.ippSeconds*dataOut.nCohInt
        paramInterval = dataOut.paramInterval

        nChannels = data_acf.shape[0]
        nLags = data_acf.shape[1]
        nProfiles = data_acf.shape[2]
        nHeights = dataOut.nHeights
        nCohInt = dataOut.nCohInt
        # Profiles per second (approximate sampling rate of the time axis).
        sec = numpy.round(nProfiles/dataOut.paramInterval)
        heightList = dataOut.heightList
        ippSeconds = dataOut.ippSeconds*dataOut.nCohInt*dataOut.nAvg
        utctime = dataOut.utctime

        dataOut.abscissaList = numpy.arange(0,paramInterval+ippSeconds,ippSeconds)

        #------------------------ SNR --------------------------------------
        # Power is the zero-lag ACF; noise per channel via Hildebrand-Sekhon.
        power = data_acf[:,0,:,:].real
        noise = numpy.zeros(nChannels)
        SNR = numpy.zeros(power.shape)
        for i in range(nChannels):
            noise[i] = hildebrand_sekhon(power[i,:], nCohInt)
            SNR[i] = (power[i]-noise[i])/noise[i]
        SNRm = numpy.nanmean(SNR, axis = 0)
        SNRdB = 10*numpy.log10(SNR)

        if mode == 'SA':
            dataOut.groupList = dataOut.groupList[1]
            nPairs = data_ccf.shape[0]
            #---------------------- Coherence and Phase --------------------------
            # Normalized cross-correlation per pair, median-filtered in time.
            phase = numpy.zeros(data_ccf[:,0,:,:].shape)
#            phase1 = numpy.copy(phase)
            coh1 = numpy.zeros(data_ccf[:,0,:,:].shape)

            for p in range(nPairs):
                ch0 = pairsList[p][0]
                ch1 = pairsList[p][1]
                ccf = data_ccf[p,0,:,:]/numpy.sqrt(data_acf[ch0,0,:,:]*data_acf[ch1,0,:,:])
                phase[p,:,:] = ndimage.median_filter(numpy.angle(ccf), size = (5,1)) #median filter
#                phase1[p,:,:] = numpy.angle(ccf) #median filter
                coh1[p,:,:] = ndimage.median_filter(numpy.abs(ccf), 5)  #median filter
#                coh1[p,:,:] = numpy.abs(ccf) #median filter
            # Best (max) coherence over all pairs.
            coh = numpy.nanmax(coh1, axis = 0)
#            struc = numpy.ones((5,1))
#            coh = ndimage.morphology.grey_dilation(coh, size=(10,1))
            #---------------------- Radial Velocity ----------------------------
            # Lag-1 ACF phase averaged over channels -> Doppler velocity.
            phaseAux = numpy.mean(numpy.angle(data_acf[:,1,:,:]), axis = 0)
            velRad = phaseAux*lamb/(4*numpy.pi*tSamp)

            if allData:
                boolMetFin = ~numpy.isnan(SNRm)
#                coh[:-1,:] = numpy.nanmean(numpy.abs(phase[:,1:,:] - phase[:,:-1,:]),axis=0)
            else:
                #------------------------ Meteor mask ---------------------------------
#                #SNR mask
#                boolMet = (SNRdB>SNRthresh)#|(~numpy.isnan(SNRdB))
#
#                #Erase small objects
#                boolMet1 = self.__erase_small(boolMet, 2*sec, 5)
#
#                auxEEJ = numpy.sum(boolMet1,axis=0)
#                indOver = auxEEJ>nProfiles*0.8 #Use this later
#                indEEJ = numpy.where(indOver)[0]
#                indNEEJ = numpy.where(~indOver)[0]
#
#                boolMetFin = boolMet1
#
#                if indEEJ.size > 0:
#                    boolMet1[:,indEEJ] = False    #Erase heights with EEJ
#
#                boolMet2 = coh > cohThresh
#                boolMet2 = self.__erase_small(boolMet2, 2*sec,5)
#
#                #Final Meteor mask
#                boolMetFin = boolMet1|boolMet2

                #Coherence mask
                boolMet1 = coh > 0.75
                struc = numpy.ones((30,1))
                boolMet1 = ndimage.morphology.binary_dilation(boolMet1, structure=struc)

                #Derivative mask
                # Mean absolute time-derivative of the pair phases; meteors
                # have a smooth phase evolution.
                derPhase = numpy.nanmean(numpy.abs(phase[:,1:,:] - phase[:,:-1,:]),axis=0)
                boolMet2 = derPhase < 0.2
#                boolMet2 = ndimage.morphology.binary_opening(boolMet2)
#                boolMet2 = ndimage.morphology.binary_closing(boolMet2, structure = numpy.ones((10,1)))
                boolMet2 = ndimage.median_filter(boolMet2,size=5)
                boolMet2 = numpy.vstack((boolMet2,numpy.full((1,nHeights), True, dtype=bool)))
#                #Final mask
#                boolMetFin = boolMet2
                boolMetFin = boolMet1&boolMet2
#                boolMetFin = ndimage.morphology.binary_dilation(boolMetFin)
            #Creating data_param
            # One row per masked sample:
            # [utctime, time idx, height idx, SNR, velRad, coh, phase per pair]
            coordMet = numpy.where(boolMetFin)

            tmet = coordMet[0]
            hmet = coordMet[1]

            data_param = numpy.zeros((tmet.size, 6 + nPairs))
            data_param[:,0] = utctime
            data_param[:,1] = tmet
            data_param[:,2] = hmet
            data_param[:,3] = SNRm[tmet,hmet]
            data_param[:,4] = velRad[tmet,hmet]
            data_param[:,5] = coh[tmet,hmet]
            data_param[:,6:] = phase[:,tmet,hmet].T

        elif mode == 'DBS':
            dataOut.groupList = numpy.arange(nChannels)

            #Radial Velocities
            # Lag-1 ACF phase per channel -> Doppler velocity.
            phase = numpy.angle(data_acf[:,1,:,:])
#            phase = ndimage.median_filter(numpy.angle(data_acf[:,1,:,:]), size = (1,5,1))
            velRad = phase*lamb/(4*numpy.pi*tSamp)

            #Spectral width
#            acf1 = ndimage.median_filter(numpy.abs(data_acf[:,1,:,:]), size = (1,5,1))
#            acf2 = ndimage.median_filter(numpy.abs(data_acf[:,2,:,:]), size = (1,5,1))
            acf1 = data_acf[:,1,:,:]
            acf2 = data_acf[:,2,:,:]

            # Gaussian-model spectral width from the lag-1/lag-2 ACF ratio.
            spcWidth = (lamb/(2*numpy.sqrt(6)*numpy.pi*tSamp))*numpy.sqrt(numpy.log(acf1/acf2))
#            velRad = ndimage.median_filter(velRad, size = (1,5,1))
            if allData:
                boolMetFin = ~numpy.isnan(SNRdB)
            else:
                #SNR
                boolMet1 = (SNRdB>SNRthresh) #SNR mask
                boolMet1 = ndimage.median_filter(boolMet1, size=(1,5,5))

                #Radial velocity
                boolMet2 = numpy.abs(velRad) < 20
                boolMet2 = ndimage.median_filter(boolMet2, (1,5,5))

                #Spectral Width
                boolMet3 = spcWidth < 30
                boolMet3 = ndimage.median_filter(boolMet3, (1,5,5))
#                boolMetFin = self.__erase_small(boolMet1, 10,5)
                boolMetFin = boolMet1&boolMet2&boolMet3

            #Creating data_param
            # One row per masked sample:
            # [utctime, channel, time idx, height idx, SNR, velRad, spcWidth]
            coordMet = numpy.where(boolMetFin)

            cmet = coordMet[0]
            tmet = coordMet[1]
            hmet = coordMet[2]

            data_param = numpy.zeros((tmet.size, 7))
            data_param[:,0] = utctime
            data_param[:,1] = cmet
            data_param[:,2] = tmet
            data_param[:,3] = hmet
            data_param[:,4] = SNR[cmet,tmet,hmet].T
            data_param[:,5] = velRad[cmet,tmet,hmet].T
            data_param[:,6] = spcWidth[cmet,tmet,hmet].T

#        self.dataOut.data_param = data_int
        # NOTE(review): if mode is neither 'SA' nor 'DBS', data_param is
        # undefined here and this raises NameError — confirm intended modes.
        if len(data_param) == 0:
            dataOut.flagNoData = True
        else:
            dataOut.data_param = data_param

    def __erase_small(self, binArray, threshX, threshY):
        """Remove connected components of *binArray* that are too small
        (area < 50) or too narrow along either axis."""
        labarray, numfeat = ndimage.measurements.label(binArray)
        binArray1 = numpy.copy(binArray)

        for i in range(1,numfeat + 1):
            auxBin = (labarray==i)
            auxSize = auxBin.sum()

            x,y = numpy.where(auxBin)
            widthX = x.max() - x.min()
            widthY = y.max() - y.min()

            #width X: 3 seg -> 12.5*3
            #width Y:

            if (auxSize < 50) or (widthX < threshX) or (widthY < threshY):
                binArray1[auxBin] = False

        return binArray1
2577 2576
2578 2577 #--------------- Specular Meteor ----------------
2579 2578
2580 2579 class SMDetection(Operation):
2581 2580 '''
2582 2581 Function DetectMeteors()
2583 2582 Project developed with paper:
2584 2583 HOLDSWORTH ET AL. 2004
2585 2584
2586 2585 Input:
2587 2586 self.dataOut.data_pre
2588 2587
2589 2588 centerReceiverIndex: From the channels, which is the center receiver
2590 2589
2591 2590 hei_ref: Height reference for the Beacon signal extraction
2592 2591 tauindex:
2593 2592 predefinedPhaseShifts: Predefined phase offset for the voltge signals
2594 2593
2595 2594 cohDetection: Whether to user Coherent detection or not
2596 2595 cohDet_timeStep: Coherent Detection calculation time step
2597 2596 cohDet_thresh: Coherent Detection phase threshold to correct phases
2598 2597
2599 2598 noise_timeStep: Noise calculation time step
2600 2599 noise_multiple: Noise multiple to define signal threshold
2601 2600
2602 2601 multDet_timeLimit: Multiple Detection Removal time limit in seconds
2603 2602 multDet_rangeLimit: Multiple Detection Removal range limit in km
2604 2603
2605 2604 phaseThresh: Maximum phase difference between receiver to be consider a meteor
2606 2605 SNRThresh: Minimum SNR threshold of the meteor signal to be consider a meteor
2607 2606
2608 2607 hmin: Minimum Height of the meteor to use it in the further wind estimations
2609 2608 hmax: Maximum Height of the meteor to use it in the further wind estimations
2610 2609 azimuth: Azimuth angle correction
2611 2610
2612 2611 Affected:
2613 2612 self.dataOut.data_param
2614 2613
2615 2614 Rejection Criteria (Errors):
2616 2615 0: No error; analysis OK
2617 2616 1: SNR < SNR threshold
2618 2617 2: angle of arrival (AOA) ambiguously determined
2619 2618 3: AOA estimate not feasible
2620 2619 4: Large difference in AOAs obtained from different antenna baselines
2621 2620 5: echo at start or end of time series
2622 2621 6: echo less than 5 examples long; too short for analysis
2623 2622 7: echo rise exceeds 0.3s
2624 2623 8: echo decay time less than twice rise time
2625 2624 9: large power level before echo
2626 2625 10: large power level after echo
2627 2626 11: poor fit to amplitude for estimation of decay time
2628 2627 12: poor fit to CCF phase variation for estimation of radial drift velocity
2629 2628 13: height unresolvable echo: not valid height within 70 to 110 km
2630 2629 14: height ambiguous echo: more then one possible height within 70 to 110 km
2631 2630 15: radial drift velocity or projected horizontal velocity exceeds 200 m/s
2632 2631 16: oscilatory echo, indicating event most likely not an underdense echo
2633 2632
2634 2633 17: phase difference in meteor Reestimation
2635 2634
2636 2635 Data Storage:
2637 2636 Meteors for Wind Estimation (8):
2638 2637 Utc Time | Range Height
2639 2638 Azimuth Zenith errorCosDir
2640 2639 VelRad errorVelRad
2641 2640 Phase0 Phase1 Phase2 Phase3
2642 2641 TypeError
2643 2642
2644 2643 '''
2645 2644
2646 2645 def run(self, dataOut, hei_ref = None, tauindex = 0,
2647 2646 phaseOffsets = None,
2648 2647 cohDetection = False, cohDet_timeStep = 1, cohDet_thresh = 25,
2649 2648 noise_timeStep = 4, noise_multiple = 4,
2650 2649 multDet_timeLimit = 1, multDet_rangeLimit = 3,
2651 2650 phaseThresh = 20, SNRThresh = 5,
2652 2651 hmin = 50, hmax=150, azimuth = 0,
2653 2652 channelPositions = None) :
2654 2653
2655 2654
2656 2655 #Getting Pairslist
2657 2656 if channelPositions is None:
2658 2657 # channelPositions = [(2.5,0), (0,2.5), (0,0), (0,4.5), (-2,0)] #T
2659 2658 channelPositions = [(4.5,2), (2,4.5), (2,2), (2,0), (0,2)] #Estrella
2660 2659 meteorOps = SMOperations()
2661 2660 pairslist0, distances = meteorOps.getPhasePairs(channelPositions)
2662 2661 heiRang = dataOut.heightList
2663 2662 #Get Beacon signal - No Beacon signal anymore
2664 2663 # newheis = numpy.where(self.dataOut.heightList>self.dataOut.radarControllerHeaderObj.Taus[tauindex])
2665 2664 #
2666 2665 # if hei_ref != None:
2667 2666 # newheis = numpy.where(self.dataOut.heightList>hei_ref)
2668 2667 #
2669 2668
2670 2669
2671 2670 #****************REMOVING HARDWARE PHASE DIFFERENCES***************
2672 2671 # see if the user put in pre defined phase shifts
2673 2672 voltsPShift = dataOut.data_pre.copy()
2674 2673
2675 2674 # if predefinedPhaseShifts != None:
2676 2675 # hardwarePhaseShifts = numpy.array(predefinedPhaseShifts)*numpy.pi/180
2677 2676 #
2678 2677 # # elif beaconPhaseShifts:
2679 2678 # # #get hardware phase shifts using beacon signal
2680 2679 # # hardwarePhaseShifts = self.__getHardwarePhaseDiff(self.dataOut.data_pre, pairslist, newheis, 10)
2681 2680 # # hardwarePhaseShifts = numpy.insert(hardwarePhaseShifts,centerReceiverIndex,0)
2682 2681 #
2683 2682 # else:
2684 2683 # hardwarePhaseShifts = numpy.zeros(5)
2685 2684 #
2686 2685 # voltsPShift = numpy.zeros((self.dataOut.data_pre.shape[0],self.dataOut.data_pre.shape[1],self.dataOut.data_pre.shape[2]), dtype = 'complex')
2687 2686 # for i in range(self.dataOut.data_pre.shape[0]):
2688 2687 # voltsPShift[i,:,:] = self.__shiftPhase(self.dataOut.data_pre[i,:,:], hardwarePhaseShifts[i])
2689 2688
2690 2689 #******************END OF REMOVING HARDWARE PHASE DIFFERENCES*********
2691 2690
2692 2691 #Remove DC
2693 2692 voltsDC = numpy.mean(voltsPShift,1)
2694 2693 voltsDC = numpy.mean(voltsDC,1)
2695 2694 for i in range(voltsDC.shape[0]):
2696 2695 voltsPShift[i] = voltsPShift[i] - voltsDC[i]
2697 2696
2698 2697 #Don't considerate last heights, theyre used to calculate Hardware Phase Shift
2699 2698 # voltsPShift = voltsPShift[:,:,:newheis[0][0]]
2700 2699
2701 2700 #************ FIND POWER OF DATA W/COH OR NON COH DETECTION (3.4) **********
2702 2701 #Coherent Detection
2703 2702 if cohDetection:
2704 2703 #use coherent detection to get the net power
2705 2704 cohDet_thresh = cohDet_thresh*numpy.pi/180
2706 2705 voltsPShift = self.__coherentDetection(voltsPShift, cohDet_timeStep, dataOut.timeInterval, pairslist0, cohDet_thresh)
2707 2706
2708 2707 #Non-coherent detection!
2709 2708 powerNet = numpy.nansum(numpy.abs(voltsPShift[:,:,:])**2,0)
2710 2709 #********** END OF COH/NON-COH POWER CALCULATION**********************
2711 2710
2712 2711 #********** FIND THE NOISE LEVEL AND POSSIBLE METEORS ****************
2713 2712 #Get noise
2714 2713 noise, noise1 = self.__getNoise(powerNet, noise_timeStep, dataOut.timeInterval)
2715 2714 # noise = self.getNoise1(powerNet, noise_timeStep, self.dataOut.timeInterval)
2716 2715 #Get signal threshold
2717 2716 signalThresh = noise_multiple*noise
2718 2717 #Meteor echoes detection
2719 2718 listMeteors = self.__findMeteors(powerNet, signalThresh)
2720 2719 #******* END OF NOISE LEVEL AND POSSIBLE METEORS CACULATION **********
2721 2720
2722 2721 #************** REMOVE MULTIPLE DETECTIONS (3.5) ***************************
2723 2722 #Parameters
2724 2723 heiRange = dataOut.heightList
2725 2724 rangeInterval = heiRange[1] - heiRange[0]
2726 2725 rangeLimit = multDet_rangeLimit/rangeInterval
2727 2726 timeLimit = multDet_timeLimit/dataOut.timeInterval
2728 2727 #Multiple detection removals
2729 2728 listMeteors1 = self.__removeMultipleDetections(listMeteors, rangeLimit, timeLimit)
2730 2729 #************ END OF REMOVE MULTIPLE DETECTIONS **********************
2731 2730
2732 2731 #********************* METEOR REESTIMATION (3.7, 3.8, 3.9, 3.10) ********************
2733 2732 #Parameters
2734 2733 phaseThresh = phaseThresh*numpy.pi/180
2735 2734 thresh = [phaseThresh, noise_multiple, SNRThresh]
2736 2735 #Meteor reestimation (Errors N 1, 6, 12, 17)
2737 2736 listMeteors2, listMeteorsPower, listMeteorsVolts = self.__meteorReestimation(listMeteors1, voltsPShift, pairslist0, thresh, noise, dataOut.timeInterval, dataOut.frequency)
2738 2737 # listMeteors2, listMeteorsPower, listMeteorsVolts = self.meteorReestimation3(listMeteors2, listMeteorsPower, listMeteorsVolts, voltsPShift, pairslist, thresh, noise)
2739 2738 #Estimation of decay times (Errors N 7, 8, 11)
2740 2739 listMeteors3 = self.__estimateDecayTime(listMeteors2, listMeteorsPower, dataOut.timeInterval, dataOut.frequency)
2741 2740 #******************* END OF METEOR REESTIMATION *******************
2742 2741
2743 2742 #********************* METEOR PARAMETERS CALCULATION (3.11, 3.12, 3.13) **************************
2744 2743 #Calculating Radial Velocity (Error N 15)
2745 2744 radialStdThresh = 10
2746 2745 listMeteors4 = self.__getRadialVelocity(listMeteors3, listMeteorsVolts, radialStdThresh, pairslist0, dataOut.timeInterval)
2747 2746
2748 2747 if len(listMeteors4) > 0:
2749 2748 #Setting New Array
2750 2749 date = dataOut.utctime
2751 2750 arrayParameters = self.__setNewArrays(listMeteors4, date, heiRang)
2752 2751
2753 2752 #Correcting phase offset
2754 2753 if phaseOffsets != None:
2755 2754 phaseOffsets = numpy.array(phaseOffsets)*numpy.pi/180
2756 2755 arrayParameters[:,8:12] = numpy.unwrap(arrayParameters[:,8:12] + phaseOffsets)
2757 2756
2758 2757 #Second Pairslist
2759 2758 pairsList = []
2760 2759 pairx = (0,1)
2761 2760 pairy = (2,3)
2762 2761 pairsList.append(pairx)
2763 2762 pairsList.append(pairy)
2764 2763
2765 2764 jph = numpy.array([0,0,0,0])
2766 2765 h = (hmin,hmax)
2767 2766 arrayParameters = meteorOps.getMeteorParams(arrayParameters, azimuth, h, pairsList, distances, jph)
2768 2767
2769 2768 # #Calculate AOA (Error N 3, 4)
2770 2769 # #JONES ET AL. 1998
2771 2770 # error = arrayParameters[:,-1]
2772 2771 # AOAthresh = numpy.pi/8
2773 2772 # phases = -arrayParameters[:,9:13]
2774 2773 # arrayParameters[:,4:7], arrayParameters[:,-1] = meteorOps.getAOA(phases, pairsList, error, AOAthresh, azimuth)
2775 2774 #
2776 2775 # #Calculate Heights (Error N 13 and 14)
2777 2776 # error = arrayParameters[:,-1]
2778 2777 # Ranges = arrayParameters[:,2]
2779 2778 # zenith = arrayParameters[:,5]
2780 2779 # arrayParameters[:,3], arrayParameters[:,-1] = meteorOps.getHeights(Ranges, zenith, error, hmin, hmax)
2781 2780 # error = arrayParameters[:,-1]
2782 2781 #********************* END OF PARAMETERS CALCULATION **************************
2783 2782
2784 2783 #***************************+ PASS DATA TO NEXT STEP **********************
2785 2784 # arrayFinal = arrayParameters.reshape((1,arrayParameters.shape[0],arrayParameters.shape[1]))
2786 2785 dataOut.data_param = arrayParameters
2787 2786
2788 2787 if arrayParameters is None:
2789 2788 dataOut.flagNoData = True
2790 2789 else:
2791 2790 dataOut.flagNoData = True
2792 2791
2793 2792 return
2794 2793
def __getHardwarePhaseDiff(self, voltage0, pairslist, newheis, n):
    """Estimate the hardware phase offset of each antenna pair.

    Splits the profiles of the beacon heights (``newheis``) into ``n``
    segments, measures the zero-lag CCF phase of every pair per segment,
    rejects outliers against a median-filtered version of the series and
    averages the surviving segments.

    Parameters
    ----------
    voltage0  : complex array (nChannels, nProfiles, nHeights)
    pairslist : list of (ch0, ch1) channel pairs
    newheis   : tuple of index arrays; newheis[0] selects the beacon heights
    n         : number of time segments to average over

    Returns a (len(pairslist),) array of phase offsets in radians.
    """
    minIndex = min(newheis[0])
    maxIndex = max(newheis[0])

    voltage = voltage0[:, :, minIndex:maxIndex + 1]
    # BUG FIX: use floor division — under Python 3 true division yields a
    # float, and the float segment bounds below raise TypeError when slicing.
    nLength = voltage.shape[1] // n
    nMin = 0
    nMax = 0
    phaseOffset = numpy.zeros((len(pairslist), n))

    for i in range(n):
        nMax += nLength
        # Zero-lag CCF phase per pair, averaged over the beacon heights
        phaseCCF = -numpy.angle(self.__calculateCCF(voltage[:, nMin:nMax, :], pairslist, [0]))
        phaseCCF = numpy.mean(phaseCCF, axis=2)
        phaseOffset[:, i] = phaseCCF.transpose()
        nMin = nMax

    # Remove outliers: deviations beyond `factor` sigma from the
    # median-filtered (kernel 1x5) segment series are discarded.
    factor = 2
    wt = phaseOffset - signal.medfilt(phaseOffset, (1, 5))
    dw = numpy.std(wt, axis=1)
    dw = dw.reshape((dw.size, 1))
    ind = numpy.where(numpy.logical_or(wt > dw * factor, wt < -dw * factor))
    phaseOffset[ind] = numpy.nan
    # BUG FIX: scipy.stats.nanmean was removed in SciPy >= 0.18; the NaN-aware
    # mean now lives in numpy.
    phaseOffset = numpy.nanmean(phaseOffset, axis=1)

    return phaseOffset
2824 2823
def __shiftPhase(self, data, phaseShift):
    """Rotate complex samples by ``phaseShift`` radians, preserving magnitude."""
    magnitude = numpy.abs(data)
    rotatedPhase = numpy.angle(data) + phaseShift
    return magnitude * numpy.exp(1j * rotatedPhase)
2829 2828
def __estimatePhaseDifference(self, array, pairslist):
    # Estimate, for every channel pair, the phase difference at lag 0 and the
    # arrival phase, from a linear fit of the CCF phase across lags -2..2.
    # array: complex (nChannels, nProfiles, nHeights); pairslist: list of pairs.
    # Returns (phaseDiff, phaseArrival):
    #   phaseDiff    - fitted intercept minus the measured lag-0 phase, per pair
    #   phaseArrival - fitted lag-0 phase, dealiased into (-pi, pi], per pair
    nChannel = array.shape[0]
    nHeights = array.shape[2]
    numPairs = len(pairslist)
    # CCF phase evaluated at lags -2,-1,0,1,2 for every pair
    phaseCCF = numpy.angle(self.__calculateCCF(array, pairslist, [-2,-1,0,1,2]))

    # Correct phases: unwrap jumps larger than pi between consecutive lags
    derPhaseCCF = phaseCCF[:,1:,:] - phaseCCF[:,0:-1,:]
    indDer = numpy.where(numpy.abs(derPhaseCCF) > numpy.pi)

    if indDer[0].shape[0] > 0:
        for i in range(indDer[0].shape[0]):
            # add +/- 2*pi to every lag after the detected jump
            signo = -numpy.sign(derPhaseCCF[indDer[0][i],indDer[1][i],indDer[2][i]])
            phaseCCF[indDer[0][i],indDer[1][i]+1:,:] += signo*2*numpy.pi

    # Linear fit of phase vs lag, excluding lag 0 (columns 0,1,3,4 of the lag
    # axis); the intercept is the interpolated phase at lag 0.
    phaseInt = numpy.zeros((numPairs,1))
    angAllCCF = phaseCCF[:,[0,1,3,4],0]
    for j in range(numPairs):
        fit = stats.linregress([-2,-1,1,2],angAllCCF[j,:])
        phaseInt[j] = fit[1]
    # Phase differences: fitted intercept minus the measured lag-0 phase
    phaseDiff = phaseInt - phaseCCF[:,2,:]
    phaseArrival = phaseInt.reshape(phaseInt.size)

    # Dealias the arrival phase into (-pi, pi]
    phaseArrival = numpy.angle(numpy.exp(1j*phaseArrival))

    return phaseDiff, phaseArrival
2868 2867
def __coherentDetection(self, volts, timeSegment, timeInterval, pairslist, thresh):
    # Coherent detection as used in Holdsworth et al. 2004: over blocks of
    # ~timeSegment seconds, estimate the per-pair phase difference and, when
    # all 4 pairs agree to within `thresh` radians, phase-align the side
    # channels so their powers add coherently.  Returns the aligned copy.
    numProfPerBlock = numpy.ceil(timeSegment/timeInterval)
    numBlocks = int(volts.shape[1]/numProfPerBlock)
    numHeights = volts.shape[2]
    nChannel = volts.shape[0]
    voltsCohDet = volts.copy()

    # side channels are the second element of each pair
    pairsarray = numpy.array(pairslist)
    indSides = pairsarray[:,1]

    # split the profile axis into numBlocks roughly-equal blocks
    listBlocks = numpy.array_split(volts, numBlocks, 1)

    startInd = 0
    endInd = 0

    for i in range(numBlocks):
        startInd = endInd
        endInd = endInd + listBlocks[i].shape[1]

        arrayBlock = listBlocks[i]

        # Estimate the phase difference of each pair over this block
        phaseDiff, aux = self.__estimatePhaseDifference(arrayBlock, pairslist)
        # Phase difference RMS; a height is "coherent" when all 4 pairs
        # are below the threshold
        arrayPhaseRMS = numpy.abs(phaseDiff)
        phaseRMSaux = numpy.sum(arrayPhaseRMS < thresh,0)
        indPhase = numpy.where(phaseRMSaux==4)
        # Shift the side channels of the coherent heights into alignment
        if indPhase[0].shape[0] > 0:
            for j in range(indSides.size):
                arrayBlock[indSides[j],:,indPhase] = self.__shiftPhase(arrayBlock[indSides[j],:,indPhase], phaseDiff[j,indPhase].transpose())
            voltsCohDet[:,startInd:endInd,:] = arrayBlock

    return voltsCohDet
2910 2909
def __calculateCCF(self, volts, pairslist, laglist):
    """Cross-correlate each channel pair of ``volts`` at the given lags.

    volts: complex (nChannels, nProfiles, nHeights).  Positive lags shift the
    second channel backwards, zero-padding the tail; negative lags pad the
    head.  Returns a complex (nPairs, nLags, nHeights) array of lagged sums.
    """
    nHeights = volts.shape[2]
    nPoints = volts.shape[1]
    voltsCCF = numpy.zeros((len(pairslist), len(laglist), nHeights), dtype='complex')

    for iPair in range(len(pairslist)):
        refSignal = volts[pairslist[iPair][0]]
        lagSignal = volts[pairslist[iPair][1]]

        for iLag in range(len(laglist)):
            lagVal = laglist[iLag]
            if lagVal >= 0:
                # shift forward, pad the end with zeros
                shifted = numpy.vstack((lagSignal[lagVal:, :],
                                        numpy.zeros((lagVal, nHeights), dtype='complex')))
            else:
                # shift backward, pad the start with zeros
                shifted = numpy.vstack((numpy.zeros((-lagVal, nHeights), dtype='complex'),
                                        lagSignal[:(nPoints + lagVal), :]))
            voltsCCF[iPair, iLag, :] = numpy.sum(numpy.conjugate(refSignal) * shifted, axis=0)

        shifted = None
    return voltsCCF
2933 2932
def __getNoise(self, power, timeSegment, timeInterval):
    """Estimate the noise level per time block of ~timeSegment seconds.

    power: (nProfiles, nHeights) net power.  Returns two (nProfiles, nHeights)
    arrays: per-height block means, and per-block scalar means, each broadcast
    back onto the profiles of its block.
    """
    profilesPerBlock = numpy.ceil(timeSegment / timeInterval)
    nBlocks = int(power.shape[0] / profilesPerBlock)

    blockList = numpy.array_split(power, nBlocks, 0)
    noisePerHeight = numpy.zeros((power.shape[0], power.shape[1]))
    noisePerBlock = numpy.zeros((power.shape[0], power.shape[1]))

    first = 0
    for block in blockList:
        last = first + block.shape[0]
        # mean over the block's profiles, kept per height
        noisePerHeight[first:last, :] += numpy.mean(block, 0)
        # single scalar mean of the whole block
        noisePerBlock[first:last, :] += numpy.mean(block)
        first = last

    return noisePerHeight, noisePerBlock
2960 2959
def __findMeteors(self, power, thresh):
    # Scan each height profile for runs of power above the detection threshold
    # and record candidate meteor echoes.
    # power:  (nProfiles, nHeights) net power
    # thresh: (nProfiles, nHeights) signal threshold (noise * multiplier)
    # Returns a list of arrays [heightIndex, indInit, indPeak, indEnd, FLA].
    nProf = power.shape[0]
    nHeights = power.shape[1]
    listMeteors = []

    for i in range(nHeights):
        powerAux = power[:,i]
        threshAux = thresh[:,i]

        indUPthresh = numpy.where(powerAux > threshAux)[0]
        indDNthresh = numpy.where(powerAux <= threshAux)[0]

        j = 0

        while (j < indUPthresh.size - 2):
            # an echo starts when 3 consecutive profiles are above threshold
            if (indUPthresh[j + 2] == indUPthresh[j] + 2):
                indDNAux = numpy.where(indDNthresh > indUPthresh[j])
                indDNthresh = indDNthresh[indDNAux]

                if (indDNthresh.size > 0):
                    # echo spans from the first above-threshold profile to the
                    # profile just before the next below-threshold one
                    indEnd = indDNthresh[0] - 1
                    indInit = indUPthresh[j]

                    meteor = powerAux[indInit:indEnd + 1]
                    indPeak = meteor.argmax() + indInit
                    # FLA: lag-1 correlation of the echo power series
                    FLA = sum(numpy.conj(meteor)*numpy.hstack((meteor[1:],0)))

                    listMeteors.append(numpy.array([i,indInit,indPeak,indEnd,FLA])) #CHECK!!!!!
                    # NOTE(review): numpy.where returns an array, so j becomes a
                    # 1-element array here; the < comparison above still works
                    # elementwise but this is fragile — verify.
                    j = numpy.where(indUPthresh == indEnd)[0] + 1
                else: j+=1
            else: j+=1

    return listMeteors
2994 2993
def __removeMultipleDetections(self, listMeteors, rangeLimit, timeLimit):
    """Collapse clustered detections, keeping the strongest per neighborhood.

    Repeatedly selects the candidate with the largest FLA (column 4), keeps
    it, and deletes every candidate within +/- rangeLimit height bins and
    +/- timeLimit profiles of it (including itself).  Returns the kept rows.
    """
    remaining = numpy.asarray(listMeteors)
    keptMeteors = []

    while remaining.shape[0] > 0:
        # strongest remaining candidate by FLA
        best = remaining[:, 4].argmax()
        keptMeteors.append(remaining[best, :])

        bestInitTime = remaining[best, 1]
        bestEndTime = remaining[best, 3]
        bestHeight = remaining[best, 0]

        # neighborhood bounds around the winner
        inHeight = numpy.logical_and(remaining[:, 0] >= bestHeight - rangeLimit,
                                     remaining[:, 0] <= bestHeight + rangeLimit)
        inTime = numpy.logical_and(remaining[:, 3] >= bestInitTime - timeLimit,
                                   remaining[:, 1] <= bestEndTime + timeLimit)
        neighbors = numpy.where(numpy.logical_and(inTime, inHeight))

        # drop the winner and all its neighbors before the next pass
        remaining = numpy.delete(remaining, neighbors, axis=0)

    return keptMeteors
3023 3022
def __meteorReestimation(self, listMeteors, volts, pairslist, thresh, noise, timeInterval, frequency):
    """Reestimate meteor parameters (algorithm steps 3.6-3.8).

    For each candidate: estimate and remove the per-pair phase difference,
    recompute start/peak/end times against the noise threshold, re-estimate
    the phase differences on the decay portion, and apply the rejection
    criteria (error codes 1, 6, 17 in meteorAux[-1]).

    Returns (listMeteors1, listPowerSeries, listVoltageSeries), one entry per
    input candidate: the 16-element parameter vector, the power time series
    and the aligned voltage series.
    """
    numHeights = volts.shape[2]
    nChannel = volts.shape[0]

    thresholdPhase = thresh[0]
    thresholdNoise = thresh[1]
    thresholdDB = float(thresh[2])

    thresholdDB1 = 10**(thresholdDB/10)
    pairsarray = numpy.array(pairslist)
    indSides = pairsarray[:,1]

    # extended pair list used for the decay-portion phase re-estimation
    pairslist1 = list(pairslist)
    pairslist1.append((0,1))
    pairslist1.append((3,4))

    listMeteors1 = []
    listPowerSeries = []
    listVoltageSeries = []
    # volts holds the raw voltage data

    # Lag (in profiles) past the peak used for the decay portion,
    # depending on the radar frequency (30 or 50 MHz).
    if frequency == 30e6:
        timeLag = 45*10**-3
    else:
        timeLag = 15*10**-3
    # BUG FIX: numpy.ceil returns a float; a float cannot be used as a slice
    # index (TypeError on Python 3 / modern numpy), so cast to int.
    lag = int(numpy.ceil(timeLag/timeInterval))

    for i in range(len(listMeteors)):

        ###################### 3.6 - 3.7 PARAMETERS REESTIMATION #########################
        meteorAux = numpy.zeros(16)

        # Loading meteor data (mHeight, mStart, mPeak, mEnd).
        # BUG FIX: the candidate rows are float arrays, so these values must be
        # cast to int before being used as array indices below.
        mHeight = int(listMeteors[i][0])
        mStart = int(listMeteors[i][1])
        mPeak = int(listMeteors[i][2])
        mEnd = int(listMeteors[i][3])

        # voltage data between the start and end times of the meteor
        meteorVolts = volts[:,mStart:mEnd+1,mHeight]
        meteorVolts = meteorVolts.reshape(meteorVolts.shape[0], meteorVolts.shape[1], 1)

        #3.6. Phase Difference estimation
        phaseDiff, aux = self.__estimatePhaseDifference(meteorVolts, pairslist)

        #3.7. Phase difference removal & meteor start, peak and end times reestimated
        # meteorVolts0: all channels, all profiles at this height
        meteorVolts0 = volts[:,:,mHeight]
        meteorThresh = noise[:,mHeight]*thresholdNoise
        meteorNoise = noise[:,mHeight]
        meteorVolts0[indSides,:] = self.__shiftPhase(meteorVolts0[indSides,:], phaseDiff) #Phase Shifting
        powerNet0 = numpy.nansum(numpy.abs(meteorVolts0)**2, axis = 0) #Power

        # Times reestimation: new start is the profile after the last
        # below-threshold profile preceding the peak
        mStart1 = numpy.where(powerNet0[:mPeak] < meteorThresh[:mPeak])[0]
        if mStart1.size > 0:
            mStart1 = mStart1[-1] + 1
        else:
            mStart1 = mPeak

        # new end: profile before the first below-threshold profile after the peak
        mEnd1 = numpy.where(powerNet0[mPeak:] < meteorThresh[mPeak:])[0][0] + mPeak - 1
        mEndDecayTime1 = numpy.where(powerNet0[mPeak:] < meteorNoise[mPeak:])[0]
        if mEndDecayTime1.size == 0:
            mEndDecayTime1 = powerNet0.size
        else:
            mEndDecayTime1 = mEndDecayTime1[0] + mPeak - 1

        # meteorVolts1: all channels, from reestimated start to end
        meteorVolts1 = meteorVolts0[:,mStart1:mEnd1 + 1]
        # meteorVolts2: decay portion, `lag` profiles past the peak
        meteorVolts2 = meteorVolts0[:,mPeak + lag:mEnd1 + 1]
        if meteorVolts2.shape[1] == 0:
            meteorVolts2 = meteorVolts0[:,mPeak:mEnd1 + 1]
        meteorVolts1 = meteorVolts1.reshape(meteorVolts1.shape[0], meteorVolts1.shape[1], 1)
        meteorVolts2 = meteorVolts2.reshape(meteorVolts2.shape[0], meteorVolts2.shape[1], 1)
        ##################### END PARAMETERS REESTIMATION #########################

        ##################### 3.8 PHASE DIFFERENCE REESTIMATION ########################
        if meteorVolts2.shape[1] > 0:
            # Phase difference re-estimation on the decay portion
            phaseDiff1, phaseDiffint = self.__estimatePhaseDifference(meteorVolts2, pairslist1) #Phase Difference Estimation
            meteorVolts2 = meteorVolts2.reshape(meteorVolts2.shape[0], meteorVolts2.shape[1])
            phaseDiff11 = numpy.reshape(phaseDiff1, (phaseDiff1.shape[0],1))
            meteorVolts2[indSides,:] = self.__shiftPhase(meteorVolts2[indSides,:], phaseDiff11[0:4]) #Phase Shifting

            #Phase Difference RMS
            phaseRMS1 = numpy.sqrt(numpy.mean(numpy.square(phaseDiff1)))
            powerNet1 = numpy.nansum(numpy.abs(meteorVolts1[:,:])**2,0)
            #Data from Meteor
            mPeak1 = powerNet1.argmax() + mStart1
            mPeakPower1 = powerNet1.max()
            noiseAux = sum(noise[mStart1:mEnd1 + 1,mHeight])
            mSNR1 = (sum(powerNet1)-noiseAux)/noiseAux
            Meteor1 = numpy.array([mHeight, mStart1, mPeak1, mEnd1, mPeakPower1, mSNR1, phaseRMS1])
            Meteor1 = numpy.hstack((Meteor1,phaseDiffint))
            PowerSeries = powerNet0[mStart1:mEndDecayTime1 + 1]
            #Vectorize
            meteorAux[0:7] = [mHeight, mStart1, mPeak1, mEnd1, mPeakPower1, mSNR1, phaseRMS1]
            meteorAux[7:11] = phaseDiffint[0:4]

            #Rejection Criteria
            if phaseRMS1 > thresholdPhase: #Error Number 17: Phase variation
                meteorAux[-1] = 17
            elif mSNR1 < thresholdDB1: #Error Number 1: SNR < threshold dB
                meteorAux[-1] = 1

        else:
            meteorAux[0:4] = [mHeight, mStart, mPeak, mEnd]
            meteorAux[-1] = 6 #Error Number 6: echo less than 5 samples long; too short for analysis
            PowerSeries = 0

        listMeteors1.append(meteorAux)
        listPowerSeries.append(PowerSeries)
        listVoltageSeries.append(meteorVolts1)

    return listMeteors1, listPowerSeries, listVoltageSeries
3144 3143
def __estimateDecayTime(self, listMeteors, listPower, timeInterval, frequency):
    """Estimate the decay time of each meteor echo by an exponential fit.

    Fits a*exp(-x/tau) to the power series past the peak (algorithm step for
    error codes 7, 8 and 11) and stores [decayTime, fitError] at columns
    11:13 of each meteor vector.  Only meteors with error code 0 are fitted.
    Returns the updated list of meteor vectors.
    """
    threshError = 10
    # Lag depends on the radar frequency (30 or 50 MHz)
    if frequency == 30e6:
        timeLag = 45*10**-3
    else:
        timeLag = 15*10**-3
    # BUG FIX: cast to int — numpy.ceil returns a float and the slice
    # `meteorPower[indlag:]` below raised TypeError, which the except clause
    # silently converted into error 11 for EVERY meteor.
    lag = int(numpy.ceil(timeLag/timeInterval))

    listMeteors1 = []

    for i in range(len(listMeteors)):
        meteorPower = listPower[i]
        meteorAux = listMeteors[i]

        # only fit meteors that passed the previous rejection stages
        if meteorAux[-1] == 0:

            try:
                indmax = meteorPower.argmax()
                indlag = indmax + lag

                y = meteorPower[indlag:]
                # NOTE(review): sample spacing uses timeLag rather than
                # timeInterval — looks suspicious but kept as-is; confirm
                # against the algorithm specification.
                x = numpy.arange(0, y.size)*timeLag

                # first guess: amplitude = first sample, tau = timeLag
                a = y[0]
                tau = timeLag
                # exponential fit y = a*exp(-x/tau)
                popt, pcov = optimize.curve_fit(self.__exponential_function, x, y, p0 = [a, tau])
                y1 = self.__exponential_function(x, *popt)
                # normalized residual as goodness-of-fit measure
                error = sum((y - y1)**2)/(numpy.var(y)*(y.size - popt.size))

                decayTime = popt[1]
                riseTime = indmax*timeInterval
                meteorAux[11:13] = [decayTime, error]

                #Table items 7, 8 and 11
                if (riseTime > 0.3): #Number 7: Echo rise exceeds 0.3s
                    meteorAux[-1] = 7
                elif (decayTime < 2*riseTime): #Number 8: Echo decay time less than twice rise time
                    meteorAux[-1] = 8
                if (error > threshError): #Number 11: Poor fit to amplitude for estimation of decay time
                    meteorAux[-1] = 11

            except Exception:
                # fit failed (empty decay series, no convergence, ...)
                meteorAux[-1] = 11

        listMeteors1.append(meteorAux)

    return listMeteors1
3199 3198
3200 3199 #Exponential Function
3201 3200
def __exponential_function(self, x, a, tau):
    """Exponential decay model used by the curve fit: a * exp(-x / tau)."""
    return a * numpy.exp(-x / tau)
3205 3204
def __getRadialVelocity(self, listMeteors, listVolts, radialStdThresh, pairslist, timeInterval):
    """Estimate the radial drift velocity of each meteor (error codes 12, 15).

    Fits the CCF phase slope across lags -2..2 for every pair and converts the
    mean slope to a radial velocity; the slope spread gives the error.  Writes
    velocity/error into columns -3/-2 of each meteor vector.  Only meteors with
    error code 0 are processed.  Returns the updated list.
    """
    # extended pair list, matching __meteorReestimation
    pairslist1 = list(pairslist)
    pairslist1.append((0,1))
    pairslist1.append((3,4))
    numPairs = len(pairslist1)
    # constants for the Doppler conversion
    c = 3e8
    freq = 30e6

    listMeteors1 = []

    for i in range(len(listMeteors)):
        meteorAux = listMeteors[i]
        if meteorAux[-1] == 0:

            # voltage data of this meteor, reshaped to (nChannels, nProfiles, 1)
            meteorVolts = listVolts[i]
            meteorVolts = meteorVolts.reshape(meteorVolts.shape[0], meteorVolts.shape[1], 1)

            # CCF at lags -2..2 for every pair
            allCCFs = self.__calculateCCF(meteorVolts, pairslist1, [-2,-1,0,1,2])

            slopes = numpy.zeros(numPairs)
            time = numpy.array([-2,-1,1,2])*timeInterval
            angAllCCF = numpy.angle(allCCFs[:,[0,1,3,4],0])

            # Correct phases: unwrap jumps larger than pi between consecutive lags
            derPhaseCCF = angAllCCF[:,1:] - angAllCCF[:,0:-1]
            indDer = numpy.where(numpy.abs(derPhaseCCF) > numpy.pi)

            if indDer[0].shape[0] > 0:
                # FIX: loop variable renamed (was `i`, shadowing the outer meteor index)
                for k in range(indDer[0].shape[0]):
                    signo = -numpy.sign(derPhaseCCF[indDer[0][k],indDer[1][k]])
                    angAllCCF[indDer[0][k],indDer[1][k]+1:] += signo*2*numpy.pi

            # linear fit of phase vs time per pair; the slope is the Doppler rate
            for j in range(numPairs):
                fit = stats.linregress(time, angAllCCF[j,:])
                slopes[j] = fit[0]

            # mean slope -> radial velocity; slope spread -> error
            radialVelocity = -numpy.mean(slopes)*(0.25/numpy.pi)*(c/freq)
            radialError = numpy.std(slopes)*(0.25/numpy.pi)*(c/freq)
            meteorAux[-2] = radialError
            meteorAux[-3] = radialVelocity

            #Setting Error
            #Number 15: Radial Drift velocity or projected horizontal velocity exceeds 200 m/s
            if numpy.abs(radialVelocity) > 200:
                meteorAux[-1] = 15
            #Number 12: Poor fit to CCF variation for estimation of radial drift velocity
            elif radialError > radialStdThresh:
                meteorAux[-1] = 12

        listMeteors1.append(meteorAux)
    return listMeteors1
3274 3273
def __setNewArrays(self, listMeteors, date, heiRang):
    """Pack the final meteor list into the 13-column output parameter array.

    Columns: 0 timestamp, 1 range (from heiRang), 6-7 radial velocity and
    its error, 8-11 pair phases, 12 error code.
    """
    meteorTable = numpy.array(listMeteors)
    nMeteors = len(listMeteors)
    arrayParameters = numpy.zeros((nMeteors, 13))

    # same timestamp for every meteor of this block
    arrayParameters[:, 0] = numpy.tile(date, (nMeteors))
    # height index -> physical range
    arrayParameters[:, 1] = heiRang[meteorTable[:, 0].astype(int)]
    # radial velocity and its error
    arrayParameters[:, 6:8] = meteorTable[:, -3:-1]
    # phase differences of the four pairs
    arrayParameters[:, 8:12] = meteorTable[:, 7:11]
    # error code
    arrayParameters[:, -1] = meteorTable[:, -1]

    return arrayParameters
3306 3305
class CorrectSMPhases(Operation):
    """Apply external phase offsets to SM data and recompute meteor parameters."""

    def run(self, dataOut, phaseOffsets, hmin = 50, hmax = 150, azimuth = 45, channelPositions = None):

        arrayParameters = dataOut.data_param
        # antenna pairs along the x and y baselines
        pairsList = [(0,1), (2,3)]
        jph = numpy.zeros(4)

        # offsets arrive in degrees; add them and wrap back into (-pi, pi]
        offsetsRad = numpy.array(phaseOffsets)*numpy.pi/180
        arrayParameters[:,8:12] = numpy.angle(numpy.exp(1j*(arrayParameters[:,8:12] + offsetsRad)))

        meteorOps = SMOperations()
        if channelPositions is None:
            channelPositions = [(4.5,2), (2,4.5), (2,2), (2,0), (0,2)] #Estrella

        pairslist0, distances = meteorOps.getPhasePairs(channelPositions)
        heightRange = (hmin, hmax)

        arrayParameters = meteorOps.getMeteorParams(arrayParameters, azimuth, heightRange, pairsList, distances, jph)

        dataOut.data_param = arrayParameters
        return
3335 3334
class SMPhaseCalibration(Operation):
    # Accumulates SM meteor parameters over nHours and estimates the
    # per-channel phase offsets that maximize the number of valid meteors.

    __buffer = None      # stacked data_param rows accumulated over the interval

    __initime = None     # start (UTC seconds) of the current accumulation interval

    __dataReady = False  # True once a full interval has been accumulated

    __isConfig = False   # one-time initialization flag

    def __checkTime(self, currentTime, initTime, paramInterval, outputInterval):
        # Return True when the accumulated data spans outputInterval seconds
        # (or time went backwards, e.g. after a data gap/restart).
        dataTime = currentTime + paramInterval
        deltaTime = dataTime - initTime

        if deltaTime >= outputInterval or deltaTime < 0:
            return True

        return False

    def __getGammas(self, pairs, d, phases):
        # For each of the two baselines, estimate the gamma angle from the
        # distribution of the measured pair phases.
        gammas = numpy.zeros(2)

        for i in range(len(pairs)):

            pairi = pairs[i]

            phip3 = phases[:,pairi[0]]
            d3 = d[pairi[0]]
            phip2 = phases[:,pairi[1]]
            d2 = d[pairi[1]]
            # Calculating gamma from the two pair phases and their distances,
            # wrapped into (-pi, pi]
            jgamma = -phip2*d3/d2 - phip3
            jgamma = numpy.angle(numpy.exp(1j*jgamma))

            # Revised distribution: replicate shifted by +/- pi/2 so the peak
            # is not split at the histogram edges
            jgammaArray = numpy.hstack((jgamma,jgamma+0.5*numpy.pi,jgamma-0.5*numpy.pi))

            # Histogram over (-pi/2, pi/2), bin centers at phasesX
            nBins = 64
            rmin = -0.5*numpy.pi
            rmax = 0.5*numpy.pi
            phaseHisto = numpy.histogram(jgammaArray, bins=nBins, range=(rmin,rmax))

            meteorsY = phaseHisto[0]
            phasesX = phaseHisto[1][:-1]
            width = phasesX[1] - phasesX[0]
            phasesX += width/2

            # Gaussian approximation around the histogram peak (+/- 5 bins)
            bpeak = meteorsY.argmax()
            peak = meteorsY.max()
            jmin = bpeak - 5
            jmax = bpeak + 5 + 1

            if jmin<0:
                jmin = 0
                jmax = 6
            elif jmax > meteorsY.size:
                jmin = meteorsY.size - 6
                jmax = meteorsY.size

            # NOTE(review): the initial center x0[1] is the bin INDEX bpeak,
            # while the fit runs over the phase values phasesX — verify.
            x0 = numpy.array([peak,bpeak,50])
            coeff = optimize.leastsq(self.__residualFunction, x0, args=(meteorsY[jmin:jmax], phasesX[jmin:jmax]))

            # gamma is the fitted Gaussian center
            gammas[i] = coeff[0][1]

        return gammas

    def __residualFunction(self, coeffs, y, t):
        # Residual for the least-squares Gaussian fit
        return y - self.__gauss_function(t, coeffs)

    def __gauss_function(self, t, coeffs):
        # Gaussian model; coeffs = [amplitude, center, width]
        return coeffs[0]*numpy.exp(-0.5*((t - coeffs[1]) / coeffs[2])**2)

    def __getPhases(self, azimuth, h, pairsList, d, gammas, meteorsArray):
        # Grid search over the (x, y) phase-offset plane, refined 4 times with
        # shrinking range; the offset maximizing the number of error-free
        # meteors wins.  Returns the 4 channel offsets in degrees.
        meteorOps = SMOperations()
        nchan = 4
        pairx = pairsList[0] # x is 0
        pairy = pairsList[1] # y is 1
        center_xangle = 0
        center_yangle = 0
        range_angle = numpy.array([10*numpy.pi,numpy.pi,numpy.pi/2,numpy.pi/4])
        ntimes = len(range_angle)

        nstepsx = 20
        nstepsy = 20

        for iz in range(ntimes):
            min_xangle = -range_angle[iz]/2 + center_xangle
            max_xangle = range_angle[iz]/2 + center_xangle
            min_yangle = -range_angle[iz]/2 + center_yangle
            max_yangle = range_angle[iz]/2 + center_yangle

            inc_x = (max_xangle-min_xangle)/nstepsx
            inc_y = (max_yangle-min_yangle)/nstepsy

            alpha_y = numpy.arange(nstepsy)*inc_y + min_yangle
            alpha_x = numpy.arange(nstepsx)*inc_x + min_xangle
            penalty = numpy.zeros((nstepsx,nstepsy))
            jph_array = numpy.zeros((nchan,nstepsx,nstepsy))
            jph = numpy.zeros(nchan)

            # Iterations looking for the offset
            for iy in range(int(nstepsy)):
                for ix in range(int(nstepsx)):
                    d3 = d[pairsList[1][0]]
                    d2 = d[pairsList[1][1]]
                    d5 = d[pairsList[0][0]]
                    d4 = d[pairsList[0][1]]

                    alp2 = alpha_y[iy] #gamma 1
                    alp4 = alpha_x[ix] #gamma 0

                    # offsets of the opposite antennas follow from the baseline
                    # distances and the estimated gammas
                    alp3 = -alp2*d3/d2 - gammas[1]
                    alp5 = -alp4*d5/d4 - gammas[0]

                    jph[pairsList[0][1]] = alp4
                    jph[pairsList[0][0]] = alp5
                    jph[pairsList[1][0]] = alp3
                    jph[pairsList[1][1]] = alp2
                    jph_array[:,ix,iy] = jph
                    # TODO: check that the meteors are read correctly
                    meteorsArray1 = meteorOps.getMeteorParams(meteorsArray, azimuth, h, pairsList, d, jph)
                    error = meteorsArray1[:,-1]
                    ind1 = numpy.where(error==0)[0]
                    # "penalty" counts error-free meteors; it is maximized below
                    penalty[ix,iy] = ind1.size

            i,j = numpy.unravel_index(penalty.argmax(), penalty.shape)
            phOffset = jph_array[:,i,j]

            # recenter the next, finer search on the current best offset
            center_xangle = phOffset[pairx[1]]
            center_yangle = phOffset[pairy[1]]

        # wrap into (-pi, pi] and convert to degrees
        phOffset = numpy.angle(numpy.exp(1j*jph_array[:,i,j]))
        phOffset = phOffset*180/numpy.pi
        return phOffset


    def run(self, dataOut, hmin, hmax, channelPositions=None, nHours = 1):
        # Accumulate meteor parameters for nHours, then estimate and output the
        # per-channel phase offsets (degrees, sign-inverted) in data_output.
        dataOut.flagNoData = True
        self.__dataReady = False
        dataOut.outputInterval = nHours*3600

        if self.__isConfig == False:
            # Get initial UTC time, truncated to the hour, as epoch seconds
            self.__initime = datetime.datetime.utcfromtimestamp(dataOut.utctime)
            self.__initime = (self.__initime.replace(minute = 0, second = 0, microsecond = 0) - datetime.datetime(1970, 1, 1)).total_seconds()

            self.__isConfig = True

        # accumulate this block's meteors
        if self.__buffer is None:
            self.__buffer = dataOut.data_param.copy()

        else:
            self.__buffer = numpy.vstack((self.__buffer, dataOut.data_param))

        self.__dataReady = self.__checkTime(dataOut.utctime, self.__initime, dataOut.paramInterval, dataOut.outputInterval) #Check if the buffer is ready

        if self.__dataReady:
            dataOut.utctimeInit = self.__initime
            self.__initime += dataOut.outputInterval #to erase time offset

            freq = dataOut.frequency
            c = dataOut.C #m/s
            lamb = c/freq
            k = 2*numpy.pi/lamb
            azimuth = 0
            h = (hmin, hmax)

            if channelPositions is None:
                # channelPositions = [(2.5,0), (0,2.5), (0,0), (0,4.5), (-2,0)] #T
                channelPositions = [(4.5,2), (2,4.5), (2,2), (2,0), (0,2)] #Estrella
            meteorOps = SMOperations()
            pairslist0, distances = meteorOps.getPhasePairs(channelPositions)

            # Checking correct order of pairs: longer-baseline channel first
            pairs = []
            if distances[1] > distances[0]:
                pairs.append((1,0))
            else:
                pairs.append((0,1))

            if distances[3] > distances[2]:
                pairs.append((3,2))
            else:
                pairs.append((2,3))

            meteorsArray = self.__buffer
            # keep meteors with no error or with errors 3, 4, 13, 14 only,
            # then reset their error code to 0
            error = meteorsArray[:,-1]
            boolError = (error==0)|(error==3)|(error==4)|(error==13)|(error==14)
            ind1 = numpy.where(boolError)[0]
            meteorsArray = meteorsArray[ind1,:]
            meteorsArray[:,-1] = 0
            phases = meteorsArray[:,8:12]

            #Calculate Gammas
            gammas = self.__getGammas(pairs, distances, phases)
            #Calculate Phases
            phasesOff = self.__getPhases(azimuth, h, pairs, distances, gammas, meteorsArray)
            phasesOff = phasesOff.reshape((1,phasesOff.size))
            dataOut.data_output = -phasesOff
            dataOut.flagNoData = False
            self.__buffer = None


        return
3561 3560
class SMOperations():
    """Low-level operations for Specular Meteor (SM) processing.

    Provides angle-of-arrival (AOA) estimation, range-ambiguity/height
    resolution and antenna-pair geometry helpers used by the meteor
    phase-calibration operations.  Error codes written into the last
    column of the meteor parameter array:
        3  -> AOA not feasible
        4  -> large AOA difference between baselines
        13 -> no valid height in the requested window
        14 -> ambiguous height (more than one candidate in the window)
    """

    def __init__(self):

        return

    def getMeteorParams(self, arrayParameters0, azimuth, h, pairsList, distances, jph):
        """Return a copy of ``arrayParameters0`` with AOA and height columns filled.

        Parameters:
            arrayParameters0: 2-D array, one meteor per row (col 1 = range,
                col 4 = zenith after AOA, cols 8:12 = channel phases,
                last col = error code).
            azimuth: azimuth offset in degrees added to the computed AOA.
            h: (hmin, hmax) valid height window in km.
            pairsList, distances: antenna geometry as returned by
                ``getPhasePairs``.
            jph: per-channel phase offsets (radians) applied to the phases.
        """

        arrayParameters = arrayParameters0.copy()
        hmin = h[0]
        hmax = h[1]

        # Calculate AOA (error codes 3, 4) -- Jones et al. 1998
        AOAthresh = numpy.pi/8
        error = arrayParameters[:,-1]
        phases = -arrayParameters[:,8:12] + jph
        arrayParameters[:,3:6], arrayParameters[:,-1] = self.__getAOA(phases, pairsList, distances, error, AOAthresh, azimuth)

        # Calculate heights (error codes 13 and 14)
        error = arrayParameters[:,-1]
        Ranges = arrayParameters[:,1]
        zenith = arrayParameters[:,4]
        arrayParameters[:,2], arrayParameters[:,-1] = self.__getHeights(Ranges, zenith, error, hmin, hmax)

        return arrayParameters

    def __getAOA(self, phases, pairsList, directions, error, AOAthresh, azimuth):
        """Compute (azimuth, zenith, cosDirError) per meteor and update error codes."""

        arrayAOA = numpy.zeros((phases.shape[0],3))
        cosdir0, cosdir = self.__getDirectionCosines(phases, pairsList, directions)

        arrayAOA[:,:2] = self.__calculateAOA(cosdir, azimuth)
        cosDirError = numpy.sum(numpy.abs(cosdir0 - cosdir), axis = 1)
        arrayAOA[:,2] = cosDirError

        azimuthAngle = arrayAOA[:,0]
        zenithAngle = arrayAOA[:,1]

        # Reset previous AOA error codes before re-evaluating
        indError = numpy.where(numpy.logical_or(error == 3, error == 4))[0]
        error[indError] = 0
        # Error 3: AOA not feasible (direction cosines fall outside the unit circle)
        indInvalid = numpy.where(numpy.logical_and((numpy.logical_or(numpy.isnan(zenithAngle), numpy.isnan(azimuthAngle))),error == 0))[0]
        error[indInvalid] = 3
        # Error 4: large difference between AOAs from different antenna baselines
        indInvalid = numpy.where(numpy.logical_and(cosDirError > AOAthresh,error == 0))[0]
        error[indInvalid] = 4
        return arrayAOA, error

    def __getDirectionCosines(self, arrayPhase, pairsList, distances):
        """Return (first-estimate, refined) direction cosines for both axes."""

        # Candidate 2*pi ambiguities used to de-alias the refined estimate
        ang_aux = numpy.array([-8,-7,-6,-5,-4,-3,-2,-1,0,1,2,3,4,5,6,7,8])*2*numpy.pi
        ang_aux = ang_aux.reshape(1,ang_aux.size)

        cosdir = numpy.zeros((arrayPhase.shape[0],2))
        cosdir0 = numpy.zeros((arrayPhase.shape[0],2))

        for i in range(2):
            ph0 = arrayPhase[:,pairsList[i][0]]
            ph1 = arrayPhase[:,pairsList[i][1]]
            d0 = distances[pairsList[i][0]]
            d1 = distances[pairsList[i][1]]

            ph0_aux = ph0 + ph1
            ph0_aux = numpy.angle(numpy.exp(1j*ph0_aux))   # wrap to (-pi, pi]
            # First (coarse) estimation from the short effective baseline
            cosdir0[:,i] = (ph0_aux)/(2*numpy.pi*(d0 - d1))

            # Most-accurate second estimation (long baseline, 2*pi-ambiguous)
            phi1_aux = ph0 - ph1
            phi1_aux = phi1_aux.reshape(phi1_aux.size,1)
            cosdir1 = (phi1_aux + ang_aux)/(2*numpy.pi*(d0 + d1))

            # Pick the de-aliased candidate closest to the first estimate
            cosdir0_aux = cosdir0[:,i]
            cosdir0_aux = cosdir0_aux.reshape(cosdir0_aux.size,1)
            cosDiff = (cosdir1 - cosdir0_aux)**2
            indcos = cosDiff.argmin(axis = 1)
            cosdir[:,i] = cosdir1[numpy.arange(len(indcos)),indcos]

        return cosdir0, cosdir

    def __calculateAOA(self, cosdir, azimuth):
        """Convert direction cosines into (azimuth, zenith) angles in degrees."""
        cosdirX = cosdir[:,0]
        cosdirY = cosdir[:,1]

        zenithAngle = numpy.arccos(numpy.sqrt(1 - cosdirX**2 - cosdirY**2))*180/numpy.pi
        azimuthAngle = numpy.arctan2(cosdirX,cosdirY)*180/numpy.pi + azimuth #0 deg north, 90 deg east
        angles = numpy.vstack((azimuthAngle, zenithAngle)).transpose()

        return angles

    def __getHeights(self, Ranges, zenith, error, minHeight, maxHeight):
        """Resolve range ambiguity into heights; flag unresolvable/ambiguous echoes.

        Returns (heights, error); heights stay 0 for rows without exactly
        one candidate in [minHeight, maxHeight].
        """

        Ramb = 375 #Ramb = c/(2*PRF)
        Re = 6371 #Earth radius (km)
        heights = numpy.zeros(Ranges.shape)

        # Range-ambiguity candidates: R, R+Ramb, R+2*Ramb
        R_aux = numpy.array([0,1,2])*Ramb
        R_aux = R_aux.reshape(1,R_aux.size)

        Ranges = Ranges.reshape(Ranges.size,1)

        Ri = Ranges + R_aux
        # Height above the (spherical) Earth for each candidate
        hi = numpy.sqrt(Re**2 + Ri**2 + (2*Re*numpy.cos(zenith*numpy.pi/180)*Ri.transpose()).transpose()) - Re

        # Keep echoes with exactly one candidate inside the window
        h_bool = numpy.sum(numpy.logical_and(hi > minHeight, hi < maxHeight), axis = 1)
        ind_h = numpy.where(h_bool == 1)[0]

        # (removed a dead store of hi[ind_h, :] that was immediately overwritten)
        ind_hCorr = numpy.where(numpy.logical_and(hi > minHeight, hi < maxHeight))
        hCorr = hi[ind_hCorr][:len(ind_h)]
        heights[ind_h] = hCorr

        # Error 13: height unresolvable (no valid height in the window)
        # Error 14: height ambiguous (more than one candidate in the window)
        indError = numpy.where(numpy.logical_or(error == 13, error == 14))[0]
        error[indError] = 0
        indInvalid2 = numpy.where(numpy.logical_and(h_bool > 1, error == 0))[0]
        error[indInvalid2] = 14
        indInvalid1 = numpy.where(numpy.logical_and(h_bool == 0, error == 0))[0]
        error[indInvalid1] = 13

        return heights, error

    def getPhasePairs(self, channelPositions):
        """Derive baseline pairs and distances from 5 antenna positions.

        Returns:
            pairslist: [(centerX, ch1X), (centerX, ch2X), (centerY, ch1Y),
                (centerY, ch2Y)] -- center channel paired with its two
                nearest neighbours on each axis.
            distances: array with the 4 corresponding baseline lengths.
        """
        chanPos = numpy.array(channelPositions)
        listOper = list(itertools.combinations(list(range(5)),2))

        distances = numpy.zeros(4)
        axisX = []
        axisY = []
        distX = numpy.zeros(3)
        distY = numpy.zeros(3)
        ix = 0
        iy = 0

        # Classify channel pairs as X-axis (same y) or Y-axis (same x) baselines
        for i in range(len(listOper)):
            pairi = listOper[i]

            posDif = numpy.abs(chanPos[pairi[0],:] - chanPos[pairi[1],:])

            if posDif[0] == 0:
                axisY.append(pairi)
                distY[iy] = posDif[1]
                iy += 1
            elif posDif[1] == 0:
                axisX.append(pairi)
                distX[ix] = posDif[0]
                ix += 1

        for i in range(2):
            if i==0:
                dist0 = distX
                axis0 = axisX
            else:
                dist0 = distY
                axis0 = axisY

            # Keep the two shortest baselines; their shared channel is the center
            side = numpy.argsort(dist0)[:-1]
            axis0 = numpy.array(axis0)[side,:]
            chanC = int(numpy.intersect1d(axis0[0,:], axis0[1,:])[0])
            axis1 = numpy.unique(numpy.reshape(axis0,4))
            side = axis1[axis1 != chanC]
            diff1 = chanPos[chanC,i] - chanPos[side[0],i]
            diff2 = chanPos[chanC,i] - chanPos[side[1],i]
            if diff1<0:
                chan2 = side[0]
                d2 = numpy.abs(diff1)
                chan1 = side[1]
                d1 = numpy.abs(diff2)
            else:
                chan2 = side[1]
                d2 = numpy.abs(diff2)
                chan1 = side[0]
                d1 = numpy.abs(diff1)

            if i==0:
                chanCX = chanC
                chan1X = chan1
                chan2X = chan2
                distances[0:2] = numpy.array([d1,d2])
            else:
                chanCY = chanC
                chan1Y = chan1
                chan2Y = chan2
                distances[2:4] = numpy.array([d1,d2])

        pairslist = [(chanCX, chan1X),(chanCX, chan2X),(chanCY,chan1Y),(chanCY, chan2Y)]

        return pairslist, distances
@@ -1,1439 +1,1357
1 1 # Copyright (c) 2012-2020 Jicamarca Radio Observatory
2 2 # All rights reserved.
3 3 #
4 4 # Distributed under the terms of the BSD 3-clause license.
5 5 """Spectra processing Unit and operations
6 6
7 7 Here you will find the processing unit `SpectraProc` and several operations
8 8 to work with Spectra data type
9 9 """
10 10
11 11 import time
12 12 import itertools
13 13
14 14 import numpy
15 15 import math
16 16
17 17 from schainpy.model.proc.jroproc_base import ProcessingUnit, MPDecorator, Operation
18 18 from schainpy.model.data.jrodata import Spectra
19 19 from schainpy.model.data.jrodata import hildebrand_sekhon
20 20 from schainpy.utils import log
21 21
22 22 from scipy.optimize import curve_fit
23 23
24 24
class SpectraProc(ProcessingUnit):
    """Processing unit that turns Voltage profiles into Spectra.

    Accumulates voltage profiles, applies an FFT per channel and computes
    self-spectra, cross-spectra (for the configured channel pairs) and the
    DC component.  If the input is already of type Spectra it is passed
    through, optionally FFT-shifted and/or filtered to a subset of pairs.
    """

    def __init__(self):

        ProcessingUnit.__init__(self)

        self.buffer = None          # (nChannels, nProfiles, nHeights) voltage accumulator
        self.firstdatatime = None   # utctime of the first buffered profile
        self.profIndex = 0          # number of profiles buffered so far
        self.dataOut = Spectra()
        self.id_min = None          # block-mode write window into the buffer
        self.id_max = None
        self.setupReq = False       # TODO: add to every processing unit

    def __updateSpecFromVoltage(self):
        """Copy metadata from the Voltage input into the Spectra output."""

        self.dataOut.timeZone = self.dataIn.timeZone
        self.dataOut.dstFlag = self.dataIn.dstFlag
        self.dataOut.errorCount = self.dataIn.errorCount
        self.dataOut.useLocalTime = self.dataIn.useLocalTime
        try:
            self.dataOut.processingHeaderObj = self.dataIn.processingHeaderObj.copy()
        except Exception:
            # Some inputs do not carry a processing header; keep best-effort copy.
            pass
        self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()
        self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()
        self.dataOut.channelList = self.dataIn.channelList
        self.dataOut.heightList = self.dataIn.heightList
        self.dataOut.dtype = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
        self.dataOut.nProfiles = self.dataOut.nFFTPoints
        self.dataOut.flagDiscontinuousBlock = self.dataIn.flagDiscontinuousBlock
        self.dataOut.utctime = self.firstdatatime
        self.dataOut.flagDecodeData = self.dataIn.flagDecodeData
        self.dataOut.flagDeflipData = self.dataIn.flagDeflipData
        self.dataOut.flagShiftFFT = False
        self.dataOut.nCohInt = self.dataIn.nCohInt
        self.dataOut.nIncohInt = 1
        self.dataOut.windowOfFilter = self.dataIn.windowOfFilter
        self.dataOut.frequency = self.dataIn.frequency
        self.dataOut.realtime = self.dataIn.realtime
        self.dataOut.azimuth = self.dataIn.azimuth
        self.dataOut.zenith = self.dataIn.zenith
        self.dataOut.codeList = self.dataIn.codeList
        self.dataOut.azimuthList = self.dataIn.azimuthList
        self.dataOut.elevationList = self.dataIn.elevationList

    def __getFft(self):
        """Convert the buffered Voltage profiles into Spectra.

        Affected:
            self.dataOut.data_spc / data_cspc / data_dc
            self.dataOut.blockSize / flagShiftFFT

        Raises:
            ValueError: if a configured pair references a missing channel.
        """
        fft_volt = numpy.fft.fft(
            self.buffer, n=self.dataOut.nFFTPoints, axis=1)
        fft_volt = fft_volt.astype(numpy.dtype('complex'))
        dc = fft_volt[:, 0, :]   # zero-frequency (DC) bin per channel/height

        # Self-spectra
        fft_volt = numpy.fft.fftshift(fft_volt, axes=(1,))
        spc = fft_volt * numpy.conjugate(fft_volt)
        spc = spc.real

        blocksize = 0
        blocksize += dc.size
        blocksize += spc.size

        cspc = None
        pairIndex = 0
        if self.dataOut.pairsList is not None:
            # Cross-spectra for each configured channel pair
            cspc = numpy.zeros(
                (self.dataOut.nPairs, self.dataOut.nFFTPoints, self.dataOut.nHeights), dtype='complex')
            for pair in self.dataOut.pairsList:
                if pair[0] not in self.dataOut.channelList:
                    raise ValueError("Error getting CrossSpectra: pair 0 of %s is not in channelList = %s" % (
                        str(pair), str(self.dataOut.channelList)))
                if pair[1] not in self.dataOut.channelList:
                    raise ValueError("Error getting CrossSpectra: pair 1 of %s is not in channelList = %s" % (
                        str(pair), str(self.dataOut.channelList)))

                cspc[pairIndex, :, :] = fft_volt[pair[0], :, :] * \
                    numpy.conjugate(fft_volt[pair[1], :, :])
                pairIndex += 1
            blocksize += cspc.size

        self.dataOut.data_spc = spc
        self.dataOut.data_cspc = cspc
        self.dataOut.data_dc = dc
        self.dataOut.blockSize = blocksize
        self.dataOut.flagShiftFFT = False

    def run(self, nProfiles=None, nFFTPoints=None, pairsList=None, ippFactor=None, shift_fft=False):
        """Produce Spectra from the current input.

        Parameters:
            nProfiles: profiles to accumulate (defaults to nFFTPoints).
            nFFTPoints: FFT length; required when the input is Voltage.
            pairsList: list of (ch_a, ch_b) cross-spectra pairs; defaults to
                every channel combination.
            ippFactor: NOTE(review) -- a user-supplied value is currently
                ignored (only the None default sets ippFactor=1); preserved
                as-is to avoid changing downstream behavior.
            shift_fft: apply fftshift to already-computed Spectra input.

        Raises:
            ValueError: missing nFFTPoints, wrong block size, or
                unsupported input type.
        """

        if self.dataIn.type == "Spectra":
            self.dataOut.copy(self.dataIn)
            if shift_fft:
                # Roll the spectra so the zero frequency sits in the middle
                shift = int(self.dataOut.nFFTPoints / 2)
                self.dataOut.data_spc = numpy.roll(self.dataOut.data_spc, shift, axis=1)

                if self.dataOut.data_cspc is not None:
                    self.dataOut.data_cspc = numpy.roll(self.dataOut.data_cspc, shift, axis=1)
            if pairsList:
                self.__selectPairs(pairsList)

        elif self.dataIn.type == "Voltage":

            self.dataOut.flagNoData = True

            if nFFTPoints is None:
                raise ValueError("This SpectraProc.run() need nFFTPoints input variable")

            if nProfiles is None:
                nProfiles = nFFTPoints

            if ippFactor is None:
                self.dataOut.ippFactor = 1

            self.dataOut.nFFTPoints = nFFTPoints

            if self.buffer is None:
                self.buffer = numpy.zeros((self.dataIn.nChannels,
                                           nProfiles,
                                           self.dataIn.nHeights),
                                          dtype='complex')

            if self.dataIn.flagDataAsBlock:
                nVoltProfiles = self.dataIn.data.shape[1]

                if nVoltProfiles == nProfiles:
                    self.buffer = self.dataIn.data.copy()
                    self.profIndex = nVoltProfiles

                elif nVoltProfiles < nProfiles:

                    if self.profIndex == 0:
                        self.id_min = 0
                        self.id_max = nVoltProfiles

                    self.buffer[:, self.id_min:self.id_max,
                                :] = self.dataIn.data
                    self.profIndex += nVoltProfiles
                    self.id_min += nVoltProfiles
                    self.id_max += nVoltProfiles
                else:
                    raise ValueError("The type object %s has %d profiles, it should just has %d profiles" % (
                        self.dataIn.type, self.dataIn.data.shape[1], nProfiles))
                self.dataOut.flagNoData = True
            else:
                self.buffer[:, self.profIndex, :] = self.dataIn.data.copy()
                self.profIndex += 1

            if self.firstdatatime is None:
                self.firstdatatime = self.dataIn.utctime

            if self.profIndex == nProfiles:
                self.__updateSpecFromVoltage()
                if pairsList is None:
                    # Default: every channel combination
                    self.dataOut.pairsList = [pair for pair in itertools.combinations(self.dataOut.channelList, 2)]
                else:
                    self.dataOut.pairsList = pairsList
                self.__getFft()
                self.dataOut.flagNoData = False
                self.firstdatatime = None
                self.profIndex = 0
        else:
            # Fix: the original mixed %-style and str.format, so the message
            # always showed a literal '%s' instead of the input type.
            raise ValueError("The type of input object '{}' is not valid".format(
                self.dataIn.type))

    def __selectPairs(self, pairsList):
        """Keep only the requested channel pairs in the cross-spectra.

        Fix: row indices must refer to positions in the CURRENT
        ``dataOut.pairsList`` (the ordering of ``data_cspc`` rows); the
        original appended ``pairs.index(pair)``, which always produced
        0..k-1 and therefore selected the wrong cross-spectra rows.
        """

        if not pairsList:
            return

        pairs = []
        pairsIndex = []

        for pair in pairsList:
            if pair[0] not in self.dataOut.channelList or pair[1] not in self.dataOut.channelList:
                continue
            if pair not in self.dataOut.pairsList:
                # No cross-spectra row exists for this pair; skip it
                continue
            pairs.append(pair)
            pairsIndex.append(self.dataOut.pairsList.index(pair))

        self.dataOut.data_cspc = self.dataOut.data_cspc[pairsIndex]
        self.dataOut.pairsList = pairs

        return

    def selectFFTs(self, minFFT, maxFFT):
        """Select a block of data whose FFT-axis values satisfy
        minFFT <= FFT <= maxFFT (values outside the available range are
        clipped to it).

        Raises:
            ValueError: if minFFT > maxFFT.
        """

        if (minFFT > maxFFT):
            raise ValueError("Error selecting heights: Height range (%d,%d) is not valid" % (minFFT, maxFFT))

        if (minFFT < self.dataOut.getFreqRange()[0]):
            minFFT = self.dataOut.getFreqRange()[0]

        if (maxFFT > self.dataOut.getFreqRange()[-1]):
            maxFFT = self.dataOut.getFreqRange()[-1]

        minIndex = 0
        maxIndex = 0
        FFTs = self.dataOut.getFreqRange()

        inda = numpy.where(FFTs >= minFFT)
        indb = numpy.where(FFTs <= maxFFT)

        try:
            minIndex = inda[0][0]
        except IndexError:
            minIndex = 0

        try:
            maxIndex = indb[0][-1]
        except IndexError:
            maxIndex = len(FFTs)

        self.selectFFTsByIndex(minIndex, maxIndex)

        return 1

    def getBeaconSignal(self, tauindex=0, channelindex=0, hei_ref=None):
        """Restrict the data to the heights around the beacon signal.

        Heights are taken above Taus[tauindex] (or above ``hei_ref`` when
        given); the strongest bins of the selected channel are recorded in
        ``dataOut.beacon_heiIndexList``.
        """
        newheis = numpy.where(
            self.dataOut.heightList > self.dataOut.radarControllerHeaderObj.Taus[tauindex])

        if hei_ref is not None:
            newheis = numpy.where(self.dataOut.heightList > hei_ref)

        minIndex = min(newheis[0])
        maxIndex = max(newheis[0])
        data_spc = self.dataOut.data_spc[:, :, minIndex:maxIndex + 1]
        heightList = self.dataOut.heightList[minIndex:maxIndex + 1]

        # Number of height bins covered by the transmitted pulse
        nheis = int(self.dataOut.radarControllerHeaderObj.txB /
                    (self.dataOut.heightList[1] - self.dataOut.heightList[0]))
        avg_dB = 10 * \
            numpy.log10(numpy.sum(data_spc[channelindex, :, :], axis=0))
        beacon_dB = numpy.sort(avg_dB)[-nheis:]
        beacon_heiIndexList = []
        for val in avg_dB.tolist():
            if val >= beacon_dB[0]:
                beacon_heiIndexList.append(avg_dB.tolist().index(val))

        data_cspc = None
        if self.dataOut.data_cspc is not None:
            data_cspc = self.dataOut.data_cspc[:, :, minIndex:maxIndex + 1]

        data_dc = None
        if self.dataOut.data_dc is not None:
            data_dc = self.dataOut.data_dc[:, minIndex:maxIndex + 1]

        self.dataOut.data_spc = data_spc
        self.dataOut.data_cspc = data_cspc
        self.dataOut.data_dc = data_dc
        self.dataOut.heightList = heightList
        self.dataOut.beacon_heiIndexList = beacon_heiIndexList

        return 1

    def selectFFTsByIndex(self, minIndex, maxIndex):
        """Slice spectra (and cross-spectra/DC when present) to FFT-bin
        indices [minIndex, maxIndex] and update the bin-dependent metadata.

        Raises:
            ValueError: if the index range is invalid.
        """

        if (minIndex < 0) or (minIndex > maxIndex):
            raise ValueError("Error selecting heights: Index range (%d,%d) is not valid" % (minIndex, maxIndex))

        if (maxIndex >= self.dataOut.nProfiles):
            maxIndex = self.dataOut.nProfiles-1

        # Spectra
        data_spc = self.dataOut.data_spc[:,minIndex:maxIndex+1,:]

        data_cspc = None
        if self.dataOut.data_cspc is not None:
            data_cspc = self.dataOut.data_cspc[:,minIndex:maxIndex+1,:]

        data_dc = None
        if self.dataOut.data_dc is not None:
            data_dc = self.dataOut.data_dc[minIndex:maxIndex+1,:]

        self.dataOut.data_spc = data_spc
        self.dataOut.data_cspc = data_cspc
        self.dataOut.data_dc = data_dc

        # Fix: derive the new FFT length from data_spc; the original used
        # data_cspc, which crashed with AttributeError/TypeError whenever no
        # cross-spectra were present (same value when both exist).
        nSelected = data_spc.shape[1]
        self.dataOut.ippSeconds = self.dataOut.ippSeconds*(self.dataOut.nFFTPoints / nSelected)
        self.dataOut.nFFTPoints = nSelected
        self.dataOut.profilesPerBlock = nSelected

        return 1

    def getNoise(self, minHei=None, maxHei=None, minVel=None, maxVel=None):
        """Estimate the noise level per channel (Hildebrand-Sekhon) over a
        height/velocity window and store it in ``dataOut.noise_estimation``.

        Out-of-range limits are clipped to the available ranges with a
        warning printed to stdout.
        """
        # Height-range validation
        if minHei is None:
            minHei = self.dataOut.heightList[0]

        if maxHei is None:
            maxHei = self.dataOut.heightList[-1]

        if (minHei < self.dataOut.heightList[0]) or (minHei > maxHei):
            print('minHei: %.2f is out of the heights range' % (minHei))
            print('minHei is setting to %.2f' % (self.dataOut.heightList[0]))
            minHei = self.dataOut.heightList[0]

        if (maxHei > self.dataOut.heightList[-1]) or (maxHei < minHei):
            print('maxHei: %.2f is out of the heights range' % (maxHei))
            print('maxHei is setting to %.2f' % (self.dataOut.heightList[-1]))
            maxHei = self.dataOut.heightList[-1]

        # Velocity-range validation
        velrange = self.dataOut.getVelRange(1)

        if minVel is None:
            minVel = velrange[0]

        if maxVel is None:
            maxVel = velrange[-1]

        if (minVel < velrange[0]) or (minVel > maxVel):
            print('minVel: %.2f is out of the velocity range' % (minVel))
            print('minVel is setting to %.2f' % (velrange[0]))
            minVel = velrange[0]

        if (maxVel > velrange[-1]) or (maxVel < minVel):
            print('maxVel: %.2f is out of the velocity range' % (maxVel))
            print('maxVel is setting to %.2f' % (velrange[-1]))
            maxVel = velrange[-1]

        # Height index selection
        minIndex = 0
        maxIndex = 0
        heights = self.dataOut.heightList

        inda = numpy.where(heights >= minHei)
        indb = numpy.where(heights <= maxHei)

        try:
            minIndex = inda[0][0]
        except IndexError:
            minIndex = 0

        try:
            maxIndex = indb[0][-1]
        except IndexError:
            maxIndex = len(heights)

        if (minIndex < 0) or (minIndex > maxIndex):
            raise ValueError("some value in (%d,%d) is not valid" % (
                minIndex, maxIndex))

        if (maxIndex >= self.dataOut.nHeights):
            maxIndex = self.dataOut.nHeights - 1

        # Velocity index selection
        indminvel = numpy.where(velrange >= minVel)
        indmaxvel = numpy.where(velrange <= maxVel)
        try:
            minIndexVel = indminvel[0][0]
        except IndexError:
            minIndexVel = 0

        try:
            maxIndexVel = indmaxvel[0][-1]
        except IndexError:
            maxIndexVel = len(velrange)

        # Spectrum window used for the estimation
        data_spc = self.dataOut.data_spc[:,
                                         minIndexVel:maxIndexVel + 1, minIndex:maxIndex + 1]
        # Noise estimation per channel
        noise = numpy.zeros(self.dataOut.nChannels)

        for channel in range(self.dataOut.nChannels):
            daux = data_spc[channel, :, :]
            sortdata = numpy.sort(daux, axis=None)
            noise[channel] = hildebrand_sekhon(sortdata, self.dataOut.nIncohInt)

        self.dataOut.noise_estimation = noise.copy()

        return 1
class removeDC(Operation):
    """Remove the DC (zero-Doppler) line from spectra.

    mode 1: replace the DC bin with the average of its two neighbours.
    mode 2: replace it with the value of a cubic polynomial fitted to the
            two bins on each side, falling back to the mode-1 average
            wherever the fit yields non-positive power.
    """

    def run(self, dataOut, mode=2):
        """Apply DC removal in place to dataOut.data_spc / data_cspc."""
        self.dataOut = dataOut
        jspectra = self.dataOut.data_spc
        jcspectra = self.dataOut.data_cspc

        num_chan = jspectra.shape[0]
        num_prof = jspectra.shape[1]

        if jcspectra is not None:
            jcspectraExist = True
            num_pairs = jcspectra.shape[0]
        else:
            jcspectraExist = False

        freq_dc = int(jspectra.shape[1] / 2)
        # The two bins on each side of the DC line
        ind_vel = numpy.array([-2, -1, 1, 2]) + freq_dc
        ind_vel = ind_vel.astype(int)

        if ind_vel[0] < 0:
            # Wrap the negative index around the profile axis.
            # Fix: the original referenced the undefined attribute
            # self.num_prof, which raised AttributeError here.
            ind_vel[list(range(0, 1))] = ind_vel[list(range(0, 1))] + num_prof

        if mode == 1:
            jspectra[:, freq_dc, :] = (
                jspectra[:, ind_vel[1], :] + jspectra[:, ind_vel[2], :]) / 2  # CORRECCION

            if jcspectraExist:
                jcspectra[:, freq_dc, :] = (
                    jcspectra[:, ind_vel[1], :] + jcspectra[:, ind_vel[2], :]) / 2

        if mode == 2:

            vel = numpy.array([-2, -1, 1, 2])
            xx = numpy.zeros([4, 4])

            # Vandermonde matrix for a cubic fit at the neighbour offsets
            for fil in range(4):
                xx[fil, :] = vel[fil]**numpy.asarray(list(range(4)))

            xx_inv = numpy.linalg.inv(xx)
            xx_aux = xx_inv[0, :]   # row that evaluates the fit at offset 0

            for ich in range(num_chan):
                yy = jspectra[ich, ind_vel, :]
                jspectra[ich, freq_dc, :] = numpy.dot(xx_aux, yy)

                # Fall back to neighbour averaging where the fit is <= 0
                junkid = jspectra[ich, freq_dc, :] <= 0
                cjunkid = sum(junkid)

                if cjunkid.any():
                    jspectra[ich, freq_dc, junkid.nonzero()] = (
                        jspectra[ich, ind_vel[1], junkid] + jspectra[ich, ind_vel[2], junkid]) / 2

            if jcspectraExist:
                for ip in range(num_pairs):
                    yy = jcspectra[ip, ind_vel, :]
                    jcspectra[ip, freq_dc, :] = numpy.dot(xx_aux, yy)

        self.dataOut.data_spc = jspectra
        self.dataOut.data_cspc = jcspectra

        return self.dataOut
483 483
484 484 # import matplotlib.pyplot as plt
485 485
486 486 def fit_func( x, a0, a1, a2): #, a3, a4, a5):
487 487 z = (x - a1) / a2
488 488 y = a0 * numpy.exp(-z**2 / a2) #+ a3 + a4 * x + a5 * x**2
489 489 return y
490 490
491 491
492 492 class CleanRayleigh(Operation):
493 493
    def __init__(self):
        # Initialize the base Operation and the accumulation state used by
        # run()/setup(); real configuration happens lazily in setup().
        Operation.__init__(self)
        self.i=0
        self.isConfig = False       # True once setup() has executed
        self.__dataReady = False    # True when enough blocks were accumulated
        self.__profIndex = 0        # number of blocks accumulated so far
        self.byTime = False         # integrate by elapsed time
        self.byProfiles = False     # integrate by a fixed number of blocks

        self.bloques = None
        self.bloque0 = None

        self.index = 0

        self.buffer = 0             # accumulated spectra (0 until first block)
        self.buffer2 = 0            # accumulated cross-spectra
        self.buffer3 = 0            # accumulated DC channel
512 512
513 513
514 514 def setup(self,dataOut,min_hei,max_hei,n, timeInterval,factor_stdv):
515 515
516 516 self.nChannels = dataOut.nChannels
517 517 self.nProf = dataOut.nProfiles
518 518 self.nPairs = dataOut.data_cspc.shape[0]
519 519 self.pairsArray = numpy.array(dataOut.pairsList)
520 520 self.spectra = dataOut.data_spc
521 521 self.cspectra = dataOut.data_cspc
522 522 self.heights = dataOut.heightList #alturas totales
523 523 self.nHeights = len(self.heights)
524 524 self.min_hei = min_hei
525 525 self.max_hei = max_hei
526 526 if (self.min_hei == None):
527 527 self.min_hei = 0
528 528 if (self.max_hei == None):
529 529 self.max_hei = dataOut.heightList[-1]
530 530 self.hval = ((self.max_hei>=self.heights) & (self.heights >= self.min_hei)).nonzero()
531 531 self.heightsClean = self.heights[self.hval] #alturas filtradas
532 532 self.hval = self.hval[0] # forma (N,), an solo N elementos -> Indices de alturas
533 533 self.nHeightsClean = len(self.heightsClean)
534 534 self.channels = dataOut.channelList
535 535 self.nChan = len(self.channels)
536 536 self.nIncohInt = dataOut.nIncohInt
537 537 self.__initime = dataOut.utctime
538 538 self.maxAltInd = self.hval[-1]+1
539 539 self.minAltInd = self.hval[0]
540 540
541 541 self.crosspairs = dataOut.pairsList
542 542 self.nPairs = len(self.crosspairs)
543 543 self.normFactor = dataOut.normFactor
544 544 self.nFFTPoints = dataOut.nFFTPoints
545 545 self.ippSeconds = dataOut.ippSeconds
546 546 self.currentTime = self.__initime
547 547 self.pairsArray = numpy.array(dataOut.pairsList)
548 548 self.factor_stdv = factor_stdv
549 549 #print("CHANNELS: ",[x for x in self.channels])
550 550
551 551 if n != None :
552 552 self.byProfiles = True
553 553 self.nIntProfiles = n
554 554 else:
555 555 self.__integrationtime = timeInterval
556 556
557 557 self.__dataReady = False
558 558 self.isConfig = True
559 559
560 560
561 561
562 562 def run(self, dataOut,min_hei=None,max_hei=None, n=None, timeInterval=10,factor_stdv=2.5):
563 563 #print (dataOut.utctime)
564 564 if not self.isConfig :
565 565 #print("Setting config")
566 566 self.setup(dataOut, min_hei,max_hei,n,timeInterval,factor_stdv)
567 567 #print("Config Done")
568 568 tini=dataOut.utctime
569 569
570 570 if self.byProfiles:
571 571 if self.__profIndex == self.nIntProfiles:
572 572 self.__dataReady = True
573 573 else:
574 574 if (tini - self.__initime) >= self.__integrationtime:
575 575 #print(tini - self.__initime,self.__profIndex)
576 576 self.__dataReady = True
577 577 self.__initime = tini
578 578
579 579 #if (tini.tm_min % 2) == 0 and (tini.tm_sec < 5 and self.fint==0):
580 580
581 581 if self.__dataReady:
582 582 #print("Data ready",self.__profIndex)
583 583 self.__profIndex = 0
584 584 jspc = self.buffer
585 585 jcspc = self.buffer2
586 586 #jnoise = self.buffer3
587 587 self.buffer = dataOut.data_spc
588 588 self.buffer2 = dataOut.data_cspc
589 589 #self.buffer3 = dataOut.noise
590 590 self.currentTime = dataOut.utctime
591 591 if numpy.any(jspc) :
592 592 #print( jspc.shape, jcspc.shape)
593 593 jspc = numpy.reshape(jspc,(int(len(jspc)/self.nChannels),self.nChannels,self.nFFTPoints,self.nHeights))
594 594 jcspc= numpy.reshape(jcspc,(int(len(jcspc)/self.nPairs),self.nPairs,self.nFFTPoints,self.nHeights))
595 595 self.__dataReady = False
596 596 #print( jspc.shape, jcspc.shape)
597 597 dataOut.flagNoData = False
598 598 else:
599 599 dataOut.flagNoData = True
600 600 self.__dataReady = False
601 601 return dataOut
602 602 else:
603 603 #print( len(self.buffer))
604 604 if numpy.any(self.buffer):
605 605 self.buffer = numpy.concatenate((self.buffer,dataOut.data_spc), axis=0)
606 606 self.buffer2 = numpy.concatenate((self.buffer2,dataOut.data_cspc), axis=0)
607 607 self.buffer3 += dataOut.data_dc
608 608 else:
609 609 self.buffer = dataOut.data_spc
610 610 self.buffer2 = dataOut.data_cspc
611 611 self.buffer3 = dataOut.data_dc
612 612 #print self.index, self.fint
613 613 #print self.buffer2.shape
614 614 dataOut.flagNoData = True ## NOTE: ?? revisar LUEGO
615 615 self.__profIndex += 1
616 616 return dataOut ## NOTE: REV
617 617
618 618
619 619 #index = tini.tm_hour*12+tini.tm_min/5
620 620 '''REVISAR'''
621 621 # jspc = jspc/self.nFFTPoints/self.normFactor
622 622 # jcspc = jcspc/self.nFFTPoints/self.normFactor
623 623
624 624
625 625
626 626 tmp_spectra,tmp_cspectra = self.cleanRayleigh(dataOut,jspc,jcspc,self.factor_stdv)
627 627 dataOut.data_spc = tmp_spectra
628 628 dataOut.data_cspc = tmp_cspectra
629 629
630 630 #dataOut.data_spc,dataOut.data_cspc = self.cleanRayleigh(dataOut,jspc,jcspc,self.factor_stdv)
631 631
632 632 dataOut.data_dc = self.buffer3
633 633 dataOut.nIncohInt *= self.nIntProfiles
634 634 dataOut.utctime = self.currentTime #tiempo promediado
635 635 #print("Time: ",time.localtime(dataOut.utctime))
636 636 # dataOut.data_spc = sat_spectra
637 637 # dataOut.data_cspc = sat_cspectra
638 638 self.buffer = 0
639 639 self.buffer2 = 0
640 640 self.buffer3 = 0
641 641
642 642 return dataOut
643 643
644 644 def cleanRayleigh(self,dataOut,spectra,cspectra,factor_stdv):
645 645 #print("OP cleanRayleigh")
646 import matplotlib.pyplot as plt
646 #import matplotlib.pyplot as plt
647 647 #for k in range(149):
648 channelsProcssd = []
649 channelA_ok = False
650 rfunc = cspectra.copy() #self.bloques
648 #channelsProcssd = []
649 #channelA_ok = False
650 #rfunc = cspectra.copy() #self.bloques
651 rfunc = spectra.copy()
651 652 #rfunc = cspectra
652 653 #val_spc = spectra*0.0 #self.bloque0*0.0
653 654 #val_cspc = cspectra*0.0 #self.bloques*0.0
654 655 #in_sat_spectra = spectra.copy() #self.bloque0
655 656 #in_sat_cspectra = cspectra.copy() #self.bloques
656 657
657 658
658 659 ###ONLY FOR TEST:
659 660 raxs = math.ceil(math.sqrt(self.nPairs))
660 661 caxs = math.ceil(self.nPairs/raxs)
661 662 if self.nPairs <4:
662 663 raxs = 2
663 664 caxs = 2
664 665 #print(raxs, caxs)
665 666 fft_rev = 14 #nFFT to plot
666 667 hei_rev = ((self.heights >= 550) & (self.heights <= 551)).nonzero() #hei to plot
667 668 hei_rev = hei_rev[0]
668 669 #print(hei_rev)
669 670
670 671 #print numpy.absolute(rfunc[:,0,0,14])
671 672
672 673 gauss_fit, covariance = None, None
673 674 for ih in range(self.minAltInd,self.maxAltInd):
674 675 for ifreq in range(self.nFFTPoints):
676 '''
675 677 ###ONLY FOR TEST:
676 678 if ifreq ==fft_rev and ih==hei_rev: #TO VIEW A SIGNLE FREQUENCY
677 679 fig, axs = plt.subplots(raxs, caxs)
678 680 fig2, axs2 = plt.subplots(raxs, caxs)
679 681 col_ax = 0
680 682 row_ax = 0
683 '''
681 684 #print(self.nPairs)
682 for ii in range(self.nPairs): #PARES DE CANALES SELF y CROSS
683 if self.crosspairs[ii][1]-self.crosspairs[ii][0] > 1: # APLICAR SOLO EN PARES CONTIGUOS
684 continue
685 if not self.crosspairs[ii][0] in channelsProcssd:
686 channelA_ok = True
685 for ii in range(self.nChan): #PARES DE CANALES SELF y CROSS
686 # if self.crosspairs[ii][1]-self.crosspairs[ii][0] > 1: # APLICAR SOLO EN PARES CONTIGUOS
687 # continue
688 # if not self.crosspairs[ii][0] in channelsProcssd:
689 # channelA_ok = True
687 690 #print("pair: ",self.crosspairs[ii])
688 if (col_ax%caxs==0 and col_ax!=0 and self.nPairs !=1): ###ONLY FOR TEST:
691 '''
692 ###ONLY FOR TEST:
693 if (col_ax%caxs==0 and col_ax!=0 and self.nPairs !=1):
689 694 col_ax = 0
690 695 row_ax += 1
696 '''
691 697 func2clean = 10*numpy.log10(numpy.absolute(rfunc[:,ii,ifreq,ih])) #Potencia?
692 698 #print(func2clean.shape)
693 699 val = (numpy.isfinite(func2clean)==True).nonzero()
694 700
695 701 if len(val)>0: #limitador
696 702 min_val = numpy.around(numpy.amin(func2clean)-2) #> (-40)
697 703 if min_val <= -40 :
698 704 min_val = -40
699 705 max_val = numpy.around(numpy.amax(func2clean)+2) #< 200
700 706 if max_val >= 200 :
701 707 max_val = 200
702 708 #print min_val, max_val
703 709 step = 1
704 710 #print("Getting bins and the histogram")
705 711 x_dist = min_val + numpy.arange(1 + ((max_val-(min_val))/step))*step
706 712 y_dist,binstep = numpy.histogram(func2clean,bins=range(int(min_val),int(max_val+2),step))
707 713 #print(len(y_dist),len(binstep[:-1]))
708 714 #print(row_ax,col_ax, " ..")
709 715 #print(self.pairsArray[ii][0],self.pairsArray[ii][1])
710 716 mean = numpy.sum(x_dist * y_dist) / numpy.sum(y_dist)
711 717 sigma = numpy.sqrt(numpy.sum(y_dist * (x_dist - mean)**2) / numpy.sum(y_dist))
712 718 parg = [numpy.amax(y_dist),mean,sigma]
713 719
714 720 newY = None
715 721
716 722 try :
717 723 gauss_fit, covariance = curve_fit(fit_func, x_dist, y_dist,p0=parg)
718 724 mode = gauss_fit[1]
719 725 stdv = gauss_fit[2]
720 726 #print(" FIT OK",gauss_fit)
721
727 '''
722 728 ###ONLY FOR TEST:
723 729 if ifreq ==fft_rev and ih==hei_rev: #TO VIEW A SIGNLE FREQUENCY
724 730 newY = fit_func(x_dist,gauss_fit[0],gauss_fit[1],gauss_fit[2])
725 731 axs[row_ax,col_ax].plot(binstep[:-1],y_dist,color='green')
726 732 axs[row_ax,col_ax].plot(binstep[:-1],newY,color='red')
727 axs[row_ax,col_ax].set_title("Pair "+str(self.crosspairs[ii]))
728
733 axs[row_ax,col_ax].set_title("CH "+str(self.channels[ii]))
734 '''
729 735 except:
730 736 mode = mean
731 737 stdv = sigma
732 738 #print("FIT FAIL")
733 continue
739 #continue
734 740
735 741
736 742 #print(mode,stdv)
737 743 #Removing echoes greater than mode + std_factor*stdv
738 744 noval = (abs(func2clean - mode)>=(factor_stdv*stdv)).nonzero()
739 745 #noval tiene los indices que se van a remover
740 #print("Pair ",ii," novals: ",len(noval[0]))
746 #print("Chan ",ii," novals: ",len(noval[0]))
741 747 if len(noval[0]) > 0: #forma de array (N,) es igual a longitud (N)
742 748 novall = ((func2clean - mode) >= (factor_stdv*stdv)).nonzero()
743 749 #print(novall)
744 750 #print(" ",self.pairsArray[ii])
745 cross_pairs = self.pairsArray[ii]
751 #cross_pairs = self.pairsArray[ii]
746 752 #Getting coherent echoes which are removed.
747 753 # if len(novall[0]) > 0:
748 754 #
749 755 # val_spc[novall[0],cross_pairs[0],ifreq,ih] = 1
750 756 # val_spc[novall[0],cross_pairs[1],ifreq,ih] = 1
751 757 # val_cspc[novall[0],ii,ifreq,ih] = 1
752 758 #print("OUT NOVALL 1")
753
754 new_a = numpy.delete(cspectra[:,ii,ifreq,ih], noval[0])
755 cspectra[noval,ii,ifreq,ih] = numpy.mean(new_a) #mean CrossSpectra
756
757 if channelA_ok:
758 chA = self.channels.index(cross_pairs[0])
759 new_b = numpy.delete(spectra[:,chA,ifreq,ih], noval[0])
760 spectra[noval,chA,ifreq,ih] = numpy.mean(new_b) #mean Spectra Pair A
761 channelA_ok = False
762 chB = self.channels.index(cross_pairs[1])
763 new_c = numpy.delete(spectra[:,chB,ifreq,ih], noval[0])
764 spectra[noval,chB,ifreq,ih] = numpy.mean(new_c) #mean Spectra Pair B
765
766 channelsProcssd.append(self.crosspairs[ii][0]) # save channel A
767 channelsProcssd.append(self.crosspairs[ii][1]) # save channel B
768
759 try:
760 pair = (self.channels[ii],self.channels[ii + 1])
761 except:
762 pair = (99,99)
763 #print("par ", pair)
764 if ( pair in self.crosspairs):
765 q = self.crosspairs.index(pair)
766 #print("está aqui: ", q, (ii,ii + 1))
767 new_a = numpy.delete(cspectra[:,q,ifreq,ih], noval[0])
768 cspectra[noval,q,ifreq,ih] = numpy.mean(new_a) #mean CrossSpectra
769
770 #if channelA_ok:
771 #chA = self.channels.index(cross_pairs[0])
772 new_b = numpy.delete(spectra[:,ii,ifreq,ih], noval[0])
773 spectra[noval,ii,ifreq,ih] = numpy.mean(new_b) #mean Spectra Pair A
774 #channelA_ok = False
775
776 # chB = self.channels.index(cross_pairs[1])
777 # new_c = numpy.delete(spectra[:,chB,ifreq,ih], noval[0])
778 # spectra[noval,chB,ifreq,ih] = numpy.mean(new_c) #mean Spectra Pair B
779 #
780 # channelsProcssd.append(self.crosspairs[ii][0]) # save channel A
781 # channelsProcssd.append(self.crosspairs[ii][1]) # save channel B
782 '''
769 783 ###ONLY FOR TEST:
770 784 if ifreq ==fft_rev and ih==hei_rev: #TO VIEW A SIGNLE FREQUENCY
771 func2clean = 10*numpy.log10(numpy.absolute(cspectra[:,ii,ifreq,ih]))
785 func2clean = 10*numpy.log10(numpy.absolute(spectra[:,ii,ifreq,ih]))
772 786 y_dist,binstep = numpy.histogram(func2clean,bins=range(int(min_val),int(max_val+2),step))
773 787 axs2[row_ax,col_ax].plot(binstep[:-1],newY,color='red')
774 788 axs2[row_ax,col_ax].plot(binstep[:-1],y_dist,color='green')
775 axs2[row_ax,col_ax].set_title("Pair "+str(self.crosspairs[ii]))
776
789 axs2[row_ax,col_ax].set_title("CH "+str(self.channels[ii]))
790 '''
791 '''
777 792 ###ONLY FOR TEST:
778 793 col_ax += 1 #contador de ploteo columnas
779 794 ##print(col_ax)
780 795 ###ONLY FOR TEST:
781 796 if ifreq ==fft_rev and ih==hei_rev: #TO VIEW A SIGNLE FREQUENCY
782 797 title = str(dataOut.datatime)+" nFFT: "+str(ifreq)+" Alt: "+str(self.heights[ih])+ " km"
783 798 title2 = str(dataOut.datatime)+" nFFT: "+str(ifreq)+" Alt: "+str(self.heights[ih])+ " km CLEANED"
784 799 fig.suptitle(title)
785 800 fig2.suptitle(title2)
786 801 plt.show()
787
788
789 '''
790
791 channels = channels
792 cross_pairs = cross_pairs
793 #print("OUT NOVALL 2")
794
795 vcross0 = (cross_pairs[0] == channels[ii]).nonzero()
796 vcross1 = (cross_pairs[1] == channels[ii]).nonzero()
797 vcross = numpy.concatenate((vcross0,vcross1),axis=None)
798 #print('vcros =', vcross)
799
800 #Getting coherent echoes which are removed.
801 if len(novall) > 0:
802 #val_spc[novall,ii,ifreq,ih] = 1
803 val_spc[ii,ifreq,ih,novall] = 1
804 if len(vcross) > 0:
805 val_cspc[vcross,ifreq,ih,novall] = 1
806
807 #Removing coherent from ISR data.
808 self.bloque0[ii,ifreq,ih,noval] = numpy.nan
809 if len(vcross) > 0:
810 self.bloques[vcross,ifreq,ih,noval] = numpy.nan
811 802 '''
803 ##################################################################################################
812 804
813 805 #print("Getting average of the spectra and cross-spectra from incoherent echoes.")
814 806 out_spectra = numpy.zeros([self.nChan,self.nFFTPoints,self.nHeights], dtype=float) #+numpy.nan
815 807 out_cspectra = numpy.zeros([self.nPairs,self.nFFTPoints,self.nHeights], dtype=complex) #+numpy.nan
816 808 for ih in range(self.nHeights):
817 809 for ifreq in range(self.nFFTPoints):
818 810 for ich in range(self.nChan):
819 811 tmp = spectra[:,ich,ifreq,ih]
820 812 valid = (numpy.isfinite(tmp[:])==True).nonzero()
821 813
822 814 if len(valid[0]) >0 :
823 815 out_spectra[ich,ifreq,ih] = numpy.nansum(tmp)#/len(valid[0])
824 816
825 817 for icr in range(self.nPairs):
826 818 tmp = numpy.squeeze(cspectra[:,icr,ifreq,ih])
827 819 valid = (numpy.isfinite(tmp)==True).nonzero()
828 820 if len(valid[0]) > 0:
829 821 out_cspectra[icr,ifreq,ih] = numpy.nansum(tmp)#/len(valid[0])
830 '''
831 # print('##########################################################')
832 print("Removing fake coherent echoes (at least 4 points around the point)")
833
834 val_spectra = numpy.sum(val_spc,0)
835 val_cspectra = numpy.sum(val_cspc,0)
836
837 val_spectra = self.REM_ISOLATED_POINTS(val_spectra,4)
838 val_cspectra = self.REM_ISOLATED_POINTS(val_cspectra,4)
839
840 for i in range(nChan):
841 for j in range(nProf):
842 for k in range(nHeights):
843 if numpy.isfinite(val_spectra[i,j,k]) and val_spectra[i,j,k] < 1 :
844 val_spc[:,i,j,k] = 0.0
845 for i in range(nPairs):
846 for j in range(nProf):
847 for k in range(nHeights):
848 if numpy.isfinite(val_cspectra[i,j,k]) and val_cspectra[i,j,k] < 1 :
849 val_cspc[:,i,j,k] = 0.0
850
851 # val_spc = numpy.reshape(val_spc, (len(spectra[:,0,0,0]),nProf*nHeights*nChan))
852 # if numpy.isfinite(val_spectra)==str(True):
853 # noval = (val_spectra<1).nonzero()
854 # if len(noval) > 0:
855 # val_spc[:,noval] = 0.0
856 # val_spc = numpy.reshape(val_spc, (149,nChan,nProf,nHeights))
857
858 #val_cspc = numpy.reshape(val_spc, (149,nChan*nHeights*nProf))
859 #if numpy.isfinite(val_cspectra)==str(True):
860 # noval = (val_cspectra<1).nonzero()
861 # if len(noval) > 0:
862 # val_cspc[:,noval] = 0.0
863 # val_cspc = numpy.reshape(val_cspc, (149,nChan,nProf,nHeights))
864 tmp_sat_spectra = spectra.copy()
865 tmp_sat_spectra = tmp_sat_spectra*numpy.nan
866 tmp_sat_cspectra = cspectra.copy()
867 tmp_sat_cspectra = tmp_sat_cspectra*numpy.nan
868 '''
869 # fig = plt.figure(figsize=(6,5))
870 # left, bottom, width, height = 0.1, 0.1, 0.8, 0.8
871 # ax = fig.add_axes([left, bottom, width, height])
872 # cp = ax.contour(10*numpy.log10(numpy.absolute(spectra[0,0,:,:])))
873 # ax.clabel(cp, inline=True,fontsize=10)
874 # plt.show()
875 '''
876 val = (val_spc > 0).nonzero()
877 if len(val[0]) > 0:
878 tmp_sat_spectra[val] = in_sat_spectra[val]
879 val = (val_cspc > 0).nonzero()
880 if len(val[0]) > 0:
881 tmp_sat_cspectra[val] = in_sat_cspectra[val]
882
883 print("Getting average of the spectra and cross-spectra from incoherent echoes 2")
884 sat_spectra = numpy.zeros((nChan,nProf,nHeights), dtype=float)
885 sat_cspectra = numpy.zeros((nPairs,nProf,nHeights), dtype=complex)
886 for ih in range(nHeights):
887 for ifreq in range(nProf):
888 for ich in range(nChan):
889 tmp = numpy.squeeze(tmp_sat_spectra[:,ich,ifreq,ih])
890 valid = (numpy.isfinite(tmp)).nonzero()
891 if len(valid[0]) > 0:
892 sat_spectra[ich,ifreq,ih] = numpy.nansum(tmp)/len(valid[0])
893 822
894 for icr in range(nPairs):
895 tmp = numpy.squeeze(tmp_sat_cspectra[:,icr,ifreq,ih])
896 valid = (numpy.isfinite(tmp)).nonzero()
897 if len(valid[0]) > 0:
898 sat_cspectra[icr,ifreq,ih] = numpy.nansum(tmp)/len(valid[0])
899 '''
900 #self.__dataReady= True
901 #sat_spectra, sat_cspectra= sat_spectra, sat_cspectra
902 #if not self.__dataReady:
903 #return None, None
904 #return out_spectra, out_cspectra ,sat_spectra,sat_cspectra
905 823 return out_spectra, out_cspectra
906 824
907 825 def REM_ISOLATED_POINTS(self,array,rth):
908 826 # import matplotlib.pyplot as plt
909 827 if rth == None :
910 828 rth = 4
911 print("REM ISO")
829 #print("REM ISO")
912 830 num_prof = len(array[0,:,0])
913 831 num_hei = len(array[0,0,:])
914 832 n2d = len(array[:,0,0])
915 833
916 834 for ii in range(n2d) :
917 835 #print ii,n2d
918 836 tmp = array[ii,:,:]
919 837 #print tmp.shape, array[ii,101,:],array[ii,102,:]
920 838
921 839 # fig = plt.figure(figsize=(6,5))
922 840 # left, bottom, width, height = 0.1, 0.1, 0.8, 0.8
923 841 # ax = fig.add_axes([left, bottom, width, height])
924 842 # x = range(num_prof)
925 843 # y = range(num_hei)
926 844 # cp = ax.contour(y,x,tmp)
927 845 # ax.clabel(cp, inline=True,fontsize=10)
928 846 # plt.show()
929 847
930 848 #indxs = WHERE(FINITE(tmp) AND tmp GT 0,cindxs)
931 849 tmp = numpy.reshape(tmp,num_prof*num_hei)
932 850 indxs1 = (numpy.isfinite(tmp)==True).nonzero()
933 851 indxs2 = (tmp > 0).nonzero()
934 852
935 853 indxs1 = (indxs1[0])
936 854 indxs2 = indxs2[0]
937 855 #indxs1 = numpy.array(indxs1[0])
938 856 #indxs2 = numpy.array(indxs2[0])
939 857 indxs = None
940 858 #print indxs1 , indxs2
941 859 for iv in range(len(indxs2)):
942 860 indv = numpy.array((indxs1 == indxs2[iv]).nonzero())
943 861 #print len(indxs2), indv
944 862 if len(indv[0]) > 0 :
945 863 indxs = numpy.concatenate((indxs,indxs2[iv]), axis=None)
946 864 # print indxs
947 865 indxs = indxs[1:]
948 866 #print(indxs, len(indxs))
949 867 if len(indxs) < 4 :
950 868 array[ii,:,:] = 0.
951 869 return
952 870
953 871 xpos = numpy.mod(indxs ,num_hei)
954 872 ypos = (indxs / num_hei)
955 873 sx = numpy.argsort(xpos) # Ordering respect to "x" (time)
956 874 #print sx
957 875 xpos = xpos[sx]
958 876 ypos = ypos[sx]
959 877
960 878 # *********************************** Cleaning isolated points **********************************
961 879 ic = 0
962 880 while True :
963 881 r = numpy.sqrt(list(numpy.power((xpos[ic]-xpos),2)+ numpy.power((ypos[ic]-ypos),2)))
964 882 #no_coh = WHERE(FINITE(r) AND (r LE rth),cno_coh)
965 883 #plt.plot(r)
966 884 #plt.show()
967 885 no_coh1 = (numpy.isfinite(r)==True).nonzero()
968 886 no_coh2 = (r <= rth).nonzero()
969 887 #print r, no_coh1, no_coh2
970 888 no_coh1 = numpy.array(no_coh1[0])
971 889 no_coh2 = numpy.array(no_coh2[0])
972 890 no_coh = None
973 891 #print valid1 , valid2
974 892 for iv in range(len(no_coh2)):
975 893 indv = numpy.array((no_coh1 == no_coh2[iv]).nonzero())
976 894 if len(indv[0]) > 0 :
977 895 no_coh = numpy.concatenate((no_coh,no_coh2[iv]), axis=None)
978 896 no_coh = no_coh[1:]
979 897 #print len(no_coh), no_coh
980 898 if len(no_coh) < 4 :
981 899 #print xpos[ic], ypos[ic], ic
982 900 # plt.plot(r)
983 901 # plt.show()
984 902 xpos[ic] = numpy.nan
985 903 ypos[ic] = numpy.nan
986 904
987 905 ic = ic + 1
988 906 if (ic == len(indxs)) :
989 907 break
990 908 #print( xpos, ypos)
991 909
992 910 indxs = (numpy.isfinite(list(xpos))==True).nonzero()
993 911 #print indxs[0]
994 912 if len(indxs[0]) < 4 :
995 913 array[ii,:,:] = 0.
996 914 return
997 915
998 916 xpos = xpos[indxs[0]]
999 917 ypos = ypos[indxs[0]]
1000 918 for i in range(0,len(ypos)):
1001 919 ypos[i]=int(ypos[i])
1002 920 junk = tmp
1003 921 tmp = junk*0.0
1004 922
1005 923 tmp[list(xpos + (ypos*num_hei))] = junk[list(xpos + (ypos*num_hei))]
1006 924 array[ii,:,:] = numpy.reshape(tmp,(num_prof,num_hei))
1007 925
1008 926 #print array.shape
1009 927 #tmp = numpy.reshape(tmp,(num_prof,num_hei))
1010 928 #print tmp.shape
1011 929
1012 930 # fig = plt.figure(figsize=(6,5))
1013 931 # left, bottom, width, height = 0.1, 0.1, 0.8, 0.8
1014 932 # ax = fig.add_axes([left, bottom, width, height])
1015 933 # x = range(num_prof)
1016 934 # y = range(num_hei)
1017 935 # cp = ax.contour(y,x,array[ii,:,:])
1018 936 # ax.clabel(cp, inline=True,fontsize=10)
1019 937 # plt.show()
1020 938 return array
1021 939
class removeInterference(Operation):
    """Remove radio-frequency interference from spectra and cross-spectra.

    mode 1: classic per-channel interference-profile estimation/subtraction.
    mode 2: threshold-based cleaning of the cross-spectra only.
    """

    def removeInterference2(self):
        # Threshold-based cleaning: flag profile lines whose summed power
        # over the quiet heights exceeds the 98th percentile.
        cspc = self.dataOut.data_cspc
        Heights = numpy.arange(cspc.shape[2])
        realCspc = numpy.abs(cspc)

        for i in range(cspc.shape[0]):
            LinePower = numpy.sum(realCspc[i], axis=0)  # power per profile line
            Threshold = numpy.amax(LinePower) - numpy.sort(LinePower)[len(Heights) - int(len(Heights) * 0.1)]
            SelectedHeights = Heights[numpy.where(LinePower < Threshold)]
            InterferenceSum = numpy.sum(realCspc[i, :, SelectedHeights], axis=0)
            InterferenceThresholdMin = numpy.sort(InterferenceSum)[int(len(InterferenceSum) * 0.98)]

            InterferenceRange = numpy.where(([InterferenceSum > InterferenceThresholdMin]))
            # NOTE(review): len() of a numpy.where() tuple is its number of
            # axes, not the number of flagged lines -- looks suspicious,
            # confirm the intended guard.
            if len(InterferenceRange) < int(cspc.shape[1] * 0.3):
                cspc[i, InterferenceRange, :] = numpy.nan  # numpy.NaN was removed in NumPy 2.0

        self.dataOut.data_cspc = cspc

    def removeInterference(self, interf=2, hei_interf=None, nhei_interf=None, offhei_interf=None):
        """Estimate an average interference profile from the quietest heights
        and subtract it from each channel's spectra and cross-spectra.

        hei_interf    -- candidate interference heights (default: upper half).
        nhei_interf   -- number of quiet heights used for the estimate.
        offhei_interf -- offset into the sorted-by-power height list.
        """
        jspectra = self.dataOut.data_spc
        jcspectra = self.dataOut.data_cspc
        jnoise = self.dataOut.getNoise()
        num_incoh = self.dataOut.nIncohInt

        num_channel = jspectra.shape[0]
        num_prof = jspectra.shape[1]
        num_hei = jspectra.shape[2]

        # Default interference heights: the upper half of the range gates.
        if hei_interf is None:
            count_hei = int(num_hei / 2)
            hei_interf = numpy.asmatrix(list(range(count_hei))) + num_hei - count_hei
            hei_interf = numpy.asarray(hei_interf)[0]
        if nhei_interf is None:
            nhei_interf = 5
        if nhei_interf < 1:
            nhei_interf = 1
        if nhei_interf > count_hei:
            nhei_interf = count_hei
        if offhei_interf is None:
            offhei_interf = 0

        ind_hei = list(range(num_hei))
        mask_prof = numpy.asarray(list(range(num_prof)))
        num_mask_prof = mask_prof.size
        comp_mask_prof = [0, num_prof / 2]

        # noise_exist: whether jnoise holds a valid noise level per channel.
        if jnoise.size < num_channel or numpy.isnan(jnoise).any():
            # Was `jnoise = numpy.nan` (a bare float), which made the
            # `jnoise[0]` subscript below raise TypeError. Keep it indexable.
            jnoise = numpy.nan * numpy.ones(num_channel)
        noise_exist = jnoise[0] < numpy.inf  # numpy.Inf removed in NumPy 2.0
        tmp_noise = 0  # fallback noise level when no valid noise is available

        # --- Interference removal on the power spectra ---
        for ich in range(num_channel):
            # Sort heights by total power (quietest first).
            power = jspectra[ich, mask_prof, :]
            power = power[:, hei_interf]
            power = power.sum(axis=0)
            psort = power.ravel().argsort()

            # Average interference estimated over the quietest heights.
            junkspc_interf = jspectra[ich, :, hei_interf[psort[list(range(
                offhei_interf, nhei_interf + offhei_interf))]]]

            if noise_exist:
                tmp_noise = jnoise[ich]
                junkspc_interf = junkspc_interf - tmp_noise
                #junkspc_interf[:,comp_mask_prof] = 0

            jspc_interf = junkspc_interf.sum(axis=0) / nhei_interf
            jspc_interf = jspc_interf.transpose()
            # Split bins of the mean interference spectrum into noise-like
            # and interference-like.
            noiseid = numpy.where(
                jspc_interf <= tmp_noise / numpy.sqrt(num_incoh))
            noiseid = noiseid[0]
            cnoiseid = noiseid.size
            interfid = numpy.where(
                jspc_interf > tmp_noise / numpy.sqrt(num_incoh))
            interfid = interfid[0]
            cinterfid = interfid.size

            if cnoiseid > 0:
                jspc_interf[noiseid] = 0

            # Expand the set of profiles to clean by one bin on each side.
            if cinterfid > 0:
                new_interfid = (
                    numpy.r_[interfid - 1, interfid, interfid + 1] + num_prof) % num_prof
                new_interfid = numpy.asarray(new_interfid)
                new_interfid = {x for x in new_interfid}
                new_interfid = numpy.array(list(new_interfid))
                new_cinterfid = new_interfid.size
            else:
                new_cinterfid = 0

            for ip in range(new_cinterfid):
                ind = junkspc_interf[:, new_interfid[ip]].ravel().argsort()
                jspc_interf[new_interfid[ip]
                            ] = junkspc_interf[ind[nhei_interf // 2], new_interfid[ip]]

            jspectra[ich, :, ind_hei] = jspectra[ich, :,
                                                 ind_hei] - jspc_interf  # TODO: check indices

            # Remove interference at the profile with the strongest estimate.
            ListAux = jspc_interf[mask_prof].tolist()
            maxid = ListAux.index(max(ListAux))

            if cinterfid > 0:
                for ip in range(cinterfid * (interf == 2) - 1):
                    ind = (jspectra[ich, interfid[ip], :] < tmp_noise *
                           (1 + 1 / numpy.sqrt(num_incoh))).nonzero()
                    cind = len(ind)

                    if cind > 0:
                        jspectra[ich, interfid[ip], ind] = tmp_noise * \
                            (1 + (numpy.random.uniform(cind) - 0.5) /
                             numpy.sqrt(num_incoh))

                # Cubic interpolation through the 4 neighbours of the
                # strongest-interference profile.
                ind = numpy.array([-2, -1, 1, 2])
                xx = numpy.zeros([4, 4])

                for id1 in range(4):
                    xx[:, id1] = ind[id1]**numpy.asarray(list(range(4)))

                xx_inv = numpy.linalg.inv(xx)
                xx = xx_inv[:, 0]
                ind = (ind + maxid + num_mask_prof) % num_mask_prof
                yy = jspectra[ich, mask_prof[ind], :]
                jspectra[ich, mask_prof[maxid], :] = numpy.dot(
                    yy.transpose(), xx)

            # Clip anything that fell below the noise floor.
            indAux = (jspectra[ich, :, :] < tmp_noise *
                      (1 - 1 / numpy.sqrt(num_incoh))).nonzero()
            jspectra[ich, indAux[0], indAux[1]] = tmp_noise * \
                (1 - 1 / numpy.sqrt(num_incoh))

        # --- Interference removal on the cross-spectra ---
        if jcspectra is None:
            return jspectra, jcspectra
        num_pairs = int(jcspectra.size / (num_prof * num_hei))
        jcspectra = jcspectra.reshape(num_pairs, num_prof, num_hei)

        for ip in range(num_pairs):

            cspower = numpy.abs(jcspectra[ip, mask_prof, :])
            cspower = cspower[:, hei_interf]
            cspower = cspower.sum(axis=0)

            cspsort = cspower.ravel().argsort()
            junkcspc_interf = jcspectra[ip, :, hei_interf[cspsort[list(range(
                offhei_interf, nhei_interf + offhei_interf))]]]
            junkcspc_interf = junkcspc_interf.transpose()
            jcspc_interf = junkcspc_interf.sum(axis=1) / nhei_interf

            ind = numpy.abs(jcspc_interf[mask_prof]).ravel().argsort()

            median_real = int(numpy.median(numpy.real(
                junkcspc_interf[mask_prof[ind[list(range(3 * num_prof // 4))]], :])))
            median_imag = int(numpy.median(numpy.imag(
                junkcspc_interf[mask_prof[ind[list(range(3 * num_prof // 4))]], :])))
            comp_mask_prof = [int(e) for e in comp_mask_prof]
            # builtin complex(): numpy.complex was removed in NumPy 1.24.
            junkcspc_interf[comp_mask_prof, :] = complex(
                median_real, median_imag)

            for iprof in range(num_prof):
                ind = numpy.abs(junkcspc_interf[iprof, :]).ravel().argsort()
                jcspc_interf[iprof] = junkcspc_interf[iprof, ind[nhei_interf // 2]]

            # Subtract the estimated interference.
            jcspectra[ip, :, ind_hei] = jcspectra[ip,
                                                  :, ind_hei] - jcspc_interf

            ListAux = numpy.abs(jcspc_interf[mask_prof]).tolist()
            maxid = ListAux.index(max(ListAux))

            ind = numpy.array([-2, -1, 1, 2])
            xx = numpy.zeros([4, 4])

            for id1 in range(4):
                xx[:, id1] = ind[id1]**numpy.asarray(list(range(4)))

            xx_inv = numpy.linalg.inv(xx)
            xx = xx_inv[:, 0]

            ind = (ind + maxid + num_mask_prof) % num_mask_prof
            yy = jcspectra[ip, mask_prof[ind], :]
            jcspectra[ip, mask_prof[maxid], :] = numpy.dot(yy.transpose(), xx)

        # Store results.
        self.dataOut.data_spc = jspectra
        self.dataOut.data_cspc = jcspectra

        return 1

    def run(self, dataOut, interf=2, hei_interf=None, nhei_interf=None, offhei_interf=None, mode=1):
        """Entry point: apply the selected interference-removal mode."""
        self.dataOut = dataOut

        if mode == 1:
            # The original passed hard-coded defaults here, silently
            # ignoring every parameter the caller supplied.
            self.removeInterference(interf=interf, hei_interf=hei_interf,
                                    nhei_interf=nhei_interf,
                                    offhei_interf=offhei_interf)
        elif mode == 2:
            self.removeInterference2()

        return self.dataOut
1240 1158
1241 1159
1242 1160 class IncohInt(Operation):
1243 1161
1244 1162 __profIndex = 0
1245 1163 __withOverapping = False
1246 1164
1247 1165 __byTime = False
1248 1166 __initime = None
1249 1167 __lastdatatime = None
1250 1168 __integrationtime = None
1251 1169
1252 1170 __buffer_spc = None
1253 1171 __buffer_cspc = None
1254 1172 __buffer_dc = None
1255 1173
1256 1174 __dataReady = False
1257 1175
1258 1176 __timeInterval = None
1259 1177
1260 1178 n = None
1261 1179
    def __init__(self):
        # Delegate to the Operation base; integration state is configured
        # lazily via setup().
        Operation.__init__(self)
1265 1183
1266 1184 def setup(self, n=None, timeInterval=None, overlapping=False):
1267 1185 """
1268 1186 Set the parameters of the integration class.
1269 1187
1270 1188 Inputs:
1271 1189
1272 1190 n : Number of coherent integrations
1273 1191 timeInterval : Time of integration. If the parameter "n" is selected this one does not work
1274 1192 overlapping :
1275 1193
1276 1194 """
1277 1195
1278 1196 self.__initime = None
1279 1197 self.__lastdatatime = 0
1280 1198
1281 1199 self.__buffer_spc = 0
1282 1200 self.__buffer_cspc = 0
1283 1201 self.__buffer_dc = 0
1284 1202
1285 1203 self.__profIndex = 0
1286 1204 self.__dataReady = False
1287 1205 self.__byTime = False
1288 1206
1289 1207 if n is None and timeInterval is None:
1290 1208 raise ValueError("n or timeInterval should be specified ...")
1291 1209
1292 1210 if n is not None:
1293 1211 self.n = int(n)
1294 1212 else:
1295 1213
1296 1214 self.__integrationtime = int(timeInterval)
1297 1215 self.n = None
1298 1216 self.__byTime = True
1299 1217
1300 1218 def putData(self, data_spc, data_cspc, data_dc):
1301 1219 """
1302 1220 Add a profile to the __buffer_spc and increase in one the __profileIndex
1303 1221
1304 1222 """
1305 1223
1306 1224 self.__buffer_spc += data_spc
1307 1225
1308 1226 if data_cspc is None:
1309 1227 self.__buffer_cspc = None
1310 1228 else:
1311 1229 self.__buffer_cspc += data_cspc
1312 1230
1313 1231 if data_dc is None:
1314 1232 self.__buffer_dc = None
1315 1233 else:
1316 1234 self.__buffer_dc += data_dc
1317 1235
1318 1236 self.__profIndex += 1
1319 1237
1320 1238 return
1321 1239
1322 1240 def pushData(self):
1323 1241 """
1324 1242 Return the sum of the last profiles and the profiles used in the sum.
1325 1243
1326 1244 Affected:
1327 1245
1328 1246 self.__profileIndex
1329 1247
1330 1248 """
1331 1249
1332 1250 data_spc = self.__buffer_spc
1333 1251 data_cspc = self.__buffer_cspc
1334 1252 data_dc = self.__buffer_dc
1335 1253 n = self.__profIndex
1336 1254
1337 1255 self.__buffer_spc = 0
1338 1256 self.__buffer_cspc = 0
1339 1257 self.__buffer_dc = 0
1340 1258 self.__profIndex = 0
1341 1259
1342 1260 return data_spc, data_cspc, data_dc, n
1343 1261
1344 1262 def byProfiles(self, *args):
1345 1263
1346 1264 self.__dataReady = False
1347 1265 avgdata_spc = None
1348 1266 avgdata_cspc = None
1349 1267 avgdata_dc = None
1350 1268
1351 1269 self.putData(*args)
1352 1270
1353 1271 if self.__profIndex == self.n:
1354 1272
1355 1273 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
1356 1274 self.n = n
1357 1275 self.__dataReady = True
1358 1276
1359 1277 return avgdata_spc, avgdata_cspc, avgdata_dc
1360 1278
1361 1279 def byTime(self, datatime, *args):
1362 1280
1363 1281 self.__dataReady = False
1364 1282 avgdata_spc = None
1365 1283 avgdata_cspc = None
1366 1284 avgdata_dc = None
1367 1285
1368 1286 self.putData(*args)
1369 1287
1370 1288 if (datatime - self.__initime) >= self.__integrationtime:
1371 1289 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
1372 1290 self.n = n
1373 1291 self.__dataReady = True
1374 1292
1375 1293 return avgdata_spc, avgdata_cspc, avgdata_dc
1376 1294
1377 1295 def integrate(self, datatime, *args):
1378 1296
1379 1297 if self.__profIndex == 0:
1380 1298 self.__initime = datatime
1381 1299
1382 1300 if self.__byTime:
1383 1301 avgdata_spc, avgdata_cspc, avgdata_dc = self.byTime(
1384 1302 datatime, *args)
1385 1303 else:
1386 1304 avgdata_spc, avgdata_cspc, avgdata_dc = self.byProfiles(*args)
1387 1305
1388 1306 if not self.__dataReady:
1389 1307 return None, None, None, None
1390 1308
1391 1309 return self.__initime, avgdata_spc, avgdata_cspc, avgdata_dc
1392 1310
1393 1311 def run(self, dataOut, n=None, timeInterval=None, overlapping=False):
1394 1312 if n == 1:
1395 1313 return dataOut
1396 1314
1397 1315 dataOut.flagNoData = True
1398 1316
1399 1317 if not self.isConfig:
1400 1318 self.setup(n, timeInterval, overlapping)
1401 1319 self.isConfig = True
1402 1320
1403 1321 avgdatatime, avgdata_spc, avgdata_cspc, avgdata_dc = self.integrate(dataOut.utctime,
1404 1322 dataOut.data_spc,
1405 1323 dataOut.data_cspc,
1406 1324 dataOut.data_dc)
1407 1325
1408 1326 if self.__dataReady:
1409 1327
1410 1328 dataOut.data_spc = avgdata_spc
1411 1329 dataOut.data_cspc = avgdata_cspc
1412 1330 dataOut.data_dc = avgdata_dc
1413 1331 dataOut.nIncohInt *= self.n
1414 1332 dataOut.utctime = avgdatatime
1415 1333 dataOut.flagNoData = False
1416 1334
1417 1335 return dataOut
1418 1336
class dopplerFlip(Operation):
    """Reverse the Doppler (profile) axis of spectra channel 2.

    The DC profile and the profile immediately before it are left in their
    original positions. Channel 2 corresponds to the JULIA oblique beam.
    """

    def run(self, dataOut):
        # data_spc layout: (num_chan, num_profiles, num_heights)
        self.dataOut = dataOut
        # channel 2 slice: (num_profiles, num_heights)
        spc = self.dataOut.data_spc[2]
        num_profiles = spc.shape[0]
        freq_dc = int(num_profiles / 2)
        # Reverse along the profile axis into a float buffer (same dtype
        # coercion as the original per-row copy into numpy.zeros).
        flipped = numpy.zeros(spc.shape)
        flipped[:] = spc[::-1]
        # Restore the DC profile and its immediate predecessor
        flipped[freq_dc - 1] = spc[freq_dc - 1]
        flipped[freq_dc] = spc[freq_dc]
        # Write the modified channel back into the channel array
        self.dataOut.data_spc[2] = flipped

        return self.dataOut
@@ -1,1624 +1,1622
1 1 import sys
2 2 import numpy,math
3 3 from scipy import interpolate
4 4 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
5 5 from schainpy.model.data.jrodata import Voltage,hildebrand_sekhon
6 6 from schainpy.utils import log
7 7 from time import time
8 8
9 9
10 10
class VoltageProc(ProcessingUnit):
    """Processing unit that exposes incoming Voltage (or AMISR) data as a
    Voltage object on self.dataOut."""

    def __init__(self):

        ProcessingUnit.__init__(self)

        self.dataOut = Voltage()
        self.flip = 1
        self.setupReq = False

    def run(self):
        # AMISR input needs a field-by-field translation first
        if self.dataIn.type == 'AMISR':
            self.__updateObjFromAmisrInput()

        # Voltage input is copied straight through
        if self.dataIn.type == 'Voltage':
            self.dataOut.copy(self.dataIn)

    def __updateObjFromAmisrInput(self):
        """Copy the relevant AMISR fields onto the output Voltage object."""
        plain_fields = (
            'timeZone', 'dstFlag', 'errorCount', 'useLocalTime',
            'flagNoData', 'data', 'utctime', 'channelList',
            'heightList', 'nProfiles',
            'nCohInt', 'ippSeconds', 'frequency',
            'azimuth', 'zenith',
        )
        for field in plain_fields:
            setattr(self.dataOut, field, getattr(self.dataIn, field))

        # nested beam sub-object
        for field in ('codeList', 'azimuthList', 'zenithList'):
            setattr(self.dataOut.beam, field, getattr(self.dataIn.beam, field))
54 54
55 55
class selectChannels(Operation):
    """Keep only the channels named in `channelList` (Voltage or Spectra)."""

    def run(self, dataOut, channelList):
        """Select channels by their channel numbers.

        Raises ValueError if any requested channel is not present.
        """
        channelIndexList = []
        self.dataOut = dataOut
        for channel in channelList:
            if channel not in self.dataOut.channelList:
                raise ValueError("Channel %d is not in %s" %(channel, str(self.dataOut.channelList)))

            index = self.dataOut.channelList.index(channel)
            channelIndexList.append(index)
        self.selectChannelsByIndex(channelIndexList)
        return self.dataOut

    def selectChannelsByIndex(self, channelIndexList):
        """
        Select a block of data by channel index, e.g. channelIndexList=[2,3,7].

        Input:
            channelIndexList : plain list of channel indexes to keep

        Affected:
            self.dataOut.data
            self.dataOut.channelIndexList
            self.dataOut.nChannels
            self.dataOut.m_ProcessingHeader.totalSpectra
            self.dataOut.systemHeaderObj.numChannels
            self.dataOut.m_ProcessingHeader.blockSize

        Return:
            None
        """

        for channelIndex in channelIndexList:
            if channelIndex not in self.dataOut.channelIndexList:
                raise ValueError("The value %d in channelIndexList is not valid" %channelIndex)

        if self.dataOut.type == 'Voltage':
            if self.dataOut.flagDataAsBlock:
                # Block data: dimension = [nChannels, nProfiles, nHeis]
                data = self.dataOut.data[channelIndexList,:,:]
            else:
                data = self.dataOut.data[channelIndexList,:]

            self.dataOut.data = data
            # channels are renumbered 0..k-1 after selection
            self.dataOut.channelList = range(len(channelIndexList))

        elif self.dataOut.type == 'Spectra':
            data_spc = self.dataOut.data_spc[channelIndexList, :]
            data_dc = self.dataOut.data_dc[channelIndexList, :]

            self.dataOut.data_spc = data_spc
            self.dataOut.data_dc = data_dc

            self.dataOut.channelList = channelIndexList
            self.__selectPairsByChannel(channelIndexList)

        return 1

    def __selectPairsByChannel(self, channelList=None):
        """Drop cross-spectra pairs whose channels are no longer selected."""
        # FIX: identity comparison with None ('is None'), not '== None' (PEP 8)
        if channelList is None:
            return

        pairsIndexListSelected = []
        for pairIndex in self.dataOut.pairsIndexList:
            # keep the pair only if BOTH of its channels survived selection
            if self.dataOut.pairsList[pairIndex][0] not in channelList:
                continue
            if self.dataOut.pairsList[pairIndex][1] not in channelList:
                continue

            pairsIndexListSelected.append(pairIndex)

        if not pairsIndexListSelected:
            # no pair survived: cross-spectra are gone entirely
            self.dataOut.data_cspc = None
            self.dataOut.pairsList = []
            return

        self.dataOut.data_cspc = self.dataOut.data_cspc[pairsIndexListSelected]
        self.dataOut.pairsList = [self.dataOut.pairsList[i]
                                  for i in pairsIndexListSelected]

        return
146 146
class selectHeights(Operation):
    """Keep only the height gates in a given range (km or index)."""

    def run(self, dataOut, minHei=None, maxHei=None, minIndex=None, maxIndex=None):
        """
        Select a block of data by height range: minHei <= height <= maxHei.

        Input:
            minHei : minimum height (km) to keep; clipped to the height list
            maxHei : maximum height (km) to keep; clipped to the height list
            minIndex/maxIndex : used directly when minHei/maxHei are not given

        Affected:
            several attributes, indirectly through selectHeightsByIndex

        Return:
            the modified dataOut
        """

        self.dataOut = dataOut

        # FIX: explicit None checks. The original 'if minHei and maxHei:'
        # skipped the index computation when a legitimate 0-km bound was
        # passed (0 is falsy), leaving minIndex/maxIndex possibly None.
        if minHei is not None and maxHei is not None:

            # clip the requested range to the available heights
            if (minHei < self.dataOut.heightList[0]):
                minHei = self.dataOut.heightList[0]

            if (maxHei > self.dataOut.heightList[-1]):
                maxHei = self.dataOut.heightList[-1]

            minIndex = 0
            maxIndex = 0
            heights = self.dataOut.heightList

            inda = numpy.where(heights >= minHei)
            indb = numpy.where(heights <= maxHei)

            # FIX: narrowed bare 'except:' — an empty where() result raises
            # IndexError on the [0] lookup; that is the case being guarded.
            try:
                minIndex = inda[0][0]
            except IndexError:
                minIndex = 0

            try:
                maxIndex = indb[0][-1]
            except IndexError:
                maxIndex = len(heights)

        self.selectHeightsByIndex(minIndex, maxIndex)

        return self.dataOut

    def selectHeightsByIndex(self, minIndex, maxIndex):
        """
        Select a block of data by height index: minIndex <= index <= maxIndex.

        Input:
            minIndex : minimum height index to keep
            maxIndex : maximum height index to keep

        Affected:
            self.dataOut.data
            self.dataOut.heightList

        Return:
            1 on success
        """

        if self.dataOut.type == 'Voltage':
            if (minIndex < 0) or (minIndex > maxIndex):
                raise ValueError("Height index range (%d,%d) is not valid" % (minIndex, maxIndex))

            if (maxIndex >= self.dataOut.nHeights):
                maxIndex = self.dataOut.nHeights

            # voltage: slice the height axis (exclusive upper bound here)
            if self.dataOut.flagDataAsBlock:
                # Block data: dimension = [nChannels, nProfiles, nHeis]
                data = self.dataOut.data[:,:, minIndex:maxIndex]
            else:
                data = self.dataOut.data[:, minIndex:maxIndex]

            self.dataOut.data = data
            self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex]

            if self.dataOut.nHeights <= 1:
                raise ValueError("selectHeights: Too few heights. Current number of heights is %d" %(self.dataOut.nHeights))
        elif self.dataOut.type == 'Spectra':
            if (minIndex < 0) or (minIndex > maxIndex):
                raise ValueError("Error selecting heights: Index range (%d,%d) is not valid" % (
                    minIndex, maxIndex))

            if (maxIndex >= self.dataOut.nHeights):
                maxIndex = self.dataOut.nHeights - 1

            # spectra: inclusive upper bound (maxIndex + 1)
            data_spc = self.dataOut.data_spc[:, :, minIndex:maxIndex + 1]

            data_cspc = None
            if self.dataOut.data_cspc is not None:
                data_cspc = self.dataOut.data_cspc[:, :, minIndex:maxIndex + 1]

            data_dc = None
            if self.dataOut.data_dc is not None:
                data_dc = self.dataOut.data_dc[:, minIndex:maxIndex + 1]

            self.dataOut.data_spc = data_spc
            self.dataOut.data_cspc = data_cspc
            self.dataOut.data_dc = data_dc

            self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex + 1]

        return 1
262 262
263 263
class filterByHeights(Operation):
    """Box-car filter along the height axis: sum groups of `window` gates."""

    def run(self, dataOut, window):
        """
        Sum every `window` consecutive height gates into one.

        Input:
            window : number of gates per group. If None it is derived from
                     the transmitted pulse (txA / nBaud) over the gate spacing.

        Affected:
            dataOut.data, dataOut.heightList, dataOut.windowOfFilter
        """
        deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]

        # FIX: identity comparison with None ('is None'), not '== None'
        if window is None:
            window = (dataOut.radarControllerHeaderObj.txA/dataOut.radarControllerHeaderObj.nBaud) / deltaHeight

        newdelta = deltaHeight * window
        # drop the trailing gates that do not fill a complete window
        r = dataOut.nHeights % window
        newheights = (dataOut.nHeights-r)/window

        if newheights <= 1:
            raise ValueError("filterByHeights: Too few heights. Current number of heights is %d and window is %d" %(dataOut.nHeights, window))

        if dataOut.flagDataAsBlock:
            # Block data: dimension = [nChannels, nProfiles, nHeis]
            buffer = dataOut.data[:, :, 0:int(dataOut.nHeights-r)]
            buffer = buffer.reshape(dataOut.nChannels, dataOut.nProfiles, int(dataOut.nHeights/window), window)
            buffer = numpy.sum(buffer,3)

        else:
            buffer = dataOut.data[:,0:int(dataOut.nHeights-r)]
            buffer = buffer.reshape(dataOut.nChannels,int(dataOut.nHeights/window),int(window))
            buffer = numpy.sum(buffer,2)

        dataOut.data = buffer
        dataOut.heightList = dataOut.heightList[0] + numpy.arange( newheights )*newdelta
        dataOut.windowOfFilter = window

        return dataOut
298 298
299 299
class setH0(Operation):
    """Shift the height scale so that it starts at h0."""

    def run(self, dataOut, h0, deltaHeight = None):
        # Default gate spacing comes from the current height list.
        # (A deltaHeight of 0 is treated like None, as before.)
        if not deltaHeight:
            deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]

        # Rebuild the height axis from the new origin
        dataOut.heightList = h0 + numpy.arange(dataOut.nHeights) * deltaHeight

        return dataOut
314 314
315 315
class deFlip(Operation):
    """Undo the alternating +1/-1 phase flip applied to profiles.

    Multiplies successive profiles by an alternating sign, carrying the sign
    across calls in self.flip.
    """

    def run(self, dataOut, channelList=None):
        """
        Input:
            channelList : channels to de-flip; empty/None means all channels.
        """
        # FIX: mutable default argument ([]) replaced with a None sentinel.
        if channelList is None:
            channelList = []

        # NOTE(review): this class never initializes self.flip itself (the
        # original relied on an attribute set elsewhere) — default to +1 on
        # first use; any externally-set value is preserved.
        if not hasattr(self, 'flip'):
            self.flip = 1

        data = dataOut.data.copy()

        if dataOut.flagDataAsBlock:
            flip = self.flip
            profileList = list(range(dataOut.nProfiles))

            if not channelList:
                for thisProfile in profileList:
                    data[:,thisProfile,:] = data[:,thisProfile,:]*flip
                    flip *= -1.0
            else:
                for thisChannel in channelList:
                    if thisChannel not in dataOut.channelList:
                        continue

                    for thisProfile in profileList:
                        data[thisChannel,thisProfile,:] = data[thisChannel,thisProfile,:]*flip
                        flip *= -1.0

            # remember the sign for the next block
            self.flip = flip

        else:
            if not channelList:
                data[:,:] = data[:,:]*self.flip
            else:
                for thisChannel in channelList:
                    if thisChannel not in dataOut.channelList:
                        continue

                    data[thisChannel,:] = data[thisChannel,:]*self.flip

            # alternate the sign for the next profile
            self.flip *= -1.

        dataOut.data = data

        return dataOut
356 356
357 357
class setAttribute(Operation):
    '''
    Set an arbitrary attribute(s) to dataOut
    '''

    def __init__(self):

        Operation.__init__(self)
        self._ready = False

    def run(self, dataOut, **kwargs):
        # Every keyword argument becomes an attribute on dataOut
        for name in kwargs:
            setattr(dataOut, name, kwargs[name])

        return dataOut
374 374
375 375
@MPDecorator
class printAttribute(Operation):
    '''
    Print an arbitrary attribute of dataOut
    '''

    def __init__(self):

        Operation.__init__(self)

    def run(self, dataOut, attributes):
        # Accept either one attribute name or a list of names
        names = [attributes] if isinstance(attributes, str) else attributes
        for name in names:
            # silently skip attributes dataOut does not have
            if hasattr(dataOut, name):
                log.log(getattr(dataOut, name), name)
394 394
class interpolateHeights(Operation):
    """Replace the height gates in [botLim, topLim] with interpolated values.

    2-D data (channels x heights): every gate in the range gets the average
    of the two gates just outside it (botLim-1 and topLim+1).
    3-D data (channels x profiles x heights): the range is filled by linear
    interpolation along the height axis using all gates outside the range.
    """

    def run(self, dataOut, topLim, botLim):
        # original note: gates 69-72 for julia; gates 82-84 for meteors
        if len(numpy.shape(dataOut.data))==2:
            # average of the two boundary gates, replicated across the gap
            sampInterp = (dataOut.data[:,botLim-1] + dataOut.data[:,topLim+1])/2
            sampInterp = numpy.transpose(numpy.tile(sampInterp,(topLim-botLim + 1,1)))
            #dataOut.data[:,botLim:limSup+1] = sampInterp
            dataOut.data[:,botLim:topLim+1] = sampInterp
        else:
            nHeights = dataOut.data.shape[2]
            # x/y: all gates outside [botLim, topLim]
            x = numpy.hstack((numpy.arange(botLim),numpy.arange(topLim+1,nHeights)))
            y = dataOut.data[:,:,list(range(botLim))+list(range(topLim+1,nHeights))]
            f = interpolate.interp1d(x, y, axis = 2)
            xnew = numpy.arange(botLim,topLim+1)
            ynew = f(xnew)
            dataOut.data[:,:,botLim:topLim+1] = ynew

        return dataOut
415 415
416 416
class CohInt(Operation):
    """Coherent integration of voltage profiles.

    Sums `n` consecutive profiles (or all profiles within `timeInterval`
    seconds), optionally with overlapping sums, stride sub-sampling, or
    operating on whole blocks.
    """

    isConfig = False
    __profIndex = 0           # profiles accumulated in the current sum
    __byTime = False          # True -> integrate by elapsed time
    __initime = None          # timestamp of the first profile in the sum
    __lastdatatime = None
    __integrationtime = None
    __buffer = None           # running sum, or stacked profiles if overlapping
    __bufferStride = []
    __dataReady = False
    __profIndexStride = 0
    __dataToPutStride = False
    n = None

    def __init__(self, **kwargs):

        Operation.__init__(self, **kwargs)

    def setup(self, n=None, timeInterval=None, stride=None, overlapping=False, byblock=False):
        """
        Set the parameters of the integration class.

        Inputs:

            n : Number of coherent integrations
            timeInterval : Time of integration. If the parameter "n" is selected this one does not work
            overlapping :
        """

        self.__initime = None
        self.__lastdatatime = 0
        self.__buffer = None
        self.__dataReady = False
        self.byblock = byblock
        self.stride = stride

        # FIX: identity comparisons with None (PEP 8) instead of '=='/'!='
        if n is None and timeInterval is None:
            raise ValueError("n or timeInterval should be specified ...")

        if n is not None:
            self.n = n
            self.__byTime = False
        else:
            self.__integrationtime = timeInterval  # * 60. #if (type(timeInterval)!=integer) -> change this line
            self.n = 9999
            self.__byTime = True

        if overlapping:
            self.__withOverlapping = True
            self.__buffer = None
        else:
            self.__withOverlapping = False
            self.__buffer = 0

        self.__profIndex = 0

    def putData(self, data):
        """
        Add a profile to the __buffer and increase in one the __profileIndex

        """

        if not self.__withOverlapping:
            # simple running sum
            self.__buffer += data.copy()
            self.__profIndex += 1
            return

        # Overlapping: stack profiles so the oldest can be rolled out
        nChannels, nHeis = data.shape
        data = numpy.reshape(data, (1, nChannels, nHeis))

        # If the buffer is empty it takes the data value
        if self.__buffer is None:
            self.__buffer = data
            self.__profIndex += 1
            return

        # If the buffer length is lower than n, keep stacking
        if self.__profIndex < self.n:
            self.__buffer = numpy.vstack((self.__buffer, data))
            self.__profIndex += 1
            return

        # Buffer full: roll out the oldest profile, append the newest
        self.__buffer = numpy.roll(self.__buffer, -1, axis=0)
        self.__buffer[self.n-1] = data
        self.__profIndex = self.n
        return

    def pushData(self):
        """
        Return the sum of the last profiles and the profiles used in the sum.

        Affected:

            self.__profileIndex

        """

        if not self.__withOverlapping:
            data = self.__buffer
            n = self.__profIndex

            self.__buffer = 0
            self.__profIndex = 0

            return data, n

        # Integration with overlapping: sum the stacked profiles
        data = numpy.sum(self.__buffer, axis=0)
        n = self.__profIndex

        return data, n

    def byProfiles(self, data):
        """Accumulate one profile; flush when n profiles have been summed."""

        self.__dataReady = False
        avgdata = None

        self.putData(data)

        if self.__profIndex == self.n:
            avgdata, n = self.pushData()
            self.__dataReady = True

        return avgdata

    def byTime(self, data, datatime):
        """Accumulate one profile; flush when the time interval has elapsed."""

        self.__dataReady = False
        avgdata = None
        n = None

        self.putData(data)

        if (datatime - self.__initime) >= self.__integrationtime:
            avgdata, n = self.pushData()
            # record how many profiles actually went into this sum
            self.n = n
            self.__dataReady = True

        return avgdata

    def integrateByStride(self, data, datatime):
        """Buffer n*stride profiles, then emit `stride` interleaved sums."""
        if self.__profIndex == 0:
            self.__buffer = [[data.copy(), datatime]]
        else:
            self.__buffer.append([data.copy(),datatime])
        self.__profIndex += 1
        self.__dataReady = False

        if self.__profIndex == self.n * self.stride :
            self.__dataToPutStride = True
            self.__profIndexStride = 0
            self.__profIndex = 0
            self.__bufferStride = []
            for i in range(self.stride):
                # sum every stride-th profile; timestamp is their average
                current = self.__buffer[i::self.stride]
                data = numpy.sum([t[0] for t in current], axis=0)
                avgdatatime = numpy.average([t[1] for t in current])
                self.__bufferStride.append((data, avgdatatime))

        if self.__dataToPutStride:
            self.__dataReady = True
            self.__profIndexStride += 1
            if self.__profIndexStride == self.stride:
                self.__dataToPutStride = False
            return self.__bufferStride[self.__profIndexStride - 1]

        return None, None

    def integrate(self, data, datatime=None):
        """Feed one profile; returns (sum, timestamp) when ready, else (None, None)."""

        if self.__initime is None:
            self.__initime = datatime

        if self.__byTime:
            avgdata = self.byTime(data, datatime)
        else:
            avgdata = self.byProfiles(data)

        self.__lastdatatime = datatime

        if avgdata is None:
            return None, None

        avgdatatime = self.__initime

        deltatime = datatime - self.__lastdatatime

        if not self.__withOverlapping:
            self.__initime = datatime
        else:
            self.__initime += deltatime

        return avgdata, avgdatatime

    def integrateByBlock(self, dataOut):
        """Sum groups of n profiles inside a block read all at once."""

        times = int(dataOut.data.shape[1]/self.n)
        # FIX: numpy.complex was deprecated in NumPy 1.20 and removed in 1.24;
        # it was a plain alias of the builtin `complex` (same dtype result).
        avgdata = numpy.zeros((dataOut.nChannels, times, dataOut.nHeights), dtype=complex)

        id_min = 0
        id_max = self.n

        for i in range(times):
            junk = dataOut.data[:,id_min:id_max,:]
            avgdata[:,i,:] = junk.sum(axis=1)
            id_min += self.n
            id_max += self.n

        timeInterval = dataOut.ippSeconds*self.n
        avgdatatime = (times - 1) * timeInterval + dataOut.utctime
        self.__dataReady = True
        return avgdata, avgdatatime

    def run(self, dataOut, n=None, timeInterval=None, stride=None, overlapping=False, byblock=False, **kwargs):

        if not self.isConfig:
            self.setup(n=n, stride=stride, timeInterval=timeInterval, overlapping=overlapping, byblock=byblock, **kwargs)
            self.isConfig = True

        if dataOut.flagDataAsBlock:
            # Data read as blocks: dimension = [nChannels, nProfiles, nHeis]
            avgdata, avgdatatime = self.integrateByBlock(dataOut)
            dataOut.nProfiles /= self.n
        else:
            if stride is None:
                avgdata, avgdatatime = self.integrate(dataOut.data, dataOut.utctime)
            else:
                avgdata, avgdatatime = self.integrateByStride(dataOut.data, dataOut.utctime)

        dataOut.flagNoData = True

        if self.__dataReady:
            dataOut.data = avgdata
            if not dataOut.flagCohInt:
                dataOut.nCohInt *= self.n
                dataOut.flagCohInt = True
            dataOut.utctime = avgdatatime
            dataOut.flagNoData = False
        return dataOut
678 678
class Decoder(Operation):
    """Decode (pulse-compress) voltage data against a phase code.

    mode 0: correlation in time; mode 1: multiplication in frequency;
    mode 2: optimized frequency decoding (not implemented).
    """

    isConfig = False
    __profIndex = 0       # which code row applies to the current profile

    code = None           # (nCode, nBaud) code matrix

    nCode = None
    nBaud = None

    def __init__(self, **kwargs):

        Operation.__init__(self, **kwargs)

        self.times = None
        self.osamp = None
        self.isConfig = False
        self.setupReq = False

    def setup(self, code, osamp, dataOut):
        """Precompute the (possibly oversampled) code and its FFT."""

        self.__profIndex = 0

        self.code = code

        self.nCode = len(code)
        self.nBaud = len(code[0])
        # FIX: identity comparison with None ('is not None'), not '!='
        if (osamp is not None) and (osamp > 1):
            self.osamp = osamp
            # repeat each baud osamp times along the baud axis
            self.code = numpy.repeat(code, repeats=self.osamp, axis=1)
            self.nBaud = self.nBaud*self.osamp

        self.__nChannels = dataOut.nChannels
        self.__nProfiles = dataOut.nProfiles
        self.__nHeis = dataOut.nHeights

        if self.__nHeis < self.nBaud:
            raise ValueError('Number of heights (%d) should be greater than number of bauds (%d)' %(self.__nHeis, self.nBaud))

        # Frequency-domain code: conjugated FFT of the zero-padded code.
        # FIX: numpy.complex was removed in NumPy 1.24; it was a plain alias
        # of the builtin `complex` (same resulting dtype).
        __codeBuffer = numpy.zeros((self.nCode, self.__nHeis), dtype=complex)

        __codeBuffer[:,0:self.nBaud] = self.code

        self.fft_code = numpy.conj(numpy.fft.fft(__codeBuffer, axis=1))

        if dataOut.flagDataAsBlock:

            self.ndatadec = self.__nHeis  #- self.nBaud + 1

            self.datadecTime = numpy.zeros((self.__nChannels, self.__nProfiles, self.ndatadec), dtype=complex)

        else:

            # Time-domain output buffer
            self.ndatadec = self.__nHeis  #- self.nBaud + 1

            self.datadecTime = numpy.zeros((self.__nChannels, self.ndatadec), dtype=complex)

    def __convolutionInFreq(self, data):
        """Decode one profile by multiplication in the frequency domain."""

        fft_code = self.fft_code[self.__profIndex].reshape(1,-1)

        fft_data = numpy.fft.fft(data, axis=1)

        conv = fft_data*fft_code

        data = numpy.fft.ifft(conv,axis=1)

        return data

    def __convolutionInFreqOpt(self, data):

        raise NotImplementedError

    def __convolutionInTime(self, data):
        """Decode one profile by correlation in the time domain."""

        code = self.code[self.__profIndex]
        for i in range(self.__nChannels):
            self.datadecTime[i,:] = numpy.correlate(data[i,:], code, mode='full')[self.nBaud-1:]

        return self.datadecTime

    def __convolutionByBlockInTime(self, data):
        """Decode a whole block: each profile correlated with its code row."""

        # tile the code to cover all profiles without copying memory
        repetitions = int(self.__nProfiles / self.nCode)
        junk = numpy.lib.stride_tricks.as_strided(self.code, (repetitions, self.code.size), (0, self.code.itemsize))
        junk = junk.flatten()
        code_block = numpy.reshape(junk, (self.nCode*repetitions, self.nBaud))
        profilesList = range(self.__nProfiles)

        for i in range(self.__nChannels):
            for j in profilesList:
                self.datadecTime[i,j,:] = numpy.correlate(data[i,j,:], code_block[j,:], mode='full')[self.nBaud-1:]
        return self.datadecTime

    def __convolutionByBlockInFreq(self, data):

        # FIX: removed the code that followed this raise — it was unreachable.
        raise NotImplementedError("Decoder by frequency fro Blocks not implemented")

    def run(self, dataOut, code=None, nCode=None, nBaud=None, mode = 0, osamp=None, times=None):
        """Decode dataOut.data with `code` (or the code carried by dataOut)."""

        if dataOut.flagDecodeData:
            print("This data is already decoded, recoding again ...")

        if not self.isConfig:

            if code is None:
                if dataOut.code is None:
                    raise ValueError("Code could not be read from %s instance. Enter a value in Code parameter" %dataOut.type)

                code = dataOut.code
            else:
                code = numpy.array(code).reshape(nCode,nBaud)
            self.setup(code, osamp, dataOut)

            self.isConfig = True

        if mode == 3:
            sys.stderr.write("Decoder Warning: mode=%d is not valid, using mode=0\n" %mode)

        # FIX: identity comparison with None ('is not None'), not '!='
        if times is not None:
            sys.stderr.write("Decoder Warning: Argument 'times' in not used anymore\n")

        if self.code is None:
            print("Fail decoding: Code is not defined.")
            return

        self.__nProfiles = dataOut.nProfiles
        datadec = None

        if mode == 3:
            mode = 0

        if dataOut.flagDataAsBlock:
            # Decoding when data have been read as a block
            if mode == 0:
                datadec = self.__convolutionByBlockInTime(dataOut.data)
            if mode == 1:
                datadec = self.__convolutionByBlockInFreq(dataOut.data)
        else:
            # Decoding when data have been read profile by profile
            if mode == 0:
                datadec = self.__convolutionInTime(dataOut.data)

            if mode == 1:
                datadec = self.__convolutionInFreq(dataOut.data)

            if mode == 2:
                datadec = self.__convolutionInFreqOpt(dataOut.data)

        if datadec is None:
            raise ValueError("Codification mode selected is not valid: mode=%d. Try selecting 0 or 1" %mode)

        dataOut.code = self.code
        dataOut.nCode = self.nCode
        dataOut.nBaud = self.nBaud

        dataOut.data = datadec

        # decoded data keeps only ndatadec heights
        dataOut.heightList = dataOut.heightList[0:datadec.shape[-1]]

        dataOut.flagDecodeData = True  # the data is now decoded

        # advance to the next code row, wrapping after the last one
        if self.__profIndex == self.nCode-1:
            self.__profIndex = 0
            return dataOut

        self.__profIndex += 1

        return dataOut
868 868
869 869
class ProfileConcat(Operation):
    """Concatenate ``m`` consecutive profiles along the height axis.

    Only valid when voltage is read profile by profile (getBlock=False).
    Once ``m`` profiles have been collected they are emitted as one wider
    profile; the height list and ippSeconds are rescaled to match.
    """

    isConfig = False
    buffer = None

    def __init__(self, **kwargs):

        Operation.__init__(self, **kwargs)
        self.profileIndex = 0

    def reset(self):
        # Zero the accumulator and restart the fill counters.
        self.buffer = numpy.zeros_like(self.buffer)
        self.start_index = 0
        self.times = 1

    def setup(self, data, m, n=1):
        # Allocate a buffer wide enough for m profiles laid side by side.
        nChannels, nHeights = data.shape
        self.buffer = numpy.zeros((nChannels, nHeights * m), dtype=type(data[0, 0]))
        self.nHeights = nHeights
        self.start_index = 0
        self.times = 1

    def concat(self, data):
        # Copy the incoming profile into the next free slot of the buffer.
        end_index = self.nHeights * self.times
        self.buffer[:, self.start_index:end_index] = data.copy()
        self.start_index = self.start_index + self.nHeights

    def run(self, dataOut, m):
        dataOut.flagNoData = True

        if not self.isConfig:
            self.setup(dataOut.data, m, 1)
            self.isConfig = True

        if dataOut.flagDataAsBlock:
            raise ValueError("ProfileConcat can only be used when voltage have been read profile by profile, getBlock = False")

        self.concat(dataOut.data)
        self.times += 1
        if self.times > m:
            dataOut.data = self.buffer
            self.reset()
            dataOut.flagNoData = False
            # The output profile is m times wider: extend the height list
            # and stretch the inter-pulse period accordingly.
            deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
            xf = dataOut.heightList[0] + dataOut.nHeights * deltaHeight * m
            dataOut.heightList = numpy.arange(dataOut.heightList[0], xf, deltaHeight)
            dataOut.ippSeconds *= m
        return dataOut
919 919
class ProfileSelector(Operation):
    """Keep only a selected subset of profiles.

    The selection can be given as an explicit index list (``profileList``),
    a single min/max range (``profileRangeList``), a list of ranges
    (``rangeList``) or an AMISR beam id (``beam``).  Works both on blocks
    (flagDataAsBlock) and profile by profile.
    """

    # Output profile counter (wraps at nProfiles)
    profileIndex = None
    # Total number of selected profiles
    nProfiles = None

    def __init__(self, **kwargs):

        Operation.__init__(self, **kwargs)
        self.profileIndex = 0

    def incProfileIndex(self):
        # Advance the output counter, wrapping at the selection size.
        self.profileIndex += 1

        if self.profileIndex >= self.nProfiles:
            self.profileIndex = 0

    def isThisProfileInRange(self, profileIndex, minIndex, maxIndex):
        # True when minIndex <= profileIndex <= maxIndex.
        if profileIndex < minIndex:
            return False

        if profileIndex > maxIndex:
            return False

        return True

    def isThisProfileInList(self, profileIndex, profileList):
        # True when profileIndex is one of the selected indexes.
        if profileIndex not in profileList:
            return False

        return True

    def run(self, dataOut, profileList=None, profileRangeList=None, beam=None, byblock=False, rangeList = None, nProfiles=None):

        """
        ProfileSelector:

        Inputs:
            profileList : Index of profiles selected. Example: profileList = (0,1,2,7,8)

            profileRangeList : Minimum and maximum profile indexes. Example: profileRangeList = (4, 30)

            rangeList : List of profile ranges. Example: rangeList = ((4, 30), (32, 64), (128, 256))

        Returns dataOut with flagNoData=False only when the current profile
        (or block) belongs to the selection.
        """

        if rangeList is not None:
            # Accept a single (min, max) pair as well as a list of pairs.
            if type(rangeList[0]) not in (tuple, list):
                rangeList = [rangeList]

        dataOut.flagNoData = True

        if dataOut.flagDataAsBlock:
            """
            data dimension = [nChannels, nProfiles, nHeis]
            """
            if profileList != None:
                dataOut.data = dataOut.data[:,profileList,:]

            if profileRangeList != None:
                minIndex = profileRangeList[0]
                maxIndex = profileRangeList[1]
                profileList = list(range(minIndex, maxIndex+1))

                dataOut.data = dataOut.data[:,minIndex:maxIndex+1,:]

            if rangeList != None:

                profileList = []

                for thisRange in rangeList:
                    minIndex = thisRange[0]
                    maxIndex = thisRange[1]

                    profileList.extend(list(range(minIndex, maxIndex+1)))

                dataOut.data = dataOut.data[:,profileList,:]

            dataOut.nProfiles = len(profileList)
            dataOut.profileIndex = dataOut.nProfiles - 1
            dataOut.flagNoData = False

            return dataOut

        """
        data dimension = [nChannels, nHeis]
        """

        if profileList != None:

            if self.isThisProfileInList(dataOut.profileIndex, profileList):

                self.nProfiles = len(profileList)
                dataOut.nProfiles = self.nProfiles
                dataOut.profileIndex = self.profileIndex
                dataOut.flagNoData = False

                self.incProfileIndex()
                return dataOut

        if profileRangeList != None:

            minIndex = profileRangeList[0]
            maxIndex = profileRangeList[1]

            if self.isThisProfileInRange(dataOut.profileIndex, minIndex, maxIndex):

                self.nProfiles = maxIndex - minIndex + 1
                dataOut.nProfiles = self.nProfiles
                dataOut.profileIndex = self.profileIndex
                dataOut.flagNoData = False

                self.incProfileIndex()
                return dataOut

        if rangeList != None:

            nProfiles = 0

            # First pass: total size of the selection (needed for wrapping).
            for thisRange in rangeList:
                minIndex = thisRange[0]
                maxIndex = thisRange[1]

                nProfiles += maxIndex - minIndex + 1

            for thisRange in rangeList:

                minIndex = thisRange[0]
                maxIndex = thisRange[1]

                if self.isThisProfileInRange(dataOut.profileIndex, minIndex, maxIndex):

                    self.nProfiles = nProfiles
                    dataOut.nProfiles = self.nProfiles
                    dataOut.profileIndex = self.profileIndex
                    dataOut.flagNoData = False

                    self.incProfileIndex()

                    break

            return dataOut


        if beam != None: #beam is only for AMISR data
            if self.isThisProfileInList(dataOut.profileIndex, dataOut.beamRangeDict[beam]):
                dataOut.flagNoData = False
                dataOut.profileIndex = self.profileIndex

                self.incProfileIndex()

            return dataOut

        # BUGFIX: previously a non-selected profile fell through to this
        # raise, crashing the pipeline on every profile outside the
        # selection.  Only raise when no selection parameter was given at
        # all; otherwise return dataOut flagged as no-data.
        if profileList is None and profileRangeList is None and rangeList is None and beam is None:
            raise ValueError("ProfileSelector needs profileList, profileRangeList or rangeList parameter")

        return dataOut
1077 1077
1078 1078
class Reshaper(Operation):
    """Reshape voltage data.

    Two modes:
      * ``shape`` given: reshape a whole block to (nChannels, nProfiles, nHeis).
      * ``nTxs`` < 1 given, profile by profile: merge 1/nTxs consecutive
        profiles into one longer profile.

    heightList, nProfiles, profileIndex and ippSeconds are rescaled by the
    derived factor.
    """

    def __init__(self, **kwargs):

        Operation.__init__(self, **kwargs)

        self.__buffer = None
        self.__nitems = 0

    def __appendProfile(self, dataOut, nTxs):
        # Accumulate one profile into the working buffer; returns the
        # (integer) fraction of the merged profile filled so far.
        if self.__buffer is None:
            shape = (dataOut.nChannels, int(dataOut.nHeights/nTxs) )
            self.__buffer = numpy.empty(shape, dtype = dataOut.data.dtype)

        ini = dataOut.nHeights * self.__nitems
        end = ini + dataOut.nHeights

        self.__buffer[:, ini:end] = dataOut.data

        self.__nitems += 1

        return int(self.__nitems*nTxs)

    def __getBuffer(self):
        # Return a copy of the merged profile once 1/nTxs profiles have
        # been accumulated; None while still filling.
        if self.__nitems == int(1./self.__nTxs):

            self.__nitems = 0

            return self.__buffer.copy()

        return None

    def __checkInputs(self, dataOut, shape, nTxs):
        # Validate shape/nTxs and derive whichever one was not given.
        if shape is None and nTxs is None:
            raise ValueError("Reshaper: shape of factor should be defined")

        if nTxs:
            if nTxs < 0:
                raise ValueError("nTxs should be greater than 0")

            if nTxs < 1 and dataOut.nProfiles % (1./nTxs) != 0:
                raise ValueError("nProfiles= %d is not divisibled by (1./nTxs) = %f" %(dataOut.nProfiles, (1./nTxs)))

            shape = [dataOut.nChannels, dataOut.nProfiles*nTxs, dataOut.nHeights/nTxs]

            return shape, nTxs

        if len(shape) != 2 and len(shape) != 3:
            raise ValueError("shape dimension should be equal to 2 or 3. shape = (nProfiles, nHeis) or (nChannels, nProfiles, nHeis). Actually shape = (%d, %d, %d)" %(dataOut.nChannels, dataOut.nProfiles, dataOut.nHeights))

        if len(shape) == 2:
            # Channel count is implicit when only (nProfiles, nHeis) given.
            shape_tuple = [dataOut.nChannels]
            shape_tuple.extend(shape)
        else:
            shape_tuple = list(shape)

        nTxs = 1.0*shape_tuple[1]/dataOut.nProfiles

        return shape_tuple, nTxs

    def run(self, dataOut, shape=None, nTxs=None):

        shape_tuple, self.__nTxs = self.__checkInputs(dataOut, shape, nTxs)

        dataOut.flagNoData = True
        profileIndex = None

        if dataOut.flagDataAsBlock:

            dataOut.data = numpy.reshape(dataOut.data, shape_tuple)
            dataOut.flagNoData = False

            profileIndex = int(dataOut.nProfiles*self.__nTxs) - 1

        else:

            if self.__nTxs < 1:

                self.__appendProfile(dataOut, self.__nTxs)
                new_data = self.__getBuffer()

                if new_data is not None:
                    dataOut.data = new_data
                    dataOut.flagNoData = False

                # BUGFIX: use the derived factor; the raw 'nTxs' argument is
                # None when the operation is configured via 'shape', which
                # made this line raise a TypeError.
                profileIndex = dataOut.profileIndex*self.__nTxs

            else:
                raise ValueError("nTxs should be greater than 0 and lower than 1, or use VoltageReader(..., getblock=True)")

        deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]

        dataOut.heightList = numpy.arange(dataOut.nHeights/self.__nTxs) * deltaHeight + dataOut.heightList[0]

        dataOut.nProfiles = int(dataOut.nProfiles*self.__nTxs)

        dataOut.profileIndex = profileIndex

        dataOut.ippSeconds /= self.__nTxs

        return dataOut
1183 1183
class SplitProfiles(Operation):
    """Split every profile of a block into ``n`` shorter profiles.

    Block mode only (flagDataAsBlock).  The number of heights per profile
    must be divisible by ``n``; nProfiles grows by ``n`` and heightList /
    ippSeconds shrink accordingly.
    """

    def __init__(self, **kwargs):

        Operation.__init__(self, **kwargs)

    def run(self, dataOut, n):

        dataOut.flagNoData = True
        profileIndex = None

        if dataOut.flagDataAsBlock:

            #nchannels, nprofiles, nsamples
            shape = dataOut.data.shape

            if shape[2] % n != 0:
                # BUGFIX: the original message had the operands swapped
                # ("n has to be multiple of shape[2]").
                raise ValueError("Could not split the data, shape[2]=%d has to be multiple of n=%d" %(shape[2], n))

            new_shape = shape[0], shape[1]*n, int(shape[2]/n)

            dataOut.data = numpy.reshape(dataOut.data, new_shape)
            dataOut.flagNoData = False

            # BUGFIX: after the split there are nProfiles*n profiles, so the
            # last index is nProfiles*n - 1 (was nProfiles/n - 1, swapped
            # with CombineProfiles).
            profileIndex = int(dataOut.nProfiles*n) - 1

        else:

            raise ValueError("Could not split the data when is read Profile by Profile. Use VoltageReader(..., getblock=True)")

        deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]

        dataOut.heightList = numpy.arange(dataOut.nHeights/n) * deltaHeight + dataOut.heightList[0]

        dataOut.nProfiles = int(dataOut.nProfiles*n)

        dataOut.profileIndex = profileIndex

        dataOut.ippSeconds /= n

        return dataOut
1225 1225
class CombineProfiles(Operation):
    """Merge ``n`` consecutive profiles into one longer profile (the inverse
    of SplitProfiles).

    Works directly on blocks, or accumulates incoming profiles when data is
    read profile by profile.  nProfiles shrinks by ``n``; heightList and
    ippSeconds grow accordingly.
    """

    def __init__(self, **kwargs):

        Operation.__init__(self, **kwargs)

        self.__remData = None       # partially accumulated profile (profile-by-profile mode)
        self.__profileIndex = 0     # profiles accumulated so far

    def run(self, dataOut, n):

        dataOut.flagNoData = True
        profileIndex = None

        if dataOut.flagDataAsBlock:

            #nchannels, nprofiles, nsamples
            shape = dataOut.data.shape

            if shape[1] % n != 0:
                # BUGFIX: message said "split" and had the operands swapped.
                raise ValueError("Could not combine the data, shape[1]=%d has to be multiple of n=%d" %(shape[1], n))

            # BUGFIX: use integer division; shape[1]/n is a float in
            # Python 3 and numpy.reshape rejects float dimensions.
            new_shape = shape[0], shape[1]//n, shape[2]*n

            dataOut.data = numpy.reshape(dataOut.data, new_shape)
            dataOut.flagNoData = False

            # BUGFIX: after combining there are nProfiles/n profiles, so the
            # last index is nProfiles/n - 1 (was nProfiles*n - 1, swapped
            # with SplitProfiles).
            profileIndex = int(dataOut.nProfiles/n) - 1

        else:

            #nchannels, nsamples
            if self.__remData is None:
                newData = dataOut.data
            else:
                newData = numpy.concatenate((self.__remData, dataOut.data), axis=1)

            self.__profileIndex += 1

            if self.__profileIndex < n:
                self.__remData = newData
                # BUGFIX: return dataOut (flagNoData=True) instead of None
                # so the processing chain keeps receiving the data object.
                return dataOut

            self.__profileIndex = 0
            self.__remData = None

            dataOut.data = newData
            dataOut.flagNoData = False

            # BUGFIX: integer division; profileIndex must be an int.
            profileIndex = dataOut.profileIndex//n


        deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]

        dataOut.heightList = numpy.arange(dataOut.nHeights*n) * deltaHeight + dataOut.heightList[0]

        dataOut.nProfiles = int(dataOut.nProfiles/n)

        dataOut.profileIndex = profileIndex

        dataOut.ippSeconds *= n

        return dataOut
1288 1288
class PulsePairVoltage(Operation):
    '''
    Pulse-pair estimator over voltage data.

    The real component of Lag[0] provides Intensity Information.
    The imag component of Lag[1] Phase provides Velocity Information.

    Configuration Parameters:
        nPRF = Number of Several PRF
        theta = Degree Azimuth angel Boundaries

    Input:
        self.dataOut
        lag[N]
    Affected:
        self.dataOut.spc
    '''
    isConfig = False        # True once setup() has run
    __profIndex = 0         # number of profiles accumulated in __buffer
    __initime = None        # timestamp of the first profile of the current batch
    __lastdatatime = None   # timestamp of the last processed profile
    __buffer = None         # accumulator, shape (nChannels, n, nHeights), complex
    noise = None            # per-channel noise estimate (Hildebrand-Sekhon)
    __dataReady = False     # True when a full batch of n profiles was processed
    n = None                # profiles per pulse-pair estimate
    __nch = 0               # number of channels
    __nHeis = 0             # number of heights
    removeDC = False        # subtract the mean (DC) profile before the lags
    ipp = None
    lambda_ = 0             # radar wavelength [m]

    def __init__(self,**kwargs):
        Operation.__init__(self,**kwargs)

    def setup(self, dataOut, n = None, removeDC=False):
        '''
        Initialize the accumulation buffer and estimator constants.

        n: number of input PRFs (profiles) integrated per estimate; must be >= 2.
        removeDC: if True, the mean profile is subtracted before computing the lags.
        '''
        self.__initime = None
        self.__lastdatatime = 0
        self.__dataReady = False
        self.__buffer = 0
        self.__profIndex = 0
        self.noise = None
        self.__nch = dataOut.nChannels
        self.__nHeis = dataOut.nHeights
        self.removeDC = removeDC
        # wavelength = c / f for a 9345 MHz carrier
        # (presumably an X-band weather radar -- confirm)
        self.lambda_ = 3.0e8/(9345.0e6)
        self.ippSec = dataOut.ippSeconds
        self.nCohInt = dataOut.nCohInt

        if n == None:
            raise ValueError("n should be specified.")

        if n != None:
            if n<2:
                raise ValueError("n should be greater than 2")

        self.n = n
        self.__nProf = n

        self.__buffer = numpy.zeros((dataOut.nChannels,
                                        n,
                                        dataOut.nHeights),
                                        dtype='complex')

    def putData(self,data):
        '''
        Add a profile to the __buffer and increase __profIndex by one.
        '''
        self.__buffer[:,self.__profIndex,:]= data
        self.__profIndex += 1
        return

    def pushData(self,dataOut):
        '''
        Return the pulse-pair products and the number of profiles used.
        Affected : self.__profIndex (reset to 0), self.__buffer (cleared),
        self.noise.
        '''
        #----------------- Remove DC-----------------------------------
        if self.removeDC==True:
            mean = numpy.mean(self.__buffer,1)
            tmp = mean.reshape(self.__nch,1,self.__nHeis)
            dc= numpy.tile(tmp,[1,self.__nProf,1])
            self.__buffer = self.__buffer - dc
        #------------------ Power computation -------------------------
        pair0 = self.__buffer*numpy.conj(self.__buffer)
        pair0 = pair0.real
        lag_0 = numpy.sum(pair0,1)
        #------------------ Noise estimate per channel ----------------
        self.noise = numpy.zeros(self.__nch)
        for i in range(self.__nch):
            daux = numpy.sort(pair0[i,:,:],axis= None)
            self.noise[i]=hildebrand_sekhon( daux ,self.nCohInt)

        self.noise = self.noise.reshape(self.__nch,1)
        self.noise = numpy.tile(self.noise,[1,self.__nHeis])
        noise_buffer = self.noise.reshape(self.__nch,1,self.__nHeis)
        noise_buffer = numpy.tile(noise_buffer,[1,self.__nProf,1])
        #------------------ Received power = P, signal power = S, noise = N --
        #------------------ P = S+N ,  P = lag_0/N ---------------------------
        #-------------------- Power --------------------------------------------------
        data_power = lag_0/(self.n*self.nCohInt)
        #------------------ Signal ---------------------------------------------------
        data_intensity = pair0 - noise_buffer
        data_intensity = numpy.sum(data_intensity,axis=1)*(self.n*self.nCohInt)#*self.nCohInt)
        #data_intensity = (lag_0-self.noise*self.n)*(self.n*self.nCohInt)
        # Clamp non-positive intensities (noise over-subtraction) to their
        # absolute value; numpy.min over a scalar is that scalar.
        for i in range(self.__nch):
            for j in range(self.__nHeis):
                if data_intensity[i][j] < 0:
                    data_intensity[i][j] = numpy.min(numpy.absolute(data_intensity[i][j]))

        #----------------- Doppler frequency and velocity (lag-1 phase) ------
        pair1 = self.__buffer[:,:-1,:]*numpy.conjugate(self.__buffer[:,1:,:])
        lag_1 = numpy.sum(pair1,1)
        data_freq = (-1/(2.0*math.pi*self.ippSec*self.nCohInt))*numpy.angle(lag_1)
        data_velocity = (self.lambda_/2.0)*data_freq

        #---------------- Estimated mean signal power ---------------------
        lag_0 = lag_0/self.n
        S = lag_0-self.noise

        #---------------- Mean Doppler frequency (lag-1 magnitude) --------
        lag_1 = lag_1/(self.n-1)
        R1 = numpy.abs(lag_1)

        #---------------- SNR computation ---------------------------------
        data_snrPP = S/self.noise
        # Floor the SNR to avoid zero/negative values downstream.
        for i in range(self.__nch):
            for j in range(self.__nHeis):
                if data_snrPP[i][j] < 1.e-20:
                    data_snrPP[i][j] = 1.e-20

        #----------------- Spectral width ----------------------------------
        L = S/R1
        L = numpy.where(L<0,1,L)
        L = numpy.log(L)
        tmp = numpy.sqrt(numpy.absolute(L))
        data_specwidth = (self.lambda_/(2*math.sqrt(2)*math.pi*self.ippSec*self.nCohInt))*tmp*numpy.sign(L)
        n = self.__profIndex

        # Reset the accumulator for the next batch.
        self.__buffer = numpy.zeros((self.__nch, self.__nProf,self.__nHeis), dtype='complex')
        self.__profIndex = 0
        return data_power,data_intensity,data_velocity,data_snrPP,data_specwidth,n


    def pulsePairbyProfiles(self,dataOut):
        # Feed one profile; once n profiles are buffered, compute the
        # pulse-pair products and flag __dataReady.
        self.__dataReady = False
        data_power = None
        data_intensity = None
        data_velocity = None
        data_specwidth = None
        data_snrPP = None
        self.putData(data=dataOut.data)
        if self.__profIndex == self.n:
            data_power,data_intensity, data_velocity,data_snrPP,data_specwidth, n = self.pushData(dataOut=dataOut)
            self.__dataReady = True

        return data_power, data_intensity, data_velocity, data_snrPP, data_specwidth


    def pulsePairOp(self, dataOut, datatime= None):
        # Wrapper that tracks timestamps for the averaged output.
        if self.__initime == None:
            self.__initime = datatime
        data_power, data_intensity, data_velocity, data_snrPP, data_specwidth = self.pulsePairbyProfiles(dataOut)
        self.__lastdatatime = datatime

        if data_power is None:
            return None, None, None,None,None,None

        # Output is stamped with the start time of the batch.
        avgdatatime = self.__initime
        # NOTE(review): deltatime is computed but never used -- confirm
        # whether it was meant to feed the output timestamp.
        deltatime = datatime - self.__lastdatatime
        self.__initime = datatime

        return data_power, data_intensity, data_velocity, data_snrPP, data_specwidth, avgdatatime

    def run(self, dataOut,n = None,removeDC= False, overlapping= False,**kwargs):
        # Entry point: accumulate profiles and publish the pulse-pair
        # products on dataOut once a batch of n profiles is complete.
        if not self.isConfig:
            self.setup(dataOut = dataOut, n = n , removeDC=removeDC , **kwargs)
            self.isConfig = True
        data_power, data_intensity, data_velocity,data_snrPP,data_specwidth, avgdatatime = self.pulsePairOp(dataOut, dataOut.utctime)
        dataOut.flagNoData = True

        if self.__dataReady:
            # n profiles were integrated into this output.
            dataOut.nCohInt *= self.n
            dataOut.dataPP_POW = data_intensity # S
            dataOut.dataPP_POWER = data_power # P
            dataOut.dataPP_DOP = data_velocity
            dataOut.dataPP_SNR = data_snrPP
            dataOut.dataPP_WIDTH = data_specwidth
            dataOut.PRFbyAngle = self.n # number of PRFs per rotated angle, equivalent to one time step.
            dataOut.utctime = avgdatatime
            dataOut.flagNoData = False
        return dataOut
1487 1485
1488 1486
1489 1487
1490 1488 # import collections
1491 1489 # from scipy.stats import mode
1492 1490 #
1493 1491 # class Synchronize(Operation):
1494 1492 #
1495 1493 # isConfig = False
1496 1494 # __profIndex = 0
1497 1495 #
1498 1496 # def __init__(self, **kwargs):
1499 1497 #
1500 1498 # Operation.__init__(self, **kwargs)
1501 1499 # # self.isConfig = False
1502 1500 # self.__powBuffer = None
1503 1501 # self.__startIndex = 0
1504 1502 # self.__pulseFound = False
1505 1503 #
1506 1504 # def __findTxPulse(self, dataOut, channel=0, pulse_with = None):
1507 1505 #
1508 1506 # #Read data
1509 1507 #
1510 1508 # powerdB = dataOut.getPower(channel = channel)
1511 1509 # noisedB = dataOut.getNoise(channel = channel)[0]
1512 1510 #
1513 1511 # self.__powBuffer.extend(powerdB.flatten())
1514 1512 #
1515 1513 # dataArray = numpy.array(self.__powBuffer)
1516 1514 #
1517 1515 # filteredPower = numpy.correlate(dataArray, dataArray[0:self.__nSamples], "same")
1518 1516 #
1519 1517 # maxValue = numpy.nanmax(filteredPower)
1520 1518 #
1521 1519 # if maxValue < noisedB + 10:
1522 1520 # #No se encuentra ningun pulso de transmision
1523 1521 # return None
1524 1522 #
1525 1523 # maxValuesIndex = numpy.where(filteredPower > maxValue - 0.1*abs(maxValue))[0]
1526 1524 #
1527 1525 # if len(maxValuesIndex) < 2:
1528 1526 # #Solo se encontro un solo pulso de transmision de un baudio, esperando por el siguiente TX
1529 1527 # return None
1530 1528 #
1531 1529 # phasedMaxValuesIndex = maxValuesIndex - self.__nSamples
1532 1530 #
1533 1531 # #Seleccionar solo valores con un espaciamiento de nSamples
1534 1532 # pulseIndex = numpy.intersect1d(maxValuesIndex, phasedMaxValuesIndex)
1535 1533 #
1536 1534 # if len(pulseIndex) < 2:
1537 1535 # #Solo se encontro un pulso de transmision con ancho mayor a 1
1538 1536 # return None
1539 1537 #
1540 1538 # spacing = pulseIndex[1:] - pulseIndex[:-1]
1541 1539 #
1542 1540 # #remover senales que se distancien menos de 10 unidades o muestras
1543 1541 # #(No deberian existir IPP menor a 10 unidades)
1544 1542 #
1545 1543 # realIndex = numpy.where(spacing > 10 )[0]
1546 1544 #
1547 1545 # if len(realIndex) < 2:
1548 1546 # #Solo se encontro un pulso de transmision con ancho mayor a 1
1549 1547 # return None
1550 1548 #
1551 1549 # #Eliminar pulsos anchos (deja solo la diferencia entre IPPs)
1552 1550 # realPulseIndex = pulseIndex[realIndex]
1553 1551 #
1554 1552 # period = mode(realPulseIndex[1:] - realPulseIndex[:-1])[0][0]
1555 1553 #
1556 1554 # print "IPP = %d samples" %period
1557 1555 #
1558 1556 # self.__newNSamples = dataOut.nHeights #int(period)
1559 1557 # self.__startIndex = int(realPulseIndex[0])
1560 1558 #
1561 1559 # return 1
1562 1560 #
1563 1561 #
1564 1562 # def setup(self, nSamples, nChannels, buffer_size = 4):
1565 1563 #
1566 1564 # self.__powBuffer = collections.deque(numpy.zeros( buffer_size*nSamples,dtype=numpy.float),
1567 1565 # maxlen = buffer_size*nSamples)
1568 1566 #
1569 1567 # bufferList = []
1570 1568 #
1571 1569 # for i in range(nChannels):
1572 1570 # bufferByChannel = collections.deque(numpy.zeros( buffer_size*nSamples, dtype=numpy.complex) + numpy.NAN,
1573 1571 # maxlen = buffer_size*nSamples)
1574 1572 #
1575 1573 # bufferList.append(bufferByChannel)
1576 1574 #
1577 1575 # self.__nSamples = nSamples
1578 1576 # self.__nChannels = nChannels
1579 1577 # self.__bufferList = bufferList
1580 1578 #
1581 1579 # def run(self, dataOut, channel = 0):
1582 1580 #
1583 1581 # if not self.isConfig:
1584 1582 # nSamples = dataOut.nHeights
1585 1583 # nChannels = dataOut.nChannels
1586 1584 # self.setup(nSamples, nChannels)
1587 1585 # self.isConfig = True
1588 1586 #
1589 1587 # #Append new data to internal buffer
1590 1588 # for thisChannel in range(self.__nChannels):
1591 1589 # bufferByChannel = self.__bufferList[thisChannel]
1592 1590 # bufferByChannel.extend(dataOut.data[thisChannel])
1593 1591 #
1594 1592 # if self.__pulseFound:
1595 1593 # self.__startIndex -= self.__nSamples
1596 1594 #
1597 1595 # #Finding Tx Pulse
1598 1596 # if not self.__pulseFound:
1599 1597 # indexFound = self.__findTxPulse(dataOut, channel)
1600 1598 #
1601 1599 # if indexFound == None:
1602 1600 # dataOut.flagNoData = True
1603 1601 # return
1604 1602 #
1605 1603 # self.__arrayBuffer = numpy.zeros((self.__nChannels, self.__newNSamples), dtype = numpy.complex)
1606 1604 # self.__pulseFound = True
1607 1605 # self.__startIndex = indexFound
1608 1606 #
1609 1607 # #If pulse was found ...
1610 1608 # for thisChannel in range(self.__nChannels):
1611 1609 # bufferByChannel = self.__bufferList[thisChannel]
1612 1610 # #print self.__startIndex
1613 1611 # x = numpy.array(bufferByChannel)
1614 1612 # self.__arrayBuffer[thisChannel] = x[self.__startIndex:self.__startIndex+self.__newNSamples]
1615 1613 #
1616 1614 # deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1617 1615 # dataOut.heightList = numpy.arange(self.__newNSamples)*deltaHeight
1618 1616 # # dataOut.ippSeconds = (self.__newNSamples / deltaHeight)/1e6
1619 1617 #
1620 1618 # dataOut.data = self.__arrayBuffer
1621 1619 #
1622 1620 # self.__startIndex += self.__newNSamples
1623 1621 #
1624 1622 # return
General Comments 0
You need to be logged in to leave comments. Login now