Completed the clean Rayleigh operation, modified HDF5 writing and reading for power, changed plots to use ChannelList, other pending items to be added
joabAM
r1397:9109c52a497c

The requested changes are too big and the content was truncated.

@@ -0,0 +1,11
1 #!/home/soporte/workspace/schain/ENV_DIR/bin/python3
2
3 # -*- coding: utf-8 -*-
4 import re
5 import sys
6
7 from setuptools.command.easy_install import main
8
9 if __name__ == '__main__':
10 sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
11 sys.exit(main())
@@ -0,0 +1,11
1 #!/home/soporte/workspace/schain/ENV_DIR/bin/python3
2
3 # -*- coding: utf-8 -*-
4 import re
5 import sys
6
7 from setuptools.command.easy_install import main
8
9 if __name__ == '__main__':
10 sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
11 sys.exit(main())
@@ -0,0 +1,11
1 #!/home/soporte/workspace/schain/ENV_DIR/bin/python3
2
3 # -*- coding: utf-8 -*-
4 import re
5 import sys
6
7 from pip import main
8
9 if __name__ == '__main__':
10 sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
11 sys.exit(main())
@@ -0,0 +1,11
1 #!/home/soporte/workspace/schain/ENV_DIR/bin/python3
2
3 # -*- coding: utf-8 -*-
4 import re
5 import sys
6
7 from pip import main
8
9 if __name__ == '__main__':
10 sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
11 sys.exit(main())
@@ -0,0 +1,11
1 #!/home/soporte/workspace/schain/ENV_DIR/bin/python3
2
3 # -*- coding: utf-8 -*-
4 import re
5 import sys
6
7 from pip import main
8
9 if __name__ == '__main__':
10 sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
11 sys.exit(main())
@@ -0,0 +1,1
1 python3 No newline at end of file
@@ -0,0 +1,1
1 /usr/bin/python3 No newline at end of file
@@ -0,0 +1,1
1 lib No newline at end of file
@@ -0,0 +1,3
1 home = /usr/bin
2 include-system-site-packages = false
3 version = 3.6.9
1 NO CONTENT: new file 100644, binary diff hidden (repeated for 23 new binary files)
@@ -1,1074 +1,1073
1 1 # Copyright (c) 2012-2020 Jicamarca Radio Observatory
2 2 # All rights reserved.
3 3 #
4 4 # Distributed under the terms of the BSD 3-clause license.
5 5 """Definition of diferent Data objects for different types of data
6 6
7 7 Here you will find the different data objects for the different types
8 8 of data; these data objects must be used as dataIn or dataOut objects in
9 9 processing units and operations. Currently the supported data objects are:
10 10 Voltage, Spectra, SpectraHeis, Fits, Correlation and Parameters
11 11 """
12 12
13 13 import copy
14 14 import numpy
15 15 import datetime
16 16 import json
17 17
18 18 import schainpy.admin
19 19 from schainpy.utils import log
20 20 from .jroheaderIO import SystemHeader, RadarControllerHeader
21 21 from schainpy.model.data import _noise
22 22
23 23
24 24 def getNumpyDtype(dataTypeCode):
25 25
26 26 if dataTypeCode == 0:
27 27 numpyDtype = numpy.dtype([('real', '<i1'), ('imag', '<i1')])
28 28 elif dataTypeCode == 1:
29 29 numpyDtype = numpy.dtype([('real', '<i2'), ('imag', '<i2')])
30 30 elif dataTypeCode == 2:
31 31 numpyDtype = numpy.dtype([('real', '<i4'), ('imag', '<i4')])
32 32 elif dataTypeCode == 3:
33 33 numpyDtype = numpy.dtype([('real', '<i8'), ('imag', '<i8')])
34 34 elif dataTypeCode == 4:
35 35 numpyDtype = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
36 36 elif dataTypeCode == 5:
37 37 numpyDtype = numpy.dtype([('real', '<f8'), ('imag', '<f8')])
38 38 else:
39 39 raise ValueError('dataTypeCode was not defined')
40 40
41 41 return numpyDtype
42 42
43 43
44 44 def getDataTypeCode(numpyDtype):
45 45
46 46 if numpyDtype == numpy.dtype([('real', '<i1'), ('imag', '<i1')]):
47 47 datatype = 0
48 48 elif numpyDtype == numpy.dtype([('real', '<i2'), ('imag', '<i2')]):
49 49 datatype = 1
50 50 elif numpyDtype == numpy.dtype([('real', '<i4'), ('imag', '<i4')]):
51 51 datatype = 2
52 52 elif numpyDtype == numpy.dtype([('real', '<i8'), ('imag', '<i8')]):
53 53 datatype = 3
54 54 elif numpyDtype == numpy.dtype([('real', '<f4'), ('imag', '<f4')]):
55 55 datatype = 4
56 56 elif numpyDtype == numpy.dtype([('real', '<f8'), ('imag', '<f8')]):
57 57 datatype = 5
58 58 else:
59 59 datatype = None
60 60
61 61 return datatype
62 62
63 63
64 64 def hildebrand_sekhon(data, navg):
65 65 """
66 66 This method is for the objective determination of the noise level in Doppler spectra. This
67 67 implementation technique is based on the fact that the standard deviation of the spectral
68 68 densities is equal to the mean spectral density for white Gaussian noise
69 69
70 70 Inputs:
71 71 data : spectral density values (sorted internally before processing)
72 72 navg : number of averages
73 73
74 74 Return:
75 75 mean : estimated noise level
76 76 """
77 77
78 78 sortdata = numpy.sort(data, axis=None)
79 79 '''
80 80 lenOfData = len(sortdata)
81 81 nums_min = lenOfData*0.2
82 82
83 83 if nums_min <= 5:
84 84
85 85 nums_min = 5
86 86
87 87 sump = 0.
88 88 sumq = 0.
89 89
90 90 j = 0
91 91 cont = 1
92 92
93 93 while((cont == 1)and(j < lenOfData)):
94 94
95 95 sump += sortdata[j]
96 96 sumq += sortdata[j]**2
97 97
98 98 if j > nums_min:
99 99 rtest = float(j)/(j-1) + 1.0/navg
100 100 if ((sumq*j) > (rtest*sump**2)):
101 101 j = j - 1
102 102 sump = sump - sortdata[j]
103 103 sumq = sumq - sortdata[j]**2
104 104 cont = 0
105 105
106 106 j += 1
107 107
108 108 lnoise = sump / j
109 109 '''
110 110 return _noise.hildebrand_sekhon(sortdata, navg)
111 111
112 112
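Illustrative sketch (not part of this changeset): a minimal way to exercise the estimator above on synthetic white-noise power values, assuming the compiled schainpy.model.data._noise extension is importable; the chi-square draw is only a stand-in for averaged spectral densities.

import numpy
from schainpy.model.data import _noise

navg = 10                                  # assumed number of incoherent averages
rng = numpy.random.default_rng(0)
# averaged white-noise power: mean ~1, relative spread ~1/sqrt(navg)
power = rng.chisquare(2 * navg, size=1000) / (2 * navg)
sortdata = numpy.sort(power, axis=None)    # the extension expects sorted data
noise = _noise.hildebrand_sekhon(sortdata, navg)
print(noise)                               # should come out close to 1.0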
113 113 class Beam:
114 114
115 115 def __init__(self):
116 116 self.codeList = []
117 117 self.azimuthList = []
118 118 self.zenithList = []
119 119
120 120
121 121
122 122 class GenericData(object):
123 123
124 124 flagNoData = True
125 125
126 126 def copy(self, inputObj=None):
127 127
128 128 if inputObj == None:
129 129 return copy.deepcopy(self)
130 130
131 131 for key in list(inputObj.__dict__.keys()):
132 132
133 133 attribute = inputObj.__dict__[key]
134 134
135 135 # If this attribute is a tuple or list
136 136 if type(inputObj.__dict__[key]) in (tuple, list):
137 137 self.__dict__[key] = attribute[:]
138 138 continue
139 139
140 140 # If this attribute is another object or instance
141 141 if hasattr(attribute, '__dict__'):
142 142 self.__dict__[key] = attribute.copy()
143 143 continue
144 144
145 145 self.__dict__[key] = inputObj.__dict__[key]
146 146
147 147 def deepcopy(self):
148 148
149 149 return copy.deepcopy(self)
150 150
151 151 def isEmpty(self):
152 152
153 153 return self.flagNoData
154 154
155 155 def isReady(self):
156 156
157 157 return not self.flagNoData
158 158
159 159
160 160 class JROData(GenericData):
161 161
162 162 systemHeaderObj = SystemHeader()
163 163 radarControllerHeaderObj = RadarControllerHeader()
164 164 type = None
165 165 datatype = None # dtype but in string
166 166 nProfiles = None
167 167 heightList = None
168 168 channelList = None
169 169 flagDiscontinuousBlock = False
170 170 useLocalTime = False
171 171 utctime = None
172 172 timeZone = None
173 173 dstFlag = None
174 174 errorCount = None
175 175 blocksize = None
176 176 flagDecodeData = False # assume the data is not decoded
177 177 flagDeflipData = False # assume the data has not been de-flipped
178 178 flagShiftFFT = False
179 179 nCohInt = None
180 180 windowOfFilter = 1
181 181 C = 3e8
182 182 frequency = 49.92e6
183 183 realtime = False
184 184 beacon_heiIndexList = None
185 185 last_block = None
186 186 blocknow = None
187 187 azimuth = None
188 188 zenith = None
189 189 beam = Beam()
190 190 profileIndex = None
191 191 error = None
192 192 data = None
193 193 nmodes = None
194 194 metadata_list = ['heightList', 'timeZone', 'type']
195 195 codeList = None
196 196 azimuthList = None
197 197 elevationList = None
198 198
199 199 def __str__(self):
200 200
201 201 return '{} - {}'.format(self.type, self.datatime)
202 202
203 203 def getNoise(self):
204 204
205 205 raise NotImplementedError
206 206
207 207 @property
208 208 def nChannels(self):
209 209
210 210 return len(self.channelList)
211 211
212 212 @property
213 213 def channelIndexList(self):
214 214
215 215 return list(range(self.nChannels))
216 216
217 217 @property
218 218 def nHeights(self):
219 219
220 220 return len(self.heightList)
221 221
222 222 def getDeltaH(self):
223 223
224 224 return self.heightList[1] - self.heightList[0]
225 225
226 226 @property
227 227 def ltctime(self):
228 228
229 229 if self.useLocalTime:
230 230 return self.utctime - self.timeZone * 60
231 231
232 232 return self.utctime
233 233
234 234 @property
235 235 def datatime(self):
236 236
237 237 datatimeValue = datetime.datetime.utcfromtimestamp(self.ltctime)
238 238 return datatimeValue
239 239
240 240 def getTimeRange(self):
241 241
242 242 datatime = []
243 243
244 244 datatime.append(self.ltctime)
245 245 datatime.append(self.ltctime + self.timeInterval + 1)
246 246
247 247 datatime = numpy.array(datatime)
248 248
249 249 return datatime
250 250
251 251 def getFmaxTimeResponse(self):
252 252
253 253 period = (10**-6) * self.getDeltaH() / (0.15)
254 254
255 255 PRF = 1. / (period * self.nCohInt)
256 256
257 257 fmax = PRF
258 258
259 259 return fmax
260 260
261 261 def getFmax(self):
262 262 PRF = 1. / (self.ippSeconds * self.nCohInt)
263 263
264 264 fmax = PRF
265 265 return fmax
266 266
267 267 def getVmax(self):
268 268
269 269 _lambda = self.C / self.frequency
270 270
271 271 vmax = self.getFmax() * _lambda / 2
272 272
273 273 return vmax
274 274
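Illustrative numbers for the two relations above (getFmax and getVmax), using the class defaults C = 3e8 and frequency = 49.92e6 together with an assumed IPP of 1 ms and nCohInt = 1:

C = 3e8                      # speed of light, as defined in JROData
frequency = 49.92e6          # default operating frequency [Hz]
ippSeconds = 1e-3            # assumed inter-pulse period [s]
nCohInt = 1                  # assumed coherent integrations

PRF = 1. / (ippSeconds * nCohInt)   # getFmax() -> 1000 Hz
_lambda = C / frequency             # ~6.01 m
vmax = PRF * _lambda / 2            # getVmax() -> ~3005 m/s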
275 275 @property
276 276 def ippSeconds(self):
277 277 '''
278 278 '''
279 279 return self.radarControllerHeaderObj.ippSeconds
280 280
281 281 @ippSeconds.setter
282 282 def ippSeconds(self, ippSeconds):
283 283 '''
284 284 '''
285 285 self.radarControllerHeaderObj.ippSeconds = ippSeconds
286 286
287 287 @property
288 288 def code(self):
289 289 '''
290 290 '''
291 291 return self.radarControllerHeaderObj.code
292 292
293 293 @code.setter
294 294 def code(self, code):
295 295 '''
296 296 '''
297 297 self.radarControllerHeaderObj.code = code
298 298
299 299 @property
300 300 def nCode(self):
301 301 '''
302 302 '''
303 303 return self.radarControllerHeaderObj.nCode
304 304
305 305 @nCode.setter
306 306 def nCode(self, ncode):
307 307 '''
308 308 '''
309 309 self.radarControllerHeaderObj.nCode = ncode
310 310
311 311 @property
312 312 def nBaud(self):
313 313 '''
314 314 '''
315 315 return self.radarControllerHeaderObj.nBaud
316 316
317 317 @nBaud.setter
318 318 def nBaud(self, nbaud):
319 319 '''
320 320 '''
321 321 self.radarControllerHeaderObj.nBaud = nbaud
322 322
323 323 @property
324 324 def ipp(self):
325 325 '''
326 326 '''
327 327 return self.radarControllerHeaderObj.ipp
328 328
329 329 @ipp.setter
330 330 def ipp(self, ipp):
331 331 '''
332 332 '''
333 333 self.radarControllerHeaderObj.ipp = ipp
334 334
335 335 @property
336 336 def metadata(self):
337 337 '''
338 338 '''
339 339
340 340 return {attr: getattr(self, attr) for attr in self.metadata_list}
341 341
342 342
343 343 class Voltage(JROData):
344 344
345 345 dataPP_POW = None
346 346 dataPP_DOP = None
347 347 dataPP_WIDTH = None
348 348 dataPP_SNR = None
349 349
350 350 def __init__(self):
351 351 '''
352 352 Constructor
353 353 '''
354 354
355 355 self.useLocalTime = True
356 356 self.radarControllerHeaderObj = RadarControllerHeader()
357 357 self.systemHeaderObj = SystemHeader()
358 358 self.type = "Voltage"
359 359 self.data = None
360 360 self.nProfiles = None
361 361 self.heightList = None
362 362 self.channelList = None
363 363 self.flagNoData = True
364 364 self.flagDiscontinuousBlock = False
365 365 self.utctime = None
366 366 self.timeZone = 0
367 367 self.dstFlag = None
368 368 self.errorCount = None
369 369 self.nCohInt = None
370 370 self.blocksize = None
371 371 self.flagCohInt = False
372 372 self.flagDecodeData = False # assume the data is not decoded
373 373 self.flagDeflipData = False # assume the data has not been de-flipped
374 374 self.flagShiftFFT = False
375 375 self.flagDataAsBlock = False # assume the data is read profile by profile
376 376 self.profileIndex = 0
377 377 self.metadata_list = ['type', 'heightList', 'timeZone', 'nProfiles', 'channelList', 'nCohInt',
378 378 'code', 'nCode', 'nBaud', 'ippSeconds', 'ipp']
379 379
380 380 def getNoisebyHildebrand(self, channel=None):
381 381 """
382 382 Determine the noise level using the Hildebrand-Sekhon method
383 383
384 384 Return:
385 385 noiselevel
386 386 """
387 387
388 388 if channel != None:
389 389 data = self.data[channel]
390 390 nChannels = 1
391 391 else:
392 392 data = self.data
393 393 nChannels = self.nChannels
394 394
395 395 noise = numpy.zeros(nChannels)
396 396 power = data * numpy.conjugate(data)
397 397
398 398 for thisChannel in range(nChannels):
399 399 if nChannels == 1:
400 400 daux = power[:].real
401 401 else:
402 402 daux = power[thisChannel, :].real
403 403 noise[thisChannel] = hildebrand_sekhon(daux, self.nCohInt)
404 404
405 405 return noise
406 406
407 407 def getNoise(self, type=1, channel=None):
408 408
409 409 if type == 1:
410 410 noise = self.getNoisebyHildebrand(channel)
411 411
412 412 return noise
413 413
414 414 def getPower(self, channel=None):
415 415
416 416 if channel != None:
417 417 data = self.data[channel]
418 418 else:
419 419 data = self.data
420 420
421 421 power = data * numpy.conjugate(data)
422 422 powerdB = 10 * numpy.log10(power.real)
423 423 powerdB = numpy.squeeze(powerdB)
424 424
425 425 return powerdB
426 426
427 427 @property
428 428 def timeInterval(self):
429 429
430 430 return self.ippSeconds * self.nCohInt
431 431
432 432 noise = property(getNoise, doc="I'm the 'noise' property.")
433 433
434 434
435 435 class Spectra(JROData):
436 436
437 437 def __init__(self):
438 438 '''
439 439 Constructor
440 440 '''
441 441
442 442 self.data_dc = None
443 443 self.data_spc = None
444 444 self.data_cspc = None
445 445 self.useLocalTime = True
446 446 self.radarControllerHeaderObj = RadarControllerHeader()
447 447 self.systemHeaderObj = SystemHeader()
448 448 self.type = "Spectra"
449 449 self.timeZone = 0
450 450 self.nProfiles = None
451 451 self.heightList = None
452 452 self.channelList = None
453 453 self.pairsList = None
454 454 self.flagNoData = True
455 455 self.flagDiscontinuousBlock = False
456 456 self.utctime = None
457 457 self.nCohInt = None
458 458 self.nIncohInt = None
459 459 self.blocksize = None
460 460 self.nFFTPoints = None
461 461 self.wavelength = None
462 462 self.flagDecodeData = False # assume the data is not decoded
463 463 self.flagDeflipData = False # assume the data has not been de-flipped
464 464 self.flagShiftFFT = False
465 465 self.ippFactor = 1
466 466 self.beacon_heiIndexList = []
467 467 self.noise_estimation = None
468 468 self.metadata_list = ['type', 'heightList', 'timeZone', 'pairsList', 'channelList', 'nCohInt',
469 469 'code', 'nCode', 'nBaud', 'ippSeconds', 'ipp','nIncohInt', 'nFFTPoints', 'nProfiles']
470 470
471 471
472 472 def getNoisebyHildebrand(self, xmin_index=None, xmax_index=None, ymin_index=None, ymax_index=None):
473 473 """
474 474 Determine the noise level using the Hildebrand-Sekhon method
475 475
476 476 Return:
477 477 noiselevel
478 478 """
479 479
480 480 noise = numpy.zeros(self.nChannels)
481
482 481 for channel in range(self.nChannels):
483 482 daux = self.data_spc[channel,
484 483 xmin_index:xmax_index, ymin_index:ymax_index]
485 484 noise[channel] = hildebrand_sekhon(daux, self.nIncohInt)
486 485
487 486 return noise
488 487
489 488 def getNoise(self, xmin_index=None, xmax_index=None, ymin_index=None, ymax_index=None):
490 489
491 490 if self.noise_estimation is not None:
492 491 # this was estimated by getNoise Operation defined in jroproc_spectra.py
493 492 return self.noise_estimation
494 493 else:
495 494 noise = self.getNoisebyHildebrand(
496 495 xmin_index, xmax_index, ymin_index, ymax_index)
497 496 return noise
498 497
499 498 def getFreqRangeTimeResponse(self, extrapoints=0):
500 499
501 500 deltafreq = self.getFmaxTimeResponse() / (self.nFFTPoints * self.ippFactor)
502 501 freqrange = deltafreq * (numpy.arange(self.nFFTPoints + extrapoints) - self.nFFTPoints / 2.) - deltafreq / 2
503 502
504 503 return freqrange
505 504
506 505 def getAcfRange(self, extrapoints=0):
507 506
508 507 deltafreq = 10. / (self.getFmax() / (self.nFFTPoints * self.ippFactor))
509 508 freqrange = deltafreq * (numpy.arange(self.nFFTPoints + extrapoints) -self.nFFTPoints / 2.) - deltafreq / 2
510 509
511 510 return freqrange
512 511
513 512 def getFreqRange(self, extrapoints=0):
514 513
515 514 deltafreq = self.getFmax() / (self.nFFTPoints * self.ippFactor)
516 515 freqrange = deltafreq * (numpy.arange(self.nFFTPoints + extrapoints) -self.nFFTPoints / 2.) - deltafreq / 2
517 516
518 517 return freqrange
519 518
520 519 def getVelRange(self, extrapoints=0):
521 520
522 521 deltav = self.getVmax() / (self.nFFTPoints * self.ippFactor)
523 522 velrange = deltav * (numpy.arange(self.nFFTPoints + extrapoints) - self.nFFTPoints / 2.)
524 523
525 524 if self.nmodes:
526 525 return velrange/self.nmodes
527 526 else:
528 527 return velrange
529 528
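A short sketch of the velocity axis that getVelRange builds from getVmax; the numbers below are assumed, not taken from the changeset:

import numpy

vmax = 3004.8        # assumed aliasing velocity [m/s], see getVmax above
nFFTPoints = 128     # assumed FFT length
ippFactor = 1

deltav = vmax / (nFFTPoints * ippFactor)
velrange = deltav * (numpy.arange(nFFTPoints) - nFFTPoints / 2.)
# velrange spans roughly [-vmax/2, +vmax/2), centered on zero Doppler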
530 529 @property
531 530 def nPairs(self):
532 531
533 532 return len(self.pairsList)
534 533
535 534 @property
536 535 def pairsIndexList(self):
537 536
538 537 return list(range(self.nPairs))
539 538
540 539 @property
541 540 def normFactor(self):
542 541
543 542 pwcode = 1
544 543
545 544 if self.flagDecodeData:
546 545 pwcode = numpy.sum(self.code[0]**2)
547 546 #normFactor = min(self.nFFTPoints,self.nProfiles)*self.nIncohInt*self.nCohInt*pwcode*self.windowOfFilter
548 547 normFactor = self.nProfiles * self.nIncohInt * self.nCohInt * pwcode * self.windowOfFilter
549 548
550 549 return normFactor
551 550
552 551 @property
553 552 def flag_cspc(self):
554 553
555 554 if self.data_cspc is None:
556 555 return True
557 556
558 557 return False
559 558
560 559 @property
561 560 def flag_dc(self):
562 561
563 562 if self.data_dc is None:
564 563 return True
565 564
566 565 return False
567 566
568 567 @property
569 568 def timeInterval(self):
570 569
571 570 timeInterval = self.ippSeconds * self.nCohInt * self.nIncohInt * self.nProfiles * self.ippFactor
572 571 if self.nmodes:
573 572 return self.nmodes*timeInterval
574 573 else:
575 574 return timeInterval
576 575
577 576 def getPower(self):
578 577
579 578 factor = self.normFactor
580 579 z = self.data_spc / factor
581 580 z = numpy.where(numpy.isfinite(z), z, numpy.NAN)
582 581 avg = numpy.average(z, axis=1)
583 582
584 583 return 10 * numpy.log10(avg)
585 584
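A minimal sketch of the normalization that getPower applies to data_spc; shapes and factor values are assumed for illustration only:

import numpy

nChannels, nFFTPoints, nHeights = 2, 64, 100
data_spc = numpy.abs(numpy.random.randn(nChannels, nFFTPoints, nHeights))   # synthetic spectra
normFactor = 64 * 10 * 1 * 1 * 1    # nProfiles * nIncohInt * nCohInt * pwcode * windowOfFilter

z = data_spc / normFactor
z = numpy.where(numpy.isfinite(z), z, numpy.NAN)
avg = numpy.average(z, axis=1)        # average over the FFT (velocity) axis
power_dB = 10 * numpy.log10(avg)      # (nChannels, nHeights) array of power in dB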
586 585 def getCoherence(self, pairsList=None, phase=False):
587 586
588 587 z = []
589 588 if pairsList is None:
590 589 pairsIndexList = self.pairsIndexList
591 590 else:
592 591 pairsIndexList = []
593 592 for pair in pairsList:
594 593 if pair not in self.pairsList:
595 594 raise ValueError("Pair %s is not in dataOut.pairsList" % (
596 595 pair))
597 596 pairsIndexList.append(self.pairsList.index(pair))
598 597 for i in range(len(pairsIndexList)):
599 598 pair = self.pairsList[pairsIndexList[i]]
600 599 ccf = numpy.average(self.data_cspc[pairsIndexList[i], :, :], axis=0)
601 600 powa = numpy.average(self.data_spc[pair[0], :, :], axis=0)
602 601 powb = numpy.average(self.data_spc[pair[1], :, :], axis=0)
603 602 avgcoherenceComplex = ccf / numpy.sqrt(powa * powb)
604 603 if phase:
605 604 data = numpy.arctan2(avgcoherenceComplex.imag,
606 605 avgcoherenceComplex.real) * 180 / numpy.pi
607 606 else:
608 607 data = numpy.abs(avgcoherenceComplex)
609 608
610 609 z.append(data)
611 610
612 611 return numpy.array(z)
613 612
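A compact sketch of the per-pair coherence computed above, using synthetic spectra (the arrays are illustrative and not real cross-spectra):

import numpy

nFFT, nHeights = 64, 100
spc0 = numpy.random.rand(nFFT, nHeights) + 1.    # assumed self-spectrum of channel 0
spc1 = numpy.random.rand(nFFT, nHeights) + 1.    # assumed self-spectrum of channel 1
cspc = numpy.random.rand(nFFT, nHeights) + 1j * numpy.random.rand(nFFT, nHeights)

ccf = numpy.average(cspc, axis=0)                # cross-spectrum averaged in velocity
powa = numpy.average(spc0, axis=0)
powb = numpy.average(spc1, axis=0)
avgcoherenceComplex = ccf / numpy.sqrt(powa * powb)
coherence = numpy.abs(avgcoherenceComplex)       # magnitude (in [0, 1] for real data)
phase = numpy.arctan2(avgcoherenceComplex.imag,
                      avgcoherenceComplex.real) * 180 / numpy.pi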
614 613 def setValue(self, value):
615 614
616 615 print("This property should not be initialized")
617 616
618 617 return
619 618
620 619 noise = property(getNoise, setValue, doc="I'm the 'noise' property.")
621 620
622 621
623 622 class SpectraHeis(Spectra):
624 623
625 624 def __init__(self):
626 625
627 626 self.radarControllerHeaderObj = RadarControllerHeader()
628 627 self.systemHeaderObj = SystemHeader()
629 628 self.type = "SpectraHeis"
630 629 self.nProfiles = None
631 630 self.heightList = None
632 631 self.channelList = None
633 632 self.flagNoData = True
634 633 self.flagDiscontinuousBlock = False
635 634 self.utctime = None
636 635 self.blocksize = None
637 636 self.profileIndex = 0
638 637 self.nCohInt = 1
639 638 self.nIncohInt = 1
640 639
641 640 @property
642 641 def normFactor(self):
643 642 pwcode = 1
644 643 if self.flagDecodeData:
645 644 pwcode = numpy.sum(self.code[0]**2)
646 645
647 646 normFactor = self.nIncohInt * self.nCohInt * pwcode
648 647
649 648 return normFactor
650 649
651 650 @property
652 651 def timeInterval(self):
653 652
654 653 return self.ippSeconds * self.nCohInt * self.nIncohInt
655 654
656 655
657 656 class Fits(JROData):
658 657
659 658 def __init__(self):
660 659
661 660 self.type = "Fits"
662 661 self.nProfiles = None
663 662 self.heightList = None
664 663 self.channelList = None
665 664 self.flagNoData = True
666 665 self.utctime = None
667 666 self.nCohInt = 1
668 667 self.nIncohInt = 1
669 668 self.useLocalTime = True
670 669 self.profileIndex = 0
671 670 self.timeZone = 0
672 671
673 672 def getTimeRange(self):
674 673
675 674 datatime = []
676 675
677 676 datatime.append(self.ltctime)
678 677 datatime.append(self.ltctime + self.timeInterval)
679 678
680 679 datatime = numpy.array(datatime)
681 680
682 681 return datatime
683 682
684 683 def getChannelIndexList(self):
685 684
686 685 return list(range(self.nChannels))
687 686
688 687 def getNoise(self, type=1):
689 688
690 689
691 690 if type == 1:
692 691 noise = self.getNoisebyHildebrand()
693 692
694 693 if type == 2:
695 694 noise = self.getNoisebySort()
696 695
697 696 if type == 3:
698 697 noise = self.getNoisebyWindow()
699 698
700 699 return noise
701 700
702 701 @property
703 702 def timeInterval(self):
704 703
705 704 timeInterval = self.ippSeconds * self.nCohInt * self.nIncohInt
706 705
707 706 return timeInterval
708 707
709 708 @property
710 709 def ippSeconds(self):
711 710 '''
712 711 '''
713 712 return self.ipp_sec
714 713
715 714 noise = property(getNoise, doc="I'm the 'noise' property.")
716 715
717 716
718 717 class Correlation(JROData):
719 718
720 719 def __init__(self):
721 720 '''
722 721 Constructor
723 722 '''
724 723 self.radarControllerHeaderObj = RadarControllerHeader()
725 724 self.systemHeaderObj = SystemHeader()
726 725 self.type = "Correlation"
727 726 self.data = None
728 727 self.dtype = None
729 728 self.nProfiles = None
730 729 self.heightList = None
731 730 self.channelList = None
732 731 self.flagNoData = True
733 732 self.flagDiscontinuousBlock = False
734 733 self.utctime = None
735 734 self.timeZone = 0
736 735 self.dstFlag = None
737 736 self.errorCount = None
738 737 self.blocksize = None
739 738 self.flagDecodeData = False # assume the data is not decoded
740 739 self.flagDeflipData = False # assume the data has not been de-flipped
741 740 self.pairsList = None
742 741 self.nPoints = None
743 742
744 743 def getPairsList(self):
745 744
746 745 return self.pairsList
747 746
748 747 def getNoise(self, mode=2):
749 748
750 749 indR = numpy.where(self.lagR == 0)[0][0]
751 750 indT = numpy.where(self.lagT == 0)[0][0]
752 751
753 752 jspectra0 = self.data_corr[:, :, indR, :]
754 753 jspectra = copy.copy(jspectra0)
755 754
756 755 num_chan = jspectra.shape[0]
757 756 num_hei = jspectra.shape[2]
758 757
759 758 freq_dc = jspectra.shape[1] / 2
760 759 ind_vel = numpy.array([-2, -1, 1, 2]) + freq_dc
761 760
762 761 if ind_vel[0] < 0:
763 762 ind_vel[list(range(0, 1))] = ind_vel[list(
764 763 range(0, 1))] + self.num_prof
765 764
766 765 if mode == 1:
767 766 jspectra[:, freq_dc, :] = (
768 767 jspectra[:, ind_vel[1], :] + jspectra[:, ind_vel[2], :]) / 2 # CORRECTION
769 768
770 769 if mode == 2:
771 770
772 771 vel = numpy.array([-2, -1, 1, 2])
773 772 xx = numpy.zeros([4, 4])
774 773
775 774 for fil in range(4):
776 775 xx[fil, :] = vel[fil]**numpy.asarray(list(range(4)))
777 776
778 777 xx_inv = numpy.linalg.inv(xx)
779 778 xx_aux = xx_inv[0, :]
780 779
781 780 for ich in range(num_chan):
782 781 yy = jspectra[ich, ind_vel, :]
783 782 jspectra[ich, freq_dc, :] = numpy.dot(xx_aux, yy)
784 783
785 784 junkid = jspectra[ich, freq_dc, :] <= 0
786 785 cjunkid = sum(junkid)
787 786
788 787 if cjunkid.any():
789 788 jspectra[ich, freq_dc, junkid.nonzero()] = (
790 789 jspectra[ich, ind_vel[1], junkid] + jspectra[ich, ind_vel[2], junkid]) / 2
791 790
792 791 noise = jspectra0[:, freq_dc, :] - jspectra[:, freq_dc, :]
793 792
794 793 return noise
795 794
796 795 @property
797 796 def timeInterval(self):
798 797
799 798 return self.ippSeconds * self.nCohInt * self.nProfiles
800 799
801 800 def splitFunctions(self):
802 801
803 802 pairsList = self.pairsList
804 803 ccf_pairs = []
805 804 acf_pairs = []
806 805 ccf_ind = []
807 806 acf_ind = []
808 807 for l in range(len(pairsList)):
809 808 chan0 = pairsList[l][0]
810 809 chan1 = pairsList[l][1]
811 810
812 811 # Obtaining autocorrelation pairs
813 812 if chan0 == chan1:
814 813 acf_pairs.append(chan0)
815 814 acf_ind.append(l)
816 815 else:
817 816 ccf_pairs.append(pairsList[l])
818 817 ccf_ind.append(l)
819 818
820 819 data_acf = self.data_cf[acf_ind]
821 820 data_ccf = self.data_cf[ccf_ind]
822 821
823 822 return acf_ind, ccf_ind, acf_pairs, ccf_pairs, data_acf, data_ccf
824 823
825 824 @property
826 825 def normFactor(self):
827 826 acf_ind, ccf_ind, acf_pairs, ccf_pairs, data_acf, data_ccf = self.splitFunctions()
828 827 acf_pairs = numpy.array(acf_pairs)
829 828 normFactor = numpy.zeros((self.nPairs, self.nHeights))
830 829
831 830 for p in range(self.nPairs):
832 831 pair = self.pairsList[p]
833 832
834 833 ch0 = pair[0]
835 834 ch1 = pair[1]
836 835
837 836 ch0_max = numpy.max(data_acf[acf_pairs == ch0, :, :], axis=1)
838 837 ch1_max = numpy.max(data_acf[acf_pairs == ch1, :, :], axis=1)
839 838 normFactor[p, :] = numpy.sqrt(ch0_max * ch1_max)
840 839
841 840 return normFactor
842 841
843 842
844 843 class Parameters(Spectra):
845 844
846 845 groupList = None # List of Pairs, Groups, etc
847 846 data_param = None # Parameters obtained
848 847 data_pre = None # Data Pre Parametrization
849 848 data_SNR = None # Signal to Noise Ratio
850 849 abscissaList = None # Abscissa, can be velocities, lags or time
851 850 utctimeInit = None # Initial UTC time
852 851 paramInterval = None # Time interval to calculate Parameters in seconds
853 852 useLocalTime = True
854 853 # Fitting
855 854 data_error = None # Error of the estimation
856 855 constants = None
857 856 library = None
858 857 # Output signal
859 858 outputInterval = None # Time interval to calculate output signal in seconds
860 859 data_output = None # Out signal
861 860 nAvg = None
862 861 noise_estimation = None
863 862 GauSPC = None # Fit gaussian SPC
864 863
865 864 def __init__(self):
866 865 '''
867 866 Constructor
868 867 '''
869 868 self.radarControllerHeaderObj = RadarControllerHeader()
870 869 self.systemHeaderObj = SystemHeader()
871 870 self.type = "Parameters"
872 871 self.timeZone = 0
873 872
874 873 def getTimeRange1(self, interval):
875 874
876 875 datatime = []
877 876
878 877 if self.useLocalTime:
879 878 time1 = self.utctimeInit - self.timeZone * 60
880 879 else:
881 880 time1 = self.utctimeInit
882 881
883 882 datatime.append(time1)
884 883 datatime.append(time1 + interval)
885 884 datatime = numpy.array(datatime)
886 885
887 886 return datatime
888 887
889 888 @property
890 889 def timeInterval(self):
891 890
892 891 if hasattr(self, 'timeInterval1'):
893 892 return self.timeInterval1
894 893 else:
895 894 return self.paramInterval
896 895
897 896 def setValue(self, value):
898 897
899 898 print("This property should not be initialized")
900 899
901 900 return
902 901
903 902 def getNoise(self):
904 903
905 904 return self.spc_noise
906 905
907 906 noise = property(getNoise, setValue, doc="I'm the 'Noise' property.")
908 907
909 908
910 909 class PlotterData(object):
911 910 '''
912 911 Object to hold data to be plotted
913 912 '''
914 913
915 914 MAXNUMX = 200
916 915 MAXNUMY = 200
917 916
918 917 def __init__(self, code, exp_code, localtime=True):
919 918
920 919 self.key = code
921 920 self.exp_code = exp_code
922 921 self.ready = False
923 922 self.flagNoData = False
924 923 self.localtime = localtime
925 924 self.data = {}
926 925 self.meta = {}
927 926 self.__heights = []
928 927
929 928 def __str__(self):
930 929 dum = ['{}{}'.format(key, self.shape(key)) for key in self.data]
931 930 return 'Data[{}][{}]'.format(';'.join(dum), len(self.times))
932 931
933 932 def __len__(self):
934 933 return len(self.data)
935 934
936 935 def __getitem__(self, key):
937 936 if isinstance(key, int):
938 937 return self.data[self.times[key]]
939 938 elif isinstance(key, str):
940 939 ret = numpy.array([self.data[x][key] for x in self.times])
941 940 if ret.ndim > 1:
942 941 ret = numpy.swapaxes(ret, 0, 1)
943 942 return ret
944 943
945 944 def __contains__(self, key):
946 945 return key in self.data[self.min_time]
947 946
948 947 def setup(self):
949 948 '''
950 949 Configure object
951 950 '''
952 951 self.type = ''
953 952 self.ready = False
954 953 del self.data
955 954 self.data = {}
956 955 self.__heights = []
957 956 self.__all_heights = set()
958 957
959 958 def shape(self, key):
960 959 '''
961 960 Get the shape of the one-element data for the given key
962 961 '''
963 962
964 963 if len(self.data[self.min_time][key]):
965 964 return self.data[self.min_time][key].shape
966 965 return (0,)
967 966
968 967 def update(self, data, tm, meta={}):
969 968 '''
970 969 Update data object with new dataOut
971 970 '''
972 971
973 972 self.data[tm] = data
974 973
975 974 for key, value in meta.items():
976 975 setattr(self, key, value)
977 976
978 977 def normalize_heights(self):
979 978 '''
980 979 Ensure the data have the same dimensions for different heightList values
981 980 '''
982 981
983 982 H = numpy.array(list(self.__all_heights))
984 983 H.sort()
985 984 for key in self.data:
986 985 shape = self.shape(key)[:-1] + H.shape
987 986 for tm, obj in list(self.data[key].items()):
988 987 h = self.__heights[self.times.tolist().index(tm)]
989 988 if H.size == h.size:
990 989 continue
991 990 index = numpy.where(numpy.in1d(H, h))[0]
992 991 dummy = numpy.zeros(shape) + numpy.nan
993 992 if len(shape) == 2:
994 993 dummy[:, index] = obj
995 994 else:
996 995 dummy[index] = obj
997 996 self.data[key][tm] = dummy
998 997
999 998 self.__heights = [H for tm in self.times]
1000 999
1001 1000 def jsonify(self, tm, plot_name, plot_type, decimate=False):
1002 1001 '''
1003 1002 Convert data to json
1004 1003 '''
1005 1004
1006 1005 meta = {}
1007 1006 meta['xrange'] = []
1008 1007 dy = int(len(self.yrange)/self.MAXNUMY) + 1
1009 1008 tmp = self.data[tm][self.key]
1010 1009 shape = tmp.shape
1011 1010 if len(shape) == 2:
1012 1011 data = self.roundFloats(self.data[tm][self.key][::, ::dy].tolist())
1013 1012 elif len(shape) == 3:
1014 1013 dx = int(self.data[tm][self.key].shape[1]/self.MAXNUMX) + 1
1015 1014 data = self.roundFloats(
1016 1015 self.data[tm][self.key][::, ::dx, ::dy].tolist())
1017 1016 meta['xrange'] = self.roundFloats(self.xrange[2][::dx].tolist())
1018 1017 else:
1019 1018 data = self.roundFloats(self.data[tm][self.key].tolist())
1020 1019
1021 1020 ret = {
1022 1021 'plot': plot_name,
1023 1022 'code': self.exp_code,
1024 1023 'time': float(tm),
1025 1024 'data': data,
1026 1025 }
1027 1026 meta['type'] = plot_type
1028 1027 meta['interval'] = float(self.interval)
1029 1028 meta['localtime'] = self.localtime
1030 1029 meta['yrange'] = self.roundFloats(self.yrange[::dy].tolist())
1031 1030 meta.update(self.meta)
1032 1031 ret['metadata'] = meta
1033 1032 return json.dumps(ret)
1034 1033
1035 1034 @property
1036 1035 def times(self):
1037 1036 '''
1038 1037 Return the list of times of the current data
1039 1038 '''
1040 1039
1041 1040 ret = [t for t in self.data]
1042 1041 ret.sort()
1043 1042 return numpy.array(ret)
1044 1043
1045 1044 @property
1046 1045 def min_time(self):
1047 1046 '''
1048 1047 Return the minimum time value
1049 1048 '''
1050 1049
1051 1050 return self.times[0]
1052 1051
1053 1052 @property
1054 1053 def max_time(self):
1055 1054 '''
1056 1055 Return the maximum time value
1057 1056 '''
1058 1057
1059 1058 return self.times[-1]
1060 1059
1061 1060 # @property
1062 1061 # def heights(self):
1063 1062 # '''
1064 1063 # Return the list of heights of the current data
1065 1064 # '''
1066 1065
1067 1066 # return numpy.array(self.__heights[-1])
1068 1067
1069 1068 @staticmethod
1070 1069 def roundFloats(obj):
1071 1070 if isinstance(obj, list):
1072 1071 return list(map(PlotterData.roundFloats, obj))
1073 1072 elif isinstance(obj, float):
1074 1073 return round(obj, 2)
@@ -1,695 +1,695
1 1 # Copyright (c) 2012-2020 Jicamarca Radio Observatory
2 2 # All rights reserved.
3 3 #
4 4 # Distributed under the terms of the BSD 3-clause license.
5 5 """Base class to create plot operations
6 6
7 7 """
8 8
9 9 import os
10 10 import sys
11 11 import zmq
12 12 import time
13 13 import numpy
14 14 import datetime
15 15 from collections import deque
16 16 from functools import wraps
17 17 from threading import Thread
18 18 import matplotlib
19 19
20 20 if 'BACKEND' in os.environ:
21 21 matplotlib.use(os.environ['BACKEND'])
22 22 elif 'linux' in sys.platform:
23 23 matplotlib.use("TkAgg")
24 24 elif 'darwin' in sys.platform:
25 25 matplotlib.use('MacOSX')
26 26 else:
27 27 from schainpy.utils import log
28 28 log.warning('Using default Backend="Agg"', 'INFO')
29 29 matplotlib.use('Agg')
30 30
31 31 import matplotlib.pyplot as plt
32 32 from matplotlib.patches import Polygon
33 33 from mpl_toolkits.axes_grid1 import make_axes_locatable
34 34 from matplotlib.ticker import FuncFormatter, LinearLocator, MultipleLocator
35 35
36 36 from schainpy.model.data.jrodata import PlotterData
37 37 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
38 38 from schainpy.utils import log
39 39
40 40 jet_values = matplotlib.pyplot.get_cmap('jet', 100)(numpy.arange(100))[10:90]
41 41 blu_values = matplotlib.pyplot.get_cmap(
42 42 'seismic_r', 20)(numpy.arange(20))[10:15]
43 43 ncmap = matplotlib.colors.LinearSegmentedColormap.from_list(
44 44 'jro', numpy.vstack((blu_values, jet_values)))
45 45 matplotlib.pyplot.register_cmap(cmap=ncmap)
46 46
47 47 CMAPS = [plt.get_cmap(s) for s in ('jro', 'jet', 'viridis',
48 48 'plasma', 'inferno', 'Greys', 'seismic', 'bwr', 'coolwarm')]
49 49
50 50 EARTH_RADIUS = 6.3710e3
51 51
52 52 def ll2xy(lat1, lon1, lat2, lon2):
53 53
54 54 p = 0.017453292519943295
55 55 a = 0.5 - numpy.cos((lat2 - lat1) * p)/2 + numpy.cos(lat1 * p) * \
56 56 numpy.cos(lat2 * p) * (1 - numpy.cos((lon2 - lon1) * p)) / 2
57 57 r = 12742 * numpy.arcsin(numpy.sqrt(a))
58 58 theta = numpy.arctan2(numpy.sin((lon2-lon1)*p)*numpy.cos(lat2*p), numpy.cos(lat1*p)
59 59 * numpy.sin(lat2*p)-numpy.sin(lat1*p)*numpy.cos(lat2*p)*numpy.cos((lon2-lon1)*p))
60 60 theta = -theta + numpy.pi/2
61 61 return r*numpy.cos(theta), r*numpy.sin(theta)
62 62
63 63
64 64 def km2deg(km):
65 65 '''
66 66 Convert distance in km to degrees
67 67 '''
68 68
69 69 return numpy.rad2deg(km/EARTH_RADIUS)
70 70
71 71
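Usage sketch for the two helpers above; the coordinates are arbitrary sample values, not taken from any experiment configuration:

lat1, lon1 = -11.95, -76.87     # assumed reference site
lat2, lon2 = -11.90, -76.80     # assumed target point

x_km, y_km = ll2xy(lat1, lon1, lat2, lon2)    # approximate local x/y offsets in km
x_deg, y_deg = km2deg(x_km), km2deg(y_km)     # the same offsets expressed in degrees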
72 72 def figpause(interval):
73 73 backend = plt.rcParams['backend']
74 74 if backend in matplotlib.rcsetup.interactive_bk:
75 75 figManager = matplotlib._pylab_helpers.Gcf.get_active()
76 76 if figManager is not None:
77 77 canvas = figManager.canvas
78 78 if canvas.figure.stale:
79 79 canvas.draw()
80 80 try:
81 81 canvas.start_event_loop(interval)
82 82 except:
83 83 pass
84 84 return
85 85
86 86 def popup(message):
87 87 '''
88 88 '''
89 89
90 90 fig = plt.figure(figsize=(12, 8), facecolor='r')
91 91 text = '\n'.join([s.strip() for s in message.split(':')])
92 92 fig.text(0.01, 0.5, text, ha='left', va='center',
93 93 size='20', weight='heavy', color='w')
94 94 fig.show()
95 95 figpause(1000)
96 96
97 97
98 98 class Throttle(object):
99 99 '''
100 100 Decorator that prevents a function from being called more than once every
101 101 time period.
102 102 To create a function that cannot be called more than once a minute (extra
103 103 calls within the period are simply skipped):
104 104 @Throttle(minutes=1)
105 105 def foo():
106 106 pass
107 107
108 108 for i in range(10):
109 109 foo()
110 110 print("This function has run %s times." % i)
111 111 '''
112 112
113 113 def __init__(self, seconds=0, minutes=0, hours=0):
114 114 self.throttle_period = datetime.timedelta(
115 115 seconds=seconds, minutes=minutes, hours=hours
116 116 )
117 117
118 118 self.time_of_last_call = datetime.datetime.min
119 119
120 120 def __call__(self, fn):
121 121 @wraps(fn)
122 122 def wrapper(*args, **kwargs):
123 123 coerce = kwargs.pop('coerce', None)
124 124 if coerce:
125 125 self.time_of_last_call = datetime.datetime.now()
126 126 return fn(*args, **kwargs)
127 127 else:
128 128 now = datetime.datetime.now()
129 129 time_since_last_call = now - self.time_of_last_call
130 130 time_left = self.throttle_period - time_since_last_call
131 131
132 132 if time_left > datetime.timedelta(seconds=0):
133 133 return
134 134
135 135 self.time_of_last_call = datetime.datetime.now()
136 136 return fn(*args, **kwargs)
137 137
138 138 return wrapper
139 139
140 140 def apply_throttle(value):
141 141
142 142 @Throttle(seconds=value)
143 143 def fnThrottled(fn):
144 144 fn()
145 145
146 146 return fnThrottled
147 147
148 148
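Sketch of the intended use of apply_throttle, mirroring the pattern used later in Plot.__setup and Plot.run: the returned callable invokes the function passed to it at most once per value seconds, unless called with coerce=True.

import time

def refresh():
    print('refreshing')          # stand-in for an expensive plotting call

throttled = apply_throttle(5)    # allow at most one real call every 5 seconds

for _ in range(3):
    throttled(refresh)           # only the first iteration actually runs refresh()
    time.sleep(1)

throttled(refresh, coerce=True)  # coerce forces the call regardless of the period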
149 149 @MPDecorator
150 150 class Plot(Operation):
151 151 """Base class for Schain plotting operations
152 152
153 153 This class should never be used directly; you must subclass it to create a new operation.
154 154 Child classes must be defined as follows:
155 155
156 156 ExamplePlot(Plot):
157 157
158 158 CODE = 'code'
159 159 colormap = 'jet'
160 160 plot_type = 'pcolor' # options are ('pcolor', 'pcolorbuffer', 'scatter', 'scatterbuffer')
161 161
162 162 def setup(self):
163 163 pass
164 164
165 165 def plot(self):
166 166 pass
167 167
168 168 """
169 169
170 170 CODE = 'Figure'
171 171 colormap = 'jet'
172 172 bgcolor = 'white'
173 173 buffering = True
174 174 __missing = 1E30
175 175
176 176 __attrs__ = ['show', 'save', 'ymin', 'ymax', 'zmin', 'zmax', 'title',
177 177 'showprofile']
178 178
179 179 def __init__(self):
180 180
181 181 Operation.__init__(self)
182 182 self.isConfig = False
183 183 self.isPlotConfig = False
184 184 self.save_time = 0
185 185 self.sender_time = 0
186 186 self.data = None
187 187 self.firsttime = True
188 188 self.sender_queue = deque(maxlen=10)
189 189 self.plots_adjust = {'left': 0.125, 'right': 0.9, 'bottom': 0.15, 'top': 0.9, 'wspace': 0.2, 'hspace': 0.2}
190 190
191 191 def __fmtTime(self, x, pos):
192 192 '''
193 193 '''
194 194
195 195 return '{}'.format(self.getDateTime(x).strftime('%H:%M'))
196 196
197 197 def __setup(self, **kwargs):
198 198 '''
199 199 Initialize variables
200 200 '''
201 201
202 202 self.figures = []
203 203 self.axes = []
204 204 self.cb_axes = []
205 205 self.pf_axes = []
206 206 self.localtime = kwargs.pop('localtime', True)
207 207 self.show = kwargs.get('show', True)
208 208 self.save = kwargs.get('save', False)
209 209 self.save_period = kwargs.get('save_period', 0)
210 210 self.colormap = kwargs.get('colormap', self.colormap)
211 211 self.colormap_coh = kwargs.get('colormap_coh', 'jet')
212 212 self.colormap_phase = kwargs.get('colormap_phase', 'RdBu_r')
213 213 self.colormaps = kwargs.get('colormaps', None)
214 214 self.bgcolor = kwargs.get('bgcolor', self.bgcolor)
215 215 self.showprofile = kwargs.get('showprofile', False)
216 216 self.title = kwargs.get('wintitle', self.CODE.upper())
217 217 self.cb_label = kwargs.get('cb_label', None)
218 218 self.cb_labels = kwargs.get('cb_labels', None)
219 219 self.labels = kwargs.get('labels', None)
220 220 self.xaxis = kwargs.get('xaxis', 'frequency')
221 221 self.zmin = kwargs.get('zmin', None)
222 222 self.zmax = kwargs.get('zmax', None)
223 223 self.zlimits = kwargs.get('zlimits', None)
224 224 self.xmin = kwargs.get('xmin', None)
225 225 self.xmax = kwargs.get('xmax', None)
226 226 self.xrange = kwargs.get('xrange', 12)
227 227 self.xscale = kwargs.get('xscale', None)
228 228 self.ymin = kwargs.get('ymin', None)
229 229 self.ymax = kwargs.get('ymax', None)
230 230 self.yscale = kwargs.get('yscale', None)
231 231 self.xlabel = kwargs.get('xlabel', None)
232 232 self.attr_time = kwargs.get('attr_time', 'utctime')
233 233 self.attr_data = kwargs.get('attr_data', 'data_param')
234 234 self.decimation = kwargs.get('decimation', None)
235 235 self.oneFigure = kwargs.get('oneFigure', True)
236 236 self.width = kwargs.get('width', None)
237 237 self.height = kwargs.get('height', None)
238 238 self.colorbar = kwargs.get('colorbar', True)
239 self.factors = kwargs.get('factors', [1, 1, 1, 1, 1, 1, 1, 1])
239 self.factors = kwargs.get('factors', range(18))
240 240 self.channels = kwargs.get('channels', None)
241 241 self.titles = kwargs.get('titles', [])
242 242 self.polar = False
243 243 self.type = kwargs.get('type', 'iq')
244 244 self.grid = kwargs.get('grid', False)
245 245 self.pause = kwargs.get('pause', False)
246 246 self.save_code = kwargs.get('save_code', self.CODE)
247 247 self.throttle = kwargs.get('throttle', 0)
248 248 self.exp_code = kwargs.get('exp_code', None)
249 249 self.server = kwargs.get('server', False)
250 250 self.sender_period = kwargs.get('sender_period', 60)
251 251 self.tag = kwargs.get('tag', '')
252 252 self.height_index = kwargs.get('height_index', None)
253 253 self.__throttle_plot = apply_throttle(self.throttle)
254 254 code = self.attr_data if self.attr_data else self.CODE
255 255 self.data = PlotterData(self.CODE, self.exp_code, self.localtime)
256 256 self.tmin = kwargs.get('tmin', None)
257 257
258 258 if self.server:
259 259 if not self.server.startswith('tcp://'):
260 260 self.server = 'tcp://{}'.format(self.server)
261 261 log.success(
262 262 'Sending to server: {}'.format(self.server),
263 263 self.name
264 264 )
265 265
266 266 if isinstance(self.attr_data, str):
267 267 self.attr_data = [self.attr_data]
268 268
269 269 def __setup_plot(self):
270 270 '''
271 271 Common setup for all figures, here figures and axes are created
272 272 '''
273 273
274 274 self.setup()
275 275
276 276 self.time_label = 'LT' if self.localtime else 'UTC'
277 277
278 278 if self.width is None:
279 279 self.width = 8
280 280
281 281 self.figures = []
282 282 self.axes = []
283 283 self.cb_axes = []
284 284 self.pf_axes = []
285 285 self.cmaps = []
286 286
287 287 size = '15%' if self.ncols == 1 else '30%'
288 288 pad = '4%' if self.ncols == 1 else '8%'
289 289
290 290 if self.oneFigure:
291 291 if self.height is None:
292 292 self.height = 1.4 * self.nrows + 1
293 293 fig = plt.figure(figsize=(self.width, self.height),
294 294 edgecolor='k',
295 295 facecolor='w')
296 296 self.figures.append(fig)
297 297 for n in range(self.nplots):
298 298 ax = fig.add_subplot(self.nrows, self.ncols,
299 299 n + 1, polar=self.polar)
300 300 ax.tick_params(labelsize=8)
301 301 ax.firsttime = True
302 302 ax.index = 0
303 303 ax.press = None
304 304 self.axes.append(ax)
305 305 if self.showprofile:
306 306 cax = self.__add_axes(ax, size=size, pad=pad)
307 307 cax.tick_params(labelsize=8)
308 308 self.pf_axes.append(cax)
309 309 else:
310 310 if self.height is None:
311 311 self.height = 3
312 312 for n in range(self.nplots):
313 313 fig = plt.figure(figsize=(self.width, self.height),
314 314 edgecolor='k',
315 315 facecolor='w')
316 316 ax = fig.add_subplot(1, 1, 1, polar=self.polar)
317 317 ax.tick_params(labelsize=8)
318 318 ax.firsttime = True
319 319 ax.index = 0
320 320 ax.press = None
321 321 self.figures.append(fig)
322 322 self.axes.append(ax)
323 323 if self.showprofile:
324 324 cax = self.__add_axes(ax, size=size, pad=pad)
325 325 cax.tick_params(labelsize=8)
326 326 self.pf_axes.append(cax)
327 327
328 328 for n in range(self.nrows):
329 329 if self.colormaps is not None:
330 330 cmap = plt.get_cmap(self.colormaps[n])
331 331 else:
332 332 cmap = plt.get_cmap(self.colormap)
333 333 cmap.set_bad(self.bgcolor, 1.)
334 334 self.cmaps.append(cmap)
335 335
336 336 def __add_axes(self, ax, size='30%', pad='8%'):
337 337 '''
338 338 Add new axes to the given figure
339 339 '''
340 340 divider = make_axes_locatable(ax)
341 341 nax = divider.new_horizontal(size=size, pad=pad)
342 342 ax.figure.add_axes(nax)
343 343 return nax
344 344
345 345 def fill_gaps(self, x_buffer, y_buffer, z_buffer):
346 346 '''
347 347 Create a masked array for missing data
348 348 '''
349 349 if x_buffer.shape[0] < 2:
350 350 return x_buffer, y_buffer, z_buffer
351 351
352 352 deltas = x_buffer[1:] - x_buffer[0:-1]
353 353 x_median = numpy.median(deltas)
354 354
355 355 index = numpy.where(deltas > 5 * x_median)
356 356
357 357 if len(index[0]) != 0:
358 358 z_buffer[::, index[0], ::] = self.__missing
359 359 z_buffer = numpy.ma.masked_inside(z_buffer,
360 360 0.99 * self.__missing,
361 361 1.01 * self.__missing)
362 362
363 363 return x_buffer, y_buffer, z_buffer
364 364
365 365 def decimate(self):
366 366
367 367 # dx = int(len(self.x)/self.__MAXNUMX) + 1
368 368 dy = int(len(self.y) / self.decimation) + 1
369 369
370 370 # x = self.x[::dx]
371 371 x = self.x
372 372 y = self.y[::dy]
373 373 z = self.z[::, ::, ::dy]
374 374
375 375 return x, y, z
376 376
377 377 def format(self):
378 378 '''
379 379 Set min and max values, labels, ticks and titles
380 380 '''
381 381
382 382 for n, ax in enumerate(self.axes):
383 383 if ax.firsttime:
384 384 if self.xaxis != 'time':
385 385 xmin = self.xmin
386 386 xmax = self.xmax
387 387 else:
388 388 xmin = self.tmin
389 389 xmax = self.tmin + self.xrange*60*60
390 390 ax.xaxis.set_major_formatter(FuncFormatter(self.__fmtTime))
391 391 ax.xaxis.set_major_locator(LinearLocator(9))
392 392 ymin = self.ymin if self.ymin is not None else numpy.nanmin(self.y[numpy.isfinite(self.y)])
393 393 ymax = self.ymax if self.ymax is not None else numpy.nanmax(self.y[numpy.isfinite(self.y)])
394 394 ax.set_facecolor(self.bgcolor)
395 395 if self.xscale:
396 396 ax.xaxis.set_major_formatter(FuncFormatter(
397 397 lambda x, pos: '{0:g}'.format(x*self.xscale)))
398 398 if self.yscale:
399 399 ax.yaxis.set_major_formatter(FuncFormatter(
400 400 lambda x, pos: '{0:g}'.format(x*self.yscale)))
401 401 if self.xlabel is not None:
402 402 ax.set_xlabel(self.xlabel)
403 403 if self.ylabel is not None:
404 404 ax.set_ylabel(self.ylabel)
405 405 if self.showprofile:
406 406 self.pf_axes[n].set_ylim(ymin, ymax)
407 407 self.pf_axes[n].set_xlim(self.zmin, self.zmax)
408 408 self.pf_axes[n].set_xlabel('dB')
409 409 self.pf_axes[n].grid(b=True, axis='x')
410 410 [tick.set_visible(False)
411 411 for tick in self.pf_axes[n].get_yticklabels()]
412 412 if self.colorbar:
413 413 ax.cbar = plt.colorbar(
414 414 ax.plt, ax=ax, fraction=0.05, pad=0.02, aspect=10)
415 415 ax.cbar.ax.tick_params(labelsize=8)
416 416 ax.cbar.ax.press = None
417 417 if self.cb_label:
418 418 ax.cbar.set_label(self.cb_label, size=8)
419 419 elif self.cb_labels:
420 420 ax.cbar.set_label(self.cb_labels[n], size=8)
421 421 else:
422 422 ax.cbar = None
423 423 ax.set_xlim(xmin, xmax)
424 424 ax.set_ylim(ymin, ymax)
425 425 ax.firsttime = False
426 426 if self.grid:
427 427 ax.grid(True)
428 428 if not self.polar:
429 429 ax.set_title('{} {} {}'.format(
430 430 self.titles[n],
431 431 self.getDateTime(self.data.max_time).strftime(
432 432 '%Y-%m-%d %H:%M:%S'),
433 433 self.time_label),
434 434 size=8)
435 435 else:
436 436 ax.set_title('{}'.format(self.titles[n]), size=8)
437 437 ax.set_ylim(0, 90)
438 438 ax.set_yticks(numpy.arange(0, 90, 20))
439 439 ax.yaxis.labelpad = 40
440 440
441 441 if self.firsttime:
442 442 for n, fig in enumerate(self.figures):
443 443 fig.subplots_adjust(**self.plots_adjust)
444 444 self.firsttime = False
445 445
446 446 def clear_figures(self):
447 447 '''
448 448 Reset axes for redraw plots
449 449 '''
450 450
451 451 for ax in self.axes+self.pf_axes+self.cb_axes:
452 452 ax.clear()
453 453 ax.firsttime = True
454 454 if hasattr(ax, 'cbar') and ax.cbar:
455 455 ax.cbar.remove()
456 456
457 457 def __plot(self):
458 458 '''
459 459 Main function to plot, format and save figures
460 460 '''
461 461
462 462 self.plot()
463 463 self.format()
464 464
465 465 for n, fig in enumerate(self.figures):
466 466 if self.nrows == 0 or self.nplots == 0:
467 467 log.warning('No data', self.name)
468 468 fig.text(0.5, 0.5, 'No Data', fontsize='large', ha='center')
469 469 fig.canvas.manager.set_window_title(self.CODE)
470 470 continue
471 471
472 472 fig.canvas.manager.set_window_title('{} - {}'.format(self.title,
473 473 self.getDateTime(self.data.max_time).strftime('%Y/%m/%d')))
474 474 fig.canvas.draw()
475 475 if self.show:
476 476 fig.show()
477 477 figpause(0.01)
478 478
479 479 if self.save:
480 480 self.save_figure(n)
481 481
482 482 if self.server:
483 483 self.send_to_server()
484 484
485 485 def __update(self, dataOut, timestamp):
486 486 '''
487 487 '''
488 488
489 489 metadata = {
490 490 'yrange': dataOut.heightList,
491 491 'interval': dataOut.timeInterval,
492 492 'channels': dataOut.channelList
493 493 }
494 494
495 495 data, meta = self.update(dataOut)
496 496 metadata.update(meta)
497 497 self.data.update(data, timestamp, metadata)
498 498
499 499 def save_figure(self, n):
500 500 '''
501 501 '''
502 502
503 503 if (self.data.max_time - self.save_time) <= self.save_period:
504 504 return
505 505
506 506 self.save_time = self.data.max_time
507 507
508 508 fig = self.figures[n]
509 509
510 510 if self.throttle == 0:
511 511 figname = os.path.join(
512 512 self.save,
513 513 self.save_code,
514 514 '{}_{}.png'.format(
515 515 self.save_code,
516 516 self.getDateTime(self.data.max_time).strftime(
517 517 '%Y%m%d_%H%M%S'
518 518 ),
519 519 )
520 520 )
521 521 log.log('Saving figure: {}'.format(figname), self.name)
522 522 if not os.path.isdir(os.path.dirname(figname)):
523 523 os.makedirs(os.path.dirname(figname))
524 524 fig.savefig(figname)
525 525
526 526 figname = os.path.join(
527 527 self.save,
528 528 '{}_{}.png'.format(
529 529 self.save_code,
530 530 self.getDateTime(self.data.min_time).strftime(
531 531 '%Y%m%d'
532 532 ),
533 533 )
534 534 )
535 535
536 536 log.log('Saving figure: {}'.format(figname), self.name)
537 537 if not os.path.isdir(os.path.dirname(figname)):
538 538 os.makedirs(os.path.dirname(figname))
539 539 fig.savefig(figname)
540 540
541 541 def send_to_server(self):
542 542 '''
543 543 '''
544 544
545 545 if self.exp_code == None:
546 546 log.warning('Missing `exp_code` skipping sending to server...')
547 547
548 548 last_time = self.data.max_time
549 549 interval = last_time - self.sender_time
550 550 if interval < self.sender_period:
551 551 return
552 552
553 553 self.sender_time = last_time
554 554
555 555 attrs = ['titles', 'zmin', 'zmax', 'tag', 'ymin', 'ymax']
556 556 for attr in attrs:
557 557 value = getattr(self, attr)
558 558 if value:
559 559 if isinstance(value, (numpy.float32, numpy.float64)):
560 560 value = round(float(value), 2)
561 561 self.data.meta[attr] = value
562 562 if self.colormap == 'jet':
563 563 self.data.meta['colormap'] = 'Jet'
564 564 elif 'RdBu' in self.colormap:
565 565 self.data.meta['colormap'] = 'RdBu'
566 566 else:
567 567 self.data.meta['colormap'] = 'Viridis'
568 568 self.data.meta['interval'] = int(interval)
569 569
570 570 self.sender_queue.append(last_time)
571 571
572 572 while True:
573 573 try:
574 574 tm = self.sender_queue.popleft()
575 575 except IndexError:
576 576 break
577 577 msg = self.data.jsonify(tm, self.save_code, self.plot_type)
578 578 self.socket.send_string(msg)
579 579 socks = dict(self.poll.poll(2000))
580 580 if socks.get(self.socket) == zmq.POLLIN:
581 581 reply = self.socket.recv_string()
582 582 if reply == 'ok':
583 583 log.log("Response from server ok", self.name)
584 584 time.sleep(0.1)
585 585 continue
586 586 else:
587 587 log.warning(
588 588 "Malformed reply from server: {}".format(reply), self.name)
589 589 else:
590 590 log.warning(
591 591 "No response from server, retrying...", self.name)
592 592 self.sender_queue.appendleft(tm)
593 593 self.socket.setsockopt(zmq.LINGER, 0)
594 594 self.socket.close()
595 595 self.poll.unregister(self.socket)
596 596 self.socket = self.context.socket(zmq.REQ)
597 597 self.socket.connect(self.server)
598 598 self.poll.register(self.socket, zmq.POLLIN)
599 599 break
600 600
601 601 def setup(self):
602 602 '''
603 603 This method should be implemented in the child class, the following
604 604 attributes should be set:
605 605
606 606 self.nrows: number of rows
607 607 self.ncols: number of cols
608 608 self.nplots: number of plots (channels or pairs)
609 609 self.ylabel: label for Y axes
610 610 self.titles: list of axes title
611 611
612 612 '''
613 613 raise NotImplementedError
614 614
615 615 def plot(self):
616 616 '''
617 617 Must be defined in the child class, the actual plotting method
618 618 '''
619 619 raise NotImplementedError
620 620
621 621 def update(self, dataOut):
622 622 '''
623 623 Must be defined in the child class, update self.data with new data
624 624 '''
625 625
626 626 data = {
627 627 self.CODE: getattr(dataOut, 'data_{}'.format(self.CODE))
628 628 }
629 629 meta = {}
630 630
631 631 return data, meta
632 632
633 633 def run(self, dataOut, **kwargs):
634 634 '''
635 635 Main plotting routine
636 636 '''
637 637
638 638 if self.isConfig is False:
639 639 self.__setup(**kwargs)
640 640
641 641 if self.localtime:
642 642 self.getDateTime = datetime.datetime.fromtimestamp
643 643 else:
644 644 self.getDateTime = datetime.datetime.utcfromtimestamp
645 645
646 646 self.data.setup()
647 647 self.isConfig = True
648 648 if self.server:
649 649 self.context = zmq.Context()
650 650 self.socket = self.context.socket(zmq.REQ)
651 651 self.socket.connect(self.server)
652 652 self.poll = zmq.Poller()
653 653 self.poll.register(self.socket, zmq.POLLIN)
654 654
655 655 tm = getattr(dataOut, self.attr_time)
656 656
657 657 if self.data and 'time' in self.xaxis and (tm - self.tmin) >= self.xrange*60*60:
658 658 self.save_time = tm
659 659 self.__plot()
660 660 self.tmin += self.xrange*60*60
661 661 self.data.setup()
662 662 self.clear_figures()
663 663
664 664 self.__update(dataOut, tm)
665 665
666 666 if self.isPlotConfig is False:
667 667 self.__setup_plot()
668 668 self.isPlotConfig = True
669 669 if self.xaxis == 'time':
670 670 dt = self.getDateTime(tm)
671 671 if self.xmin is None:
672 672 self.tmin = tm
673 673 self.xmin = dt.hour
674 674 minutes = (self.xmin-int(self.xmin)) * 60
675 675 seconds = (minutes - int(minutes)) * 60
676 676 self.tmin = (dt.replace(hour=int(self.xmin), minute=int(minutes), second=int(seconds)) -
677 677 datetime.datetime(1970, 1, 1)).total_seconds()
678 678 if self.localtime:
679 679 self.tmin += time.timezone
680 680
681 681 if self.xmin is not None and self.xmax is not None:
682 682 self.xrange = self.xmax - self.xmin
683 683
684 684 if self.throttle == 0:
685 685 self.__plot()
686 686 else:
687 687 self.__throttle_plot(self.__plot)#, coerce=coerce)
688 688
689 689 def close(self):
690 690
691 691 if self.data and not self.data.flagNoData:
692 692 self.save_time = 0
693 693 self.__plot()
694 694 if self.data and not self.data.flagNoData and self.pause:
695 695 figpause(10)
@@ -1,356 +1,358
1 1 import os
2 2 import datetime
3 3 import numpy
4 4
5 5 from schainpy.model.graphics.jroplot_base import Plot, plt
6 6 from schainpy.model.graphics.jroplot_spectra import SpectraPlot, RTIPlot, CoherencePlot
7 7 from schainpy.utils import log
8 8
9 9 EARTH_RADIUS = 6.3710e3
10 10
11 11
12 12 def ll2xy(lat1, lon1, lat2, lon2):
13 13
14 14 p = 0.017453292519943295
15 15 a = 0.5 - numpy.cos((lat2 - lat1) * p)/2 + numpy.cos(lat1 * p) * \
16 16 numpy.cos(lat2 * p) * (1 - numpy.cos((lon2 - lon1) * p)) / 2
17 17 r = 12742 * numpy.arcsin(numpy.sqrt(a))
18 18 theta = numpy.arctan2(numpy.sin((lon2-lon1)*p)*numpy.cos(lat2*p), numpy.cos(lat1*p)
19 19 * numpy.sin(lat2*p)-numpy.sin(lat1*p)*numpy.cos(lat2*p)*numpy.cos((lon2-lon1)*p))
20 20 theta = -theta + numpy.pi/2
21 21 return r*numpy.cos(theta), r*numpy.sin(theta)
22 22
23 23
24 24 def km2deg(km):
25 25 '''
26 26 Convert distance in km to degrees
27 27 '''
28 28
29 29 return numpy.rad2deg(km/EARTH_RADIUS)
30 30
31 31
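A quick sanity check of the two helpers above (annotation, not part of the diff; the Lima-area coordinates are made up and values are rounded). It assumes the module context so that ll2xy and km2deg are in scope:

    # ~111.19 km corresponds to roughly one degree of arc on a 6371 km sphere
    print(round(km2deg(111.19), 3))          # ~1.0

    # ll2xy gives the (east, north) offset in km of point 2 relative to point 1;
    # one degree of latitude due north is again ~111 km
    x, y = ll2xy(-12.0, -77.0, -11.0, -77.0)
    print(round(x, 1), round(y, 1))          # ~0.0 ~111.2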
32 32
33 33 class SpectralMomentsPlot(SpectraPlot):
34 34 '''
35 35 Plot for Spectral Moments
36 36 '''
37 37 CODE = 'spc_moments'
38 38 colormap = 'jet'
39 39 plot_type = 'pcolor'
40 40
41 41
42 42 class SnrPlot(RTIPlot):
43 43 '''
44 44 Plot for SNR Data
45 45 '''
46 46
47 47 CODE = 'snr'
48 48 colormap = 'jet'
49 49
50 50 def update(self, dataOut):
51
51 if len(self.channelList) == 0:
52 self.channelList = dataOut.channelList
52 53 data = {
53 54 'snr': 10*numpy.log10(dataOut.data_snr)
54 55 }
55 56
56 57 return data, {}
57 58
58 59 class DopplerPlot(RTIPlot):
59 60 '''
60 61 Plot for DOPPLER Data (1st moment)
61 62 '''
62 63
63 64 CODE = 'dop'
64 65 colormap = 'jet'
65 66
66 67 def update(self, dataOut):
67 68
68 69 data = {
69 70 'dop': 10*numpy.log10(dataOut.data_dop)
70 71 }
71 72
72 73 return data, {}
73 74
74 75 class PowerPlot(RTIPlot):
75 76 '''
76 77 Plot for Power Data (0 moment)
77 78 '''
78 79
79 80 CODE = 'pow'
80 81 colormap = 'jet'
81 82
82 83 def update(self, dataOut):
83
84 if len(self.channelList) == 0:
85 self.channelList = dataOut.channelList
84 86 data = {
85 87 'pow': 10*numpy.log10(dataOut.data_pow)
86 88 }
87 print("data",data)
89 #print("data",data)
88 90 return data, {}
89 91
90 92 class SpectralWidthPlot(RTIPlot):
91 93 '''
92 94 Plot for Spectral Width Data (2nd moment)
93 95 '''
94 96
95 97 CODE = 'width'
96 98 colormap = 'jet'
97 99
98 100 def update(self, dataOut):
99 101
100 102 data = {
101 103 'width': dataOut.data_width
102 104 }
103 105
104 106 return data, {}
105 107
106 108 class SkyMapPlot(Plot):
107 109 '''
108 110 Plot for meteors detection data
109 111 '''
110 112
111 113 CODE = 'param'
112 114
113 115 def setup(self):
114 116
115 117 self.ncols = 1
116 118 self.nrows = 1
117 119 self.width = 7.2
118 120 self.height = 7.2
119 121 self.nplots = 1
120 122 self.xlabel = 'Zonal Zenith Angle (deg)'
121 123 self.ylabel = 'Meridional Zenith Angle (deg)'
122 124 self.polar = True
123 125 self.ymin = -180
124 126 self.ymax = 180
125 127 self.colorbar = False
126 128
127 129 def plot(self):
128 130
129 131 arrayParameters = numpy.concatenate(self.data['param'])
130 132 error = arrayParameters[:, -1]
131 133 indValid = numpy.where(error == 0)[0]
132 134 finalMeteor = arrayParameters[indValid, :]
133 135 finalAzimuth = finalMeteor[:, 3]
134 136 finalZenith = finalMeteor[:, 4]
135 137
136 138 x = finalAzimuth * numpy.pi / 180
137 139 y = finalZenith
138 140
139 141 ax = self.axes[0]
140 142
141 143 if ax.firsttime:
142 144 ax.plot = ax.plot(x, y, 'bo', markersize=5)[0]
143 145 else:
144 146 ax.plot.set_data(x, y)
145 147
146 148 dt1 = self.getDateTime(self.data.min_time).strftime('%y/%m/%d %H:%M:%S')
147 149 dt2 = self.getDateTime(self.data.max_time).strftime('%y/%m/%d %H:%M:%S')
148 150 title = 'Meteor Detection Sky Map\n %s - %s \n Number of events: %5.0f\n' % (dt1,
149 151 dt2,
150 152 len(x))
151 153 self.titles[0] = title
152 154
153 155
154 156 class GenericRTIPlot(Plot):
155 157 '''
156 158 Plot for data_xxxx object
157 159 '''
158 160
159 161 CODE = 'param'
160 162 colormap = 'viridis'
161 163 plot_type = 'pcolorbuffer'
162 164
163 165 def setup(self):
164 166 self.xaxis = 'time'
165 167 self.ncols = 1
166 168 self.nrows = self.data.shape('param')[0]
167 169 self.nplots = self.nrows
168 170 self.plots_adjust.update({'hspace':0.8, 'left': 0.1, 'bottom': 0.08, 'right':0.95, 'top': 0.95})
169 171
170 172 if not self.xlabel:
171 173 self.xlabel = 'Time'
172 174
173 175 self.ylabel = 'Height [km]'
174 176 if not self.titles:
175 177 self.titles = ['Param {}'.format(x) for x in range(self.nrows)]
176 178
177 179 def update(self, dataOut):
178 180
179 181 data = {
180 182 'param' : numpy.concatenate([getattr(dataOut, attr) for attr in self.attr_data], axis=0)
181 183 }
182 184
183 185 meta = {}
184 186
185 187 return data, meta
186 188
187 189 def plot(self):
188 190 # self.data.normalize_heights()
189 191 self.x = self.data.times
190 192 self.y = self.data.yrange
191 193 self.z = self.data['param']
192 194
193 195 self.z = numpy.ma.masked_invalid(self.z)
194 196
195 197 if self.decimation is None:
196 198 x, y, z = self.fill_gaps(self.x, self.y, self.z)
197 199 else:
198 200 x, y, z = self.fill_gaps(*self.decimate())
199 201
200 202 for n, ax in enumerate(self.axes):
201 203
202 204 self.zmax = self.zmax if self.zmax is not None else numpy.max(
203 205 self.z[n])
204 206 self.zmin = self.zmin if self.zmin is not None else numpy.min(
205 207 self.z[n])
206 208
207 209 if ax.firsttime:
208 210 if self.zlimits is not None:
209 211 self.zmin, self.zmax = self.zlimits[n]
210 212
211 213 ax.plt = ax.pcolormesh(x, y, z[n].T * self.factors[n],
212 214 vmin=self.zmin,
213 215 vmax=self.zmax,
214 216 cmap=self.cmaps[n]
215 217 )
216 218 else:
217 219 if self.zlimits is not None:
218 220 self.zmin, self.zmax = self.zlimits[n]
219 221 ax.collections.remove(ax.collections[0])
220 222 ax.plt = ax.pcolormesh(x, y, z[n].T * self.factors[n],
221 223 vmin=self.zmin,
222 224 vmax=self.zmax,
223 225 cmap=self.cmaps[n]
224 226 )
225 227
226 228
227 229 class PolarMapPlot(Plot):
228 230 '''
229 231 Plot for weather radar
230 232 '''
231 233
232 234 CODE = 'param'
233 235 colormap = 'seismic'
234 236
235 237 def setup(self):
236 238 self.ncols = 1
237 239 self.nrows = 1
238 240 self.width = 9
239 241 self.height = 8
240 242 self.mode = self.data.meta['mode']
241 243 if self.channels is not None:
242 244 self.nplots = len(self.channels)
243 245 self.nrows = len(self.channels)
244 246 else:
245 247 self.nplots = self.data.shape(self.CODE)[0]
246 248 self.nrows = self.nplots
247 249 self.channels = list(range(self.nplots))
248 250 if self.mode == 'E':
249 251 self.xlabel = 'Longitude'
250 252 self.ylabel = 'Latitude'
251 253 else:
252 254 self.xlabel = 'Range (km)'
253 255 self.ylabel = 'Height (km)'
254 256 self.bgcolor = 'white'
255 257 self.cb_labels = self.data.meta['units']
256 258 self.lat = self.data.meta['latitude']
257 259 self.lon = self.data.meta['longitude']
258 260 self.xmin, self.xmax = float(
259 261 km2deg(self.xmin) + self.lon), float(km2deg(self.xmax) + self.lon)
260 262 self.ymin, self.ymax = float(
261 263 km2deg(self.ymin) + self.lat), float(km2deg(self.ymax) + self.lat)
262 264 # self.polar = True
263 265
264 266 def plot(self):
265 267
266 268 for n, ax in enumerate(self.axes):
267 269 data = self.data['param'][self.channels[n]]
268 270
269 271 zeniths = numpy.linspace(
270 272 0, self.data.meta['max_range'], data.shape[1])
271 273 if self.mode == 'E':
272 274 azimuths = -numpy.radians(self.data.yrange)+numpy.pi/2
273 275 r, theta = numpy.meshgrid(zeniths, azimuths)
274 276 x, y = r*numpy.cos(theta)*numpy.cos(numpy.radians(self.data.meta['elevation'])), r*numpy.sin(
275 277 theta)*numpy.cos(numpy.radians(self.data.meta['elevation']))
276 278 x = km2deg(x) + self.lon
277 279 y = km2deg(y) + self.lat
278 280 else:
279 281 azimuths = numpy.radians(self.data.yrange)
280 282 r, theta = numpy.meshgrid(zeniths, azimuths)
281 283 x, y = r*numpy.cos(theta), r*numpy.sin(theta)
282 284 self.y = zeniths
283 285
284 286 if ax.firsttime:
285 287 if self.zlimits is not None:
286 288 self.zmin, self.zmax = self.zlimits[n]
287 289 ax.plt = ax.pcolormesh( # r, theta, numpy.ma.array(data, mask=numpy.isnan(data)),
288 290 x, y, numpy.ma.array(data, mask=numpy.isnan(data)),
289 291 vmin=self.zmin,
290 292 vmax=self.zmax,
291 293 cmap=self.cmaps[n])
292 294 else:
293 295 if self.zlimits is not None:
294 296 self.zmin, self.zmax = self.zlimits[n]
295 297 ax.collections.remove(ax.collections[0])
296 298 ax.plt = ax.pcolormesh( # r, theta, numpy.ma.array(data, mask=numpy.isnan(data)),
297 299 x, y, numpy.ma.array(data, mask=numpy.isnan(data)),
298 300 vmin=self.zmin,
299 301 vmax=self.zmax,
300 302 cmap=self.cmaps[n])
301 303
302 304 if self.mode == 'A':
303 305 continue
304 306
305 307 # plot district names
306 308 f = open('/data/workspace/schain_scripts/distrito.csv')
307 309 for line in f:
308 310 label, lon, lat = [s.strip() for s in line.split(',') if s]
309 311 lat = float(lat)
310 312 lon = float(lon)
311 313 # ax.plot(lon, lat, '.b', ms=2)
312 314 ax.text(lon, lat, label.decode('utf8'), ha='center',
313 315 va='bottom', size='8', color='black')
314 316
315 317 # plot boundaries (limites)
316 318 limites = []
317 319 tmp = []
318 320 for line in open('/data/workspace/schain_scripts/lima.csv'):
319 321 if '#' in line:
320 322 if tmp:
321 323 limites.append(tmp)
322 324 tmp = []
323 325 continue
324 326 values = line.strip().split(',')
325 327 tmp.append((float(values[0]), float(values[1])))
326 328 for points in limites:
327 329 ax.add_patch(
328 330 Polygon(points, ec='k', fc='none', ls='--', lw=0.5))
329 331
330 332 # plot river basins (cuencas)
331 333 for cuenca in ('rimac', 'lurin', 'mala', 'chillon', 'chilca', 'chancay-huaral'):
332 334 f = open('/data/workspace/schain_scripts/{}.csv'.format(cuenca))
333 335 values = [line.strip().split(',') for line in f]
334 336 points = [(float(s[0]), float(s[1])) for s in values]
335 337 ax.add_patch(Polygon(points, ec='b', fc='none'))
336 338
337 339 # plot grid
338 340 for r in (15, 30, 45, 60):
339 341 ax.add_artist(plt.Circle((self.lon, self.lat),
340 342 km2deg(r), color='0.6', fill=False, lw=0.2))
341 343 ax.text(
342 344 self.lon + (km2deg(r))*numpy.cos(60*numpy.pi/180),
343 345 self.lat + (km2deg(r))*numpy.sin(60*numpy.pi/180),
344 346 '{}km'.format(r),
345 347 ha='center', va='bottom', size='8', color='0.6', weight='heavy')
346 348
347 349 if self.mode == 'E':
348 350 title = 'El={}$^\circ$'.format(self.data.meta['elevation'])
349 351 label = 'E{:02d}'.format(int(self.data.meta['elevation']))
350 352 else:
351 353 title = 'Az={}$^\circ$'.format(self.data.meta['azimuth'])
352 354 label = 'A{:02d}'.format(int(self.data.meta['azimuth']))
353 355
354 356 self.save_labels = ['{}-{}'.format(lbl, label) for lbl in self.labels]
355 357 self.titles = ['{} {}'.format(
356 358 self.data.parameters[x], title) for x in self.channels]
@@ -1,727 +1,727
1 1 # Copyright (c) 2012-2020 Jicamarca Radio Observatory
2 2 # All rights reserved.
3 3 #
4 4 # Distributed under the terms of the BSD 3-clause license.
5 5 """Classes to plot Spectra data
6 6
7 7 """
8 8
9 9 import os
10 10 import numpy
11 11
12 12 from schainpy.model.graphics.jroplot_base import Plot, plt, log
13
13 from itertools import combinations
14 14
15 15 class SpectraPlot(Plot):
16 16 '''
17 17 Plot for Spectra data
18 18 '''
19 19
20 20 CODE = 'spc'
21 21 colormap = 'jet'
22 22 plot_type = 'pcolor'
23 23 buffering = False
24 24 channelList = []
25 25
26 26 def setup(self):
27 27 self.nplots = len(self.data.channels)
28 28 self.ncols = int(numpy.sqrt(self.nplots) + 0.9)
29 29 self.nrows = int((1.0 * self.nplots / self.ncols) + 0.9)
30 30 self.height = 2.6 * self.nrows
31 31
32 32 self.cb_label = 'dB'
33 33 if self.showprofile:
34 34 self.width = 4 * self.ncols
35 35 else:
36 36 self.width = 3.5 * self.ncols
37 37 self.plots_adjust.update({'wspace': 0.4, 'hspace':0.4, 'left': 0.1, 'right': 0.9, 'bottom': 0.08})
38 38 self.ylabel = 'Range [km]'
39 39
40 40 def update(self, dataOut):
41 41 if self.channelList == None:
42 42 self.channelList = dataOut.channelList
43 43 data = {}
44 44 meta = {}
45 45 spc = 10*numpy.log10(dataOut.data_spc/dataOut.normFactor)
46
46 47 data['spc'] = spc
47 48 data['rti'] = dataOut.getPower()
48 49 data['noise'] = 10*numpy.log10(dataOut.getNoise()/dataOut.normFactor)
49 50 meta['xrange'] = (dataOut.getFreqRange(1)/1000., dataOut.getAcfRange(1), dataOut.getVelRange(1))
50 51 if self.CODE == 'spc_moments':
51 52 data['moments'] = dataOut.moments
52 53
53 54 return data, meta
54 55
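The dB scaling used in update() above is the usual 10*log10 of power over the normalization factor; a one-line numeric check with made-up values (annotation, not part of the diff):

    import numpy
    raw, norm = 2.0e6, 1.0e3                       # hypothetical spectral power and normFactor
    print(round(10 * numpy.log10(raw / norm), 2))  # 33.01 dB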
55 56 def plot(self):
56 57 if self.xaxis == "frequency":
57 58 x = self.data.xrange[0]
58 59 self.xlabel = "Frequency (kHz)"
59 60 elif self.xaxis == "time":
60 61 x = self.data.xrange[1]
61 62 self.xlabel = "Time (ms)"
62 63 else:
63 64 x = self.data.xrange[2]
64 65 self.xlabel = "Velocity (m/s)"
65 66
66 67 if self.CODE == 'spc_moments':
67 68 x = self.data.xrange[2]
68 69 self.xlabel = "Velocity (m/s)"
69 70
70 71 self.titles = []
71 72
72 73 y = self.data.yrange
73 74 self.y = y
74 75
75 76 data = self.data[-1]
76 77 z = data['spc']
77 78
78 79 for n, ax in enumerate(self.axes):
79 80 noise = data['noise'][n]
80 81 if self.CODE == 'spc_moments':
81 82 mean = data['moments'][n, 1]
82 83 if ax.firsttime:
83 84 self.xmax = self.xmax if self.xmax else numpy.nanmax(x)
84 85 self.xmin = self.xmin if self.xmin else -self.xmax
85 86 self.zmin = self.zmin if self.zmin else numpy.nanmin(z)
86 87 self.zmax = self.zmax if self.zmax else numpy.nanmax(z)
87 88 ax.plt = ax.pcolormesh(x, y, z[n].T,
88 89 vmin=self.zmin,
89 90 vmax=self.zmax,
90 91 cmap=plt.get_cmap(self.colormap)
91 92 )
92 93
93 94 if self.showprofile:
94 95 ax.plt_profile = self.pf_axes[n].plot(
95 96 data['rti'][n], y)[0]
96 97 ax.plt_noise = self.pf_axes[n].plot(numpy.repeat(noise, len(y)), y,
97 98 color="k", linestyle="dashed", lw=1)[0]
98 99 if self.CODE == 'spc_moments':
99 100 ax.plt_mean = ax.plot(mean, y, color='k')[0]
100 101 else:
101 102 ax.plt.set_array(z[n].T.ravel())
102 103 if self.showprofile:
103 104 ax.plt_profile.set_data(data['rti'][n], y)
104 105 ax.plt_noise.set_data(numpy.repeat(noise, len(y)), y)
105 106 if self.CODE == 'spc_moments':
106 107 ax.plt_mean.set_data(mean, y)
107 108 self.titles.append('CH {}: {:3.2f}dB'.format(self.channelList[n], noise))
108 109
109 110
110 111 class CrossSpectraPlot(Plot):
111 112
112 113 CODE = 'cspc'
113 114 colormap = 'jet'
114 115 plot_type = 'pcolor'
115 116 zmin_coh = None
116 117 zmax_coh = None
117 118 zmin_phase = None
118 119 zmax_phase = None
119 120 realChannels = None
120 121 crossPairs = None
121 122
122 123 def setup(self):
123 124
124 125 self.ncols = 4
125 126 self.nplots = len(self.data.pairs) * 2
126 127 self.nrows = int((1.0 * self.nplots / self.ncols) + 0.9)
127 128 self.width = 3.1 * self.ncols
128 129 self.height = 2.6 * self.nrows
129 130 self.ylabel = 'Range [km]'
130 131 self.showprofile = False
131 132 self.plots_adjust.update({'left': 0.08, 'right': 0.92, 'wspace': 0.5, 'hspace':0.4, 'top':0.95, 'bottom': 0.08})
132 133
133 134 def update(self, dataOut):
134 135
135 136 data = {}
136 137 meta = {}
137 138
138 139 spc = dataOut.data_spc
139 140 cspc = dataOut.data_cspc
140 141 meta['xrange'] = (dataOut.getFreqRange(1)/1000., dataOut.getAcfRange(1), dataOut.getVelRange(1))
141 142 rawPairs = list(combinations(list(range(dataOut.nChannels)), 2))
142 #print(rawPairs)
143 143 meta['pairs'] = rawPairs
144 144
145 145 if self.crossPairs == None:
146 146 self.crossPairs = dataOut.pairsList
147 147
148 148 tmp = []
149 149
150 150 for n, pair in enumerate(meta['pairs']):
151 151
152 152 out = cspc[n] / numpy.sqrt(spc[pair[0]] * spc[pair[1]])
153 153 coh = numpy.abs(out)
154 154 phase = numpy.arctan2(out.imag, out.real) * 180 / numpy.pi
155 155 tmp.append(coh)
156 156 tmp.append(phase)
157 157
158 158 data['cspc'] = numpy.array(tmp)
159 159
160 160 return data, meta
161 161
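A minimal numeric sketch (annotation, not part of the diff) of the coherence/phase computation in update() above, for a single (pair, frequency, height) bin with made-up values:

    import numpy

    cspc_bin = 3.0 + 4.0j     # cross-spectrum value for channels (0, 1)
    spc0, spc1 = 50.0, 2.0    # auto-spectra of each channel

    out = cspc_bin / numpy.sqrt(spc0 * spc1)
    coh = numpy.abs(out)                                    # coherence, 0..1
    phase = numpy.arctan2(out.imag, out.real) * 180 / numpy.pi

    print(round(coh, 2), round(phase, 1))   # 0.5 53.1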
162 162 def plot(self):
163 163
164 164 if self.xaxis == "frequency":
165 165 x = self.data.xrange[0]
166 166 self.xlabel = "Frequency (kHz)"
167 167 elif self.xaxis == "time":
168 168 x = self.data.xrange[1]
169 169 self.xlabel = "Time (ms)"
170 170 else:
171 171 x = self.data.xrange[2]
172 172 self.xlabel = "Velocity (m/s)"
173 173
174 174 self.titles = []
175 175
176 176 y = self.data.yrange
177 177 self.y = y
178 178
179 179 data = self.data[-1]
180 180 cspc = data['cspc']
181 181 #print(self.crossPairs)
182 182 for n in range(len(self.data.pairs)):
183 183 #pair = self.data.pairs[n]
184 184 pair = self.crossPairs[n]
185 185
186 186 coh = cspc[n*2]
187 187 phase = cspc[n*2+1]
188 188 ax = self.axes[2 * n]
189 189
190 190 if ax.firsttime:
191 191 ax.plt = ax.pcolormesh(x, y, coh.T,
192 192 vmin=0,
193 193 vmax=1,
194 194 cmap=plt.get_cmap(self.colormap_coh)
195 195 )
196 196 else:
197 197 ax.plt.set_array(coh.T.ravel())
198 198 self.titles.append(
199 199 'Coherence Ch{} * Ch{}'.format(pair[0], pair[1]))
200 200
201 201 ax = self.axes[2 * n + 1]
202 202 if ax.firsttime:
203 203 ax.plt = ax.pcolormesh(x, y, phase.T,
204 204 vmin=-180,
205 205 vmax=180,
206 206 cmap=plt.get_cmap(self.colormap_phase)
207 207 )
208 208 else:
209 209 ax.plt.set_array(phase.T.ravel())
210 210 self.titles.append('Phase CH{} * CH{}'.format(pair[0], pair[1]))
211 211
212 212
213 213 class RTIPlot(Plot):
214 214 '''
215 215 Plot for RTI data
216 216 '''
217 217
218 218 CODE = 'rti'
219 219 colormap = 'jet'
220 220 plot_type = 'pcolorbuffer'
221 221 titles = None
222 222 channelList = []
223 223
224 224 def setup(self):
225 225 self.xaxis = 'time'
226 226 self.ncols = 1
227 print("dataChannels ",self.data.channels)
227 #print("dataChannels ",self.data.channels)
228 228 self.nrows = len(self.data.channels)
229 229 self.nplots = len(self.data.channels)
230 230 self.ylabel = 'Range [km]'
231 231 self.xlabel = 'Time'
232 232 self.cb_label = 'dB'
233 233 self.plots_adjust.update({'hspace':0.8, 'left': 0.1, 'bottom': 0.08, 'right':0.95})
234 234 self.titles = ['{} Channel {}'.format(
235 235 self.CODE.upper(), x) for x in range(self.nplots)]
236 print("SETUP")
236
237 237 def update(self, dataOut):
238 238 if len(self.channelList) == 0:
239 239 self.channelList = dataOut.channelList
240 240 data = {}
241 241 meta = {}
242 242 data['rti'] = dataOut.getPower()
243 243 data['noise'] = 10*numpy.log10(dataOut.getNoise()/dataOut.normFactor)
244
245 244 return data, meta
246 245
247 246 def plot(self):
247
248 248 self.x = self.data.times
249 249 self.y = self.data.yrange
250 250 self.z = self.data[self.CODE]
251 251 self.z = numpy.ma.masked_invalid(self.z)
252 252 try:
253 253 if self.channelList != None:
254 254 self.titles = ['{} Channel {}'.format(
255 255 self.CODE.upper(), x) for x in self.channelList]
256 256 except:
257 257 if self.channelList.any() != None:
258 258 self.titles = ['{} Channel {}'.format(
259 259 self.CODE.upper(), x) for x in self.channelList]
260 260 if self.decimation is None:
261 261 x, y, z = self.fill_gaps(self.x, self.y, self.z)
262 262 else:
263 263 x, y, z = self.fill_gaps(*self.decimate())
264 264
265 265 for n, ax in enumerate(self.axes):
266 266 self.zmin = self.zmin if self.zmin else numpy.min(self.z)
267 267 self.zmax = self.zmax if self.zmax else numpy.max(self.z)
268 268 data = self.data[-1]
269 269 if ax.firsttime:
270 270 ax.plt = ax.pcolormesh(x, y, z[n].T,
271 271 vmin=self.zmin,
272 272 vmax=self.zmax,
273 273 cmap=plt.get_cmap(self.colormap)
274 274 )
275 275 if self.showprofile:
276 276 ax.plot_profile = self.pf_axes[n].plot(
277 277 data['rti'][n], self.y)[0]
278 278 ax.plot_noise = self.pf_axes[n].plot(numpy.repeat(data['noise'][n], len(self.y)), self.y,
279 279 color="k", linestyle="dashed", lw=1)[0]
280 280 else:
281 281 ax.collections.remove(ax.collections[0])
282 282 ax.plt = ax.pcolormesh(x, y, z[n].T,
283 283 vmin=self.zmin,
284 284 vmax=self.zmax,
285 285 cmap=plt.get_cmap(self.colormap)
286 286 )
287 287 if self.showprofile:
288 288 ax.plot_profile.set_data(data['rti'][n], self.y)
289 289 ax.plot_noise.set_data(numpy.repeat(
290 290 data['noise'][n], len(self.y)), self.y)
291 291
292 292
293 293 class CoherencePlot(RTIPlot):
294 294 '''
295 295 Plot for Coherence data
296 296 '''
297 297
298 298 CODE = 'coh'
299 299
300 300 def setup(self):
301 301 self.xaxis = 'time'
302 302 self.ncols = 1
303 303 self.nrows = len(self.data.pairs)
304 304 self.nplots = len(self.data.pairs)
305 305 self.ylabel = 'Range [km]'
306 306 self.xlabel = 'Time'
307 307 self.plots_adjust.update({'hspace':0.6, 'left': 0.1, 'bottom': 0.1,'right':0.95})
308 308 if self.CODE == 'coh':
309 309 self.cb_label = ''
310 310 self.titles = [
311 311 'Coherence Map Ch{} * Ch{}'.format(x[0], x[1]) for x in self.data.pairs]
312 312 else:
313 313 self.cb_label = 'Degrees'
314 314 self.titles = [
315 315 'Phase Map Ch{} * Ch{}'.format(x[0], x[1]) for x in self.data.pairs]
316 316
317 317 def update(self, dataOut):
318 318
319 319 data = {}
320 320 meta = {}
321 321 data['coh'] = dataOut.getCoherence()
322 322 meta['pairs'] = dataOut.pairsList
323 323
324 324 return data, meta
325 325
326 326 class PhasePlot(CoherencePlot):
327 327 '''
328 328 Plot for Phase map data
329 329 '''
330 330
331 331 CODE = 'phase'
332 332 colormap = 'seismic'
333 333
334 334 def update(self, dataOut):
335 335
336 336 data = {}
337 337 meta = {}
338 338 data['phase'] = dataOut.getCoherence(phase=True)
339 339 meta['pairs'] = dataOut.pairsList
340 340
341 341 return data, meta
342 342
343 343 class NoisePlot(Plot):
344 344 '''
345 345 Plot for noise
346 346 '''
347 347
348 348 CODE = 'noise'
349 349 plot_type = 'scatterbuffer'
350 350
351 351 def setup(self):
352 352 self.xaxis = 'time'
353 353 self.ncols = 1
354 354 self.nrows = 1
355 355 self.nplots = 1
356 356 self.ylabel = 'Intensity [dB]'
357 357 self.xlabel = 'Time'
358 358 self.titles = ['Noise']
359 359 self.colorbar = False
360 360 self.plots_adjust.update({'right': 0.85 })
361 361
362 362 def update(self, dataOut):
363 363
364 364 data = {}
365 365 meta = {}
366 366 data['noise'] = 10*numpy.log10(dataOut.getNoise()/dataOut.normFactor).reshape(dataOut.nChannels, 1)
367 367 meta['yrange'] = numpy.array([])
368 368
369 369 return data, meta
370 370
371 371 def plot(self):
372 372
373 373 x = self.data.times
374 374 xmin = self.data.min_time
375 375 xmax = xmin + self.xrange * 60 * 60
376 376 Y = self.data['noise']
377 377
378 378 if self.axes[0].firsttime:
379 379 self.ymin = numpy.nanmin(Y) - 5
380 380 self.ymax = numpy.nanmax(Y) + 5
381 381 for ch in self.data.channels:
382 382 y = Y[ch]
383 383 self.axes[0].plot(x, y, lw=1, label='Ch{}'.format(ch))
384 384 plt.legend(bbox_to_anchor=(1.18, 1.0))
385 385 else:
386 386 for ch in self.data.channels:
387 387 y = Y[ch]
388 388 self.axes[0].lines[ch].set_data(x, y)
389 389
390 390
391 391 class PowerProfilePlot(Plot):
392 392
393 393 CODE = 'pow_profile'
394 394 plot_type = 'scatter'
395 395
396 396 def setup(self):
397 397
398 398 self.ncols = 1
399 399 self.nrows = 1
400 400 self.nplots = 1
401 401 self.height = 4
402 402 self.width = 3
403 403 self.ylabel = 'Range [km]'
404 404 self.xlabel = 'Intensity [dB]'
405 405 self.titles = ['Power Profile']
406 406 self.colorbar = False
407 407
408 408 def update(self, dataOut):
409 409
410 410 data = {}
411 411 meta = {}
412 412 data[self.CODE] = dataOut.getPower()
413 413
414 414 return data, meta
415 415
416 416 def plot(self):
417 417
418 418 y = self.data.yrange
419 419 self.y = y
420 420
421 421 x = self.data[-1][self.CODE]
422 422
423 423 if self.xmin is None: self.xmin = numpy.nanmin(x)*0.9
424 424 if self.xmax is None: self.xmax = numpy.nanmax(x)*1.1
425 425
426 426 if self.axes[0].firsttime:
427 427 for ch in self.data.channels:
428 428 self.axes[0].plot(x[ch], y, lw=1, label='Ch{}'.format(ch))
429 429 plt.legend()
430 430 else:
431 431 for ch in self.data.channels:
432 432 self.axes[0].lines[ch].set_data(x[ch], y)
433 433
434 434
435 435 class SpectraCutPlot(Plot):
436 436
437 437 CODE = 'spc_cut'
438 438 plot_type = 'scatter'
439 439 buffering = False
440 440
441 441 def setup(self):
442 442
443 443 self.nplots = len(self.data.channels)
444 444 self.ncols = int(numpy.sqrt(self.nplots) + 0.9)
445 445 self.nrows = int((1.0 * self.nplots / self.ncols) + 0.9)
446 446 self.width = 3.4 * self.ncols + 1.5
447 447 self.height = 3 * self.nrows
448 448 self.ylabel = 'Power [dB]'
449 449 self.colorbar = False
450 450 self.plots_adjust.update({'left':0.1, 'hspace':0.3, 'right': 0.75, 'bottom':0.08})
451 451
452 452 def update(self, dataOut):
453 453
454 454 data = {}
455 455 meta = {}
456 456 spc = 10*numpy.log10(dataOut.data_spc/dataOut.normFactor)
457 457 data['spc'] = spc
458 458 meta['xrange'] = (dataOut.getFreqRange(1)/1000., dataOut.getAcfRange(1), dataOut.getVelRange(1))
459 459
460 460 return data, meta
461 461
462 462 def plot(self):
463 463 if self.xaxis == "frequency":
464 464 x = self.data.xrange[0][1:]
465 465 self.xlabel = "Frequency (kHz)"
466 466 elif self.xaxis == "time":
467 467 x = self.data.xrange[1]
468 468 self.xlabel = "Time (ms)"
469 469 else:
470 470 x = self.data.xrange[2]
471 471 self.xlabel = "Velocity (m/s)"
472 472
473 473 self.titles = []
474 474
475 475 y = self.data.yrange
476 476 z = self.data[-1]['spc']
477 477
478 478 if self.height_index:
479 479 index = numpy.array(self.height_index)
480 480 else:
481 481 index = numpy.arange(0, len(y), int((len(y))/9))
482 482
483 483 for n, ax in enumerate(self.axes):
484 484 if ax.firsttime:
485 485 self.xmax = self.xmax if self.xmax else numpy.nanmax(x)
486 486 self.xmin = self.xmin if self.xmin else -self.xmax
487 487 self.ymin = self.ymin if self.ymin else numpy.nanmin(z)
488 488 self.ymax = self.ymax if self.ymax else numpy.nanmax(z)
489 489 ax.plt = ax.plot(x, z[n, :, index].T)
490 490 labels = ['Range = {:2.1f}km'.format(y[i]) for i in index]
491 491 self.figures[0].legend(ax.plt, labels, loc='center right')
492 492 else:
493 493 for i, line in enumerate(ax.plt):
494 494 line.set_data(x, z[n, :, index[i]])
495 495 self.titles.append('CH {}'.format(n))
496 496
497 497
498 498 class BeaconPhase(Plot):
499 499
500 500 __isConfig = None
501 501 __nsubplots = None
502 502
503 503 PREFIX = 'beacon_phase'
504 504
505 505 def __init__(self):
506 506 Plot.__init__(self)
507 507 self.timerange = 24*60*60
508 508 self.isConfig = False
509 509 self.__nsubplots = 1
510 510 self.counter_imagwr = 0
511 511 self.WIDTH = 800
512 512 self.HEIGHT = 400
513 513 self.WIDTHPROF = 120
514 514 self.HEIGHTPROF = 0
515 515 self.xdata = None
516 516 self.ydata = None
517 517
518 518 self.PLOT_CODE = BEACON_CODE
519 519
520 520 self.FTP_WEI = None
521 521 self.EXP_CODE = None
522 522 self.SUB_EXP_CODE = None
523 523 self.PLOT_POS = None
524 524
525 525 self.filename_phase = None
526 526
527 527 self.figfile = None
528 528
529 529 self.xmin = None
530 530 self.xmax = None
531 531
532 532 def getSubplots(self):
533 533
534 534 ncol = 1
535 535 nrow = 1
536 536
537 537 return nrow, ncol
538 538
539 539 def setup(self, id, nplots, wintitle, showprofile=True, show=True):
540 540
541 541 self.__showprofile = showprofile
542 542 self.nplots = nplots
543 543
544 544 ncolspan = 7
545 545 colspan = 6
546 546 self.__nsubplots = 2
547 547
548 548 self.createFigure(id = id,
549 549 wintitle = wintitle,
550 550 widthplot = self.WIDTH+self.WIDTHPROF,
551 551 heightplot = self.HEIGHT+self.HEIGHTPROF,
552 552 show=show)
553 553
554 554 nrow, ncol = self.getSubplots()
555 555
556 556 self.addAxes(nrow, ncol*ncolspan, 0, 0, colspan, 1)
557 557
558 558 def save_phase(self, filename_phase):
559 559 f = open(filename_phase,'w+')
560 560 f.write('\n\n')
561 561 f.write('JICAMARCA RADIO OBSERVATORY - Beacon Phase \n')
562 562 f.write('DD MM YYYY HH MM SS pair(2,0) pair(2,1) pair(2,3) pair(2,4)\n\n' )
563 563 f.close()
564 564
565 565 def save_data(self, filename_phase, data, data_datetime):
566 566 f=open(filename_phase,'a')
567 567 timetuple_data = data_datetime.timetuple()
568 568 day = str(timetuple_data.tm_mday)
569 569 month = str(timetuple_data.tm_mon)
570 570 year = str(timetuple_data.tm_year)
571 571 hour = str(timetuple_data.tm_hour)
572 572 minute = str(timetuple_data.tm_min)
573 573 second = str(timetuple_data.tm_sec)
574 574 f.write(day+' '+month+' '+year+' '+hour+' '+minute+' '+second+' '+str(data[0])+' '+str(data[1])+' '+str(data[2])+' '+str(data[3])+'\n')
575 575 f.close()
576 576
577 577 def plot(self):
578 578 log.warning('TODO: Not yet implemented...')
579 579
580 580 def run(self, dataOut, id, wintitle="", pairsList=None, showprofile='True',
581 581 xmin=None, xmax=None, ymin=None, ymax=None, hmin=None, hmax=None,
582 582 timerange=None,
583 583 save=False, figpath='./', figfile=None, show=True, ftp=False, wr_period=1,
584 584 server=None, folder=None, username=None, password=None,
585 585 ftp_wei=0, exp_code=0, sub_exp_code=0, plot_pos=0):
586 586
587 587 if dataOut.flagNoData:
588 588 return dataOut
589 589
590 590 if not isTimeInHourRange(dataOut.datatime, xmin, xmax):
591 591 return
592 592
593 593 if pairsList == None:
594 594 pairsIndexList = dataOut.pairsIndexList[:10]
595 595 else:
596 596 pairsIndexList = []
597 597 for pair in pairsList:
598 598 if pair not in dataOut.pairsList:
599 599 raise ValueError("Pair %s is not in dataOut.pairsList" %(pair))
600 600 pairsIndexList.append(dataOut.pairsList.index(pair))
601 601
602 602 if pairsIndexList == []:
603 603 return
604 604
605 605 # if len(pairsIndexList) > 4:
606 606 # pairsIndexList = pairsIndexList[0:4]
607 607
608 608 hmin_index = None
609 609 hmax_index = None
610 610
611 611 if hmin != None and hmax != None:
612 612 indexes = numpy.arange(dataOut.nHeights)
613 613 hmin_list = indexes[dataOut.heightList >= hmin]
614 614 hmax_list = indexes[dataOut.heightList <= hmax]
615 615
616 616 if hmin_list.any():
617 617 hmin_index = hmin_list[0]
618 618
619 619 if hmax_list.any():
620 620 hmax_index = hmax_list[-1]+1
621 621
622 622 x = dataOut.getTimeRange()
623 623
624 624 thisDatetime = dataOut.datatime
625 625
626 626 title = wintitle + " Signal Phase" # : %s" %(thisDatetime.strftime("%d-%b-%Y"))
627 627 xlabel = "Local Time"
628 628 ylabel = "Phase (degrees)"
629 629
630 630 update_figfile = False
631 631
632 632 nplots = len(pairsIndexList)
633 633 #phase = numpy.zeros((len(pairsIndexList),len(dataOut.beacon_heiIndexList)))
634 634 phase_beacon = numpy.zeros(len(pairsIndexList))
635 635 for i in range(nplots):
636 636 pair = dataOut.pairsList[pairsIndexList[i]]
637 637 ccf = numpy.average(dataOut.data_cspc[pairsIndexList[i], :, hmin_index:hmax_index], axis=0)
638 638 powa = numpy.average(dataOut.data_spc[pair[0], :, hmin_index:hmax_index], axis=0)
639 639 powb = numpy.average(dataOut.data_spc[pair[1], :, hmin_index:hmax_index], axis=0)
640 640 avgcoherenceComplex = ccf/numpy.sqrt(powa*powb)
641 641 phase = numpy.arctan2(avgcoherenceComplex.imag, avgcoherenceComplex.real)*180/numpy.pi
642 642
643 643 if dataOut.beacon_heiIndexList:
644 644 phase_beacon[i] = numpy.average(phase[dataOut.beacon_heiIndexList])
645 645 else:
646 646 phase_beacon[i] = numpy.average(phase)
647 647
648 648 if not self.isConfig:
649 649
650 650 nplots = len(pairsIndexList)
651 651
652 652 self.setup(id=id,
653 653 nplots=nplots,
654 654 wintitle=wintitle,
655 655 showprofile=showprofile,
656 656 show=show)
657 657
658 658 if timerange != None:
659 659 self.timerange = timerange
660 660
661 661 self.xmin, self.xmax = self.getTimeLim(x, xmin, xmax, timerange)
662 662
663 663 if ymin == None: ymin = 0
664 664 if ymax == None: ymax = 360
665 665
666 666 self.FTP_WEI = ftp_wei
667 667 self.EXP_CODE = exp_code
668 668 self.SUB_EXP_CODE = sub_exp_code
669 669 self.PLOT_POS = plot_pos
670 670
671 671 self.name = thisDatetime.strftime("%Y%m%d_%H%M%S")
672 672 self.isConfig = True
673 673 self.figfile = figfile
674 674 self.xdata = numpy.array([])
675 675 self.ydata = numpy.array([])
676 676
677 677 update_figfile = True
678 678
679 679 #open file beacon phase
680 680 path = '%s%03d' %(self.PREFIX, self.id)
681 681 beacon_file = os.path.join(path,'%s.txt'%self.name)
682 682 self.filename_phase = os.path.join(figpath,beacon_file)
683 683 #self.save_phase(self.filename_phase)
684 684
685 685
686 686 #store data beacon phase
687 687 #self.save_data(self.filename_phase, phase_beacon, thisDatetime)
688 688
689 689 self.setWinTitle(title)
690 690
691 691
692 692 title = "Phase Plot %s" %(thisDatetime.strftime("%Y/%m/%d %H:%M:%S"))
693 693
694 694 legendlabels = ["Pair (%d,%d)"%(pair[0], pair[1]) for pair in dataOut.pairsList]
695 695
696 696 axes = self.axesList[0]
697 697
698 698 self.xdata = numpy.hstack((self.xdata, x[0:1]))
699 699
700 700 if len(self.ydata)==0:
701 701 self.ydata = phase_beacon.reshape(-1,1)
702 702 else:
703 703 self.ydata = numpy.hstack((self.ydata, phase_beacon.reshape(-1,1)))
704 704
705 705
706 706 axes.pmultilineyaxis(x=self.xdata, y=self.ydata,
707 707 xmin=self.xmin, xmax=self.xmax, ymin=ymin, ymax=ymax,
708 708 xlabel=xlabel, ylabel=ylabel, title=title, legendlabels=legendlabels, marker='x', markersize=8, linestyle="solid",
709 709 XAxisAsTime=True, grid='both'
710 710 )
711 711
712 712 self.draw()
713 713
714 714 if dataOut.ltctime >= self.xmax:
715 715 self.counter_imagwr = wr_period
716 716 self.isConfig = False
717 717 update_figfile = True
718 718
719 719 self.save(figpath=figpath,
720 720 figfile=figfile,
721 721 save=save,
722 722 ftp=ftp,
723 723 wr_period=wr_period,
724 724 thisDatetime=thisDatetime,
725 725 update_figfile=update_figfile)
726 726
727 727 return dataOut
@@ -1,1575 +1,1576
1 1 """
2 2 Created on Jul 2, 2014
3 3
4 4 @author: roj-idl71
5 5 """
6 6 import os
7 7 import sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import inspect
13 13 import time
14 14 import datetime
15 15 import zmq
16 16
17 17 from schainpy.model.proc.jroproc_base import Operation, MPDecorator
18 18 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
19 19 from schainpy.model.data.jroheaderIO import get_dtype_index, get_numpy_dtype, get_procflag_dtype, get_dtype_width
20 20 from schainpy.utils import log
21 21 import schainpy.admin
22 22
23 23 LOCALTIME = True
24 24 DT_DIRECTIVES = {
25 25 '%Y': 4,
26 26 '%y': 2,
27 27 '%m': 2,
28 28 '%d': 2,
29 29 '%j': 3,
30 30 '%H': 2,
31 31 '%M': 2,
32 32 '%S': 2,
33 33 '%f': 6
34 34 }
35 35
36 36
37 37 def isNumber(cad):
38 38 """
39 39     Checks whether the set of characters that make up a string can be converted to a number.
40 40
41 41     Exceptions:
42 42     If the given string cannot be converted to a number
43 43     Input:
44 44     str, the string to be analyzed to determine whether or not it can be converted to a number
45 45
46 46     Return:
47 47     True : the string is numeric
48 48     False : the string is not numeric
49 49 """
50 50 try:
51 51 float(cad)
52 52 return True
53 53 except:
54 54 return False
55 55
56 56
57 57 def isFileInEpoch(filename, startUTSeconds, endUTSeconds):
58 58 """
59 59     This function determines whether or not a data file falls within the specified date range.
60 60
61 61     Inputs:
62 62     filename : full name of the data file in Jicamarca format (.r)
63 63
64 64     startUTSeconds : start date of the selected range. The date is given in
65 65     seconds counted from 01/01/1970.
66 66     endUTSeconds : end date of the selected range. The date is given in
67 67     seconds counted from 01/01/1970.
68 68
69 69     Return:
70 70     Boolean : Returns True if the data file contains data within the specified
71 71     date range, otherwise returns False.
72 72
73 73     Exceptions:
74 74     If the file does not exist or cannot be opened
75 75     If the header cannot be read.
76 76
77 77 """
78 78 basicHeaderObj = BasicHeader(LOCALTIME)
79 79
80 80 try:
81 81 fp = open(filename, 'rb')
82 82 except IOError:
83 83 print("The file %s can't be opened" % (filename))
84 84 return 0
85 85
86 86 sts = basicHeaderObj.read(fp)
87 87 fp.close()
88 88
89 89 if not(sts):
90 90     print("Skipping the file %s because it does not have a valid header" % (filename))
91 91 return 0
92 92
93 93 if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
94 94 return 0
95 95
96 96 return 1
97 97
98 98
99 99 def isTimeInRange(thisTime, startTime, endTime):
100 100 if endTime >= startTime:
101 101 if (thisTime < startTime) or (thisTime > endTime):
102 102 return 0
103 103 return 1
104 104 else:
105 105 if (thisTime < startTime) and (thisTime > endTime):
106 106 return 0
107 107 return 1
108 108
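Annotation (not part of the diff): both branches of isTimeInRange in one place; the second pair of calls exercises the wrap-past-midnight case (endTime < startTime). The sample times are arbitrary:

    import datetime
    t = datetime.time

    print(isTimeInRange(t(10, 0), t(8, 0), t(18, 0)))   # 1: inside 08:00-18:00
    print(isTimeInRange(t(20, 0), t(8, 0), t(18, 0)))   # 0: outside
    print(isTimeInRange(t(23, 0), t(22, 0), t(2, 0)))   # 1: 22:00-02:00 wraps midnight
    print(isTimeInRange(t(12, 0), t(22, 0), t(2, 0)))   # 0: outside the night window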
109 109
110 110 def isFileInTimeRange(filename, startDate, endDate, startTime, endTime):
111 111 """
112 112     Returns 1 if the data file falls within the specified time range.
113 113
114 114     Inputs:
115 115     filename : full name of the data file in Jicamarca format (.r)
116 116
117 117     startDate : start date of the selected range, as a datetime.date
118 118
119 119     endDate : end date of the selected range, as a datetime.date
120 120
121 121     startTime : start time of the selected range, as a datetime.time
122 122
123 123     endTime : end time of the selected range, as a datetime.time
124 124
125 125     Return:
126 126     Boolean : Returns True if the data file contains data within the specified
127 127     date range, otherwise returns False.
128 128
129 129     Exceptions:
130 130     If the file does not exist or cannot be opened
131 131     If the header cannot be read.
132 132
133 133 """
134 134
135 135 try:
136 136 fp = open(filename, 'rb')
137 137 except IOError:
138 138 print("The file %s can't be opened" % (filename))
139 139 return None
140 140
141 141 firstBasicHeaderObj = BasicHeader(LOCALTIME)
142 142 systemHeaderObj = SystemHeader()
143 143 radarControllerHeaderObj = RadarControllerHeader()
144 144 processingHeaderObj = ProcessingHeader()
145 145
146 146 lastBasicHeaderObj = BasicHeader(LOCALTIME)
147 147
148 148 sts = firstBasicHeaderObj.read(fp)
149 149
150 150 if not(sts):
151 151     print("[Reading] Skipping the file %s because it does not have a valid header" % (filename))
152 152 return None
153 153
154 154 if not systemHeaderObj.read(fp):
155 155 return None
156 156
157 157 if not radarControllerHeaderObj.read(fp):
158 158 return None
159 159
160 160 if not processingHeaderObj.read(fp):
161 161 return None
162 162
163 163 filesize = os.path.getsize(filename)
164 164
165 165 offset = processingHeaderObj.blockSize + 24 # header size
166 166
167 167 if filesize <= offset:
168 168     print("[Reading] %s: This file does not have enough data" % filename)
169 169 return None
170 170
171 171 fp.seek(-offset, 2)
172 172
173 173 sts = lastBasicHeaderObj.read(fp)
174 174
175 175 fp.close()
176 176
177 177 thisDatetime = lastBasicHeaderObj.datatime
178 178 thisTime_last_block = thisDatetime.time()
179 179
180 180 thisDatetime = firstBasicHeaderObj.datatime
181 181 thisDate = thisDatetime.date()
182 182 thisTime_first_block = thisDatetime.time()
183 183
184 184 # General case
185 185 # o>>>>>>>>>>>>>><<<<<<<<<<<<<<o
186 186 #-----------o----------------------------o-----------
187 187 # startTime endTime
188 188
189 189 if endTime >= startTime:
190 190 if (thisTime_last_block < startTime) or (thisTime_first_block > endTime):
191 191 return None
192 192
193 193 return thisDatetime
194 194
195 195 # If endTime < startTime then endTime belongs to the next day
196 196
197 197 #<<<<<<<<<<<o o>>>>>>>>>>>
198 198 #-----------o----------------------------o-----------
199 199 # endTime startTime
200 200
201 201 if (thisDate == startDate) and (thisTime_last_block < startTime):
202 202 return None
203 203
204 204 if (thisDate == endDate) and (thisTime_first_block > endTime):
205 205 return None
206 206
207 207 if (thisTime_last_block < startTime) and (thisTime_first_block > endTime):
208 208 return None
209 209
210 210 return thisDatetime
211 211
212 212
213 213 def isFolderInDateRange(folder, startDate=None, endDate=None):
214 214 """
215 215     Returns 1 if the data folder falls within the specified date range.
216 216
217 217     Inputs:
218 218     folder : full name of the directory.
219 219     Its format should be "/path_root/?YYYYDDD"
220 220
221 221     where:
222 222     YYYY : Year (e.g. 2015)
223 223     DDD : Day of the year (e.g. 305)
224 224
225 225     startDate : start date of the selected range, as a datetime.date
226 226
227 227     endDate : end date of the selected range, as a datetime.date
228 228
229 229     Return:
230 230     Boolean : Returns True if the folder contains data within the specified
231 231     date range, otherwise returns False.
232 232     Exceptions:
233 233     If the directory does not have the proper format
234 234 """
235 235
236 236 basename = os.path.basename(folder)
237 237
238 238 if not isRadarFolder(basename):
239 239     print("The folder %s does not have the right format" % folder)
240 240 return 0
241 241
242 242 if startDate and endDate:
243 243 thisDate = getDateFromRadarFolder(basename)
244 244
245 245 if thisDate < startDate:
246 246 return 0
247 247
248 248 if thisDate > endDate:
249 249 return 0
250 250
251 251 return 1
252 252
253 253
254 254 def isFileInDateRange(filename, startDate=None, endDate=None):
255 255 """
256 256     Returns 1 if the data file falls within the specified date range.
257 257
258 258     Inputs:
259 259     filename : full name of the data file in Jicamarca format (.r)
260 260
261 261     Its format should be "?YYYYDDDsss"
262 262
263 263     where:
264 264     YYYY : Year (e.g. 2015)
265 265     DDD : Day of the year (e.g. 305)
266 266     sss : set
267 267
268 268     startDate : start date of the selected range, as a datetime.date
269 269
270 270     endDate : end date of the selected range, as a datetime.date
271 271
272 272     Return:
273 273     Boolean : Returns True if the data file contains data within the specified
274 274     date range, otherwise returns False.
275 275     Exceptions:
276 276     If the file does not have the proper format
277 277 """
278 278
279 279 basename = os.path.basename(filename)
280 280
281 281 if not isRadarFile(basename):
282 282     print("The filename %s does not have the right format" % filename)
283 283 return 0
284 284
285 285 if startDate and endDate:
286 286 thisDate = getDateFromRadarFile(basename)
287 287
288 288 if thisDate < startDate:
289 289 return 0
290 290
291 291 if thisDate > endDate:
292 292 return 0
293 293
294 294 return 1
295 295
296 296
297 297 def getFileFromSet(path, ext, set):
298 298 validFilelist = []
299 299 fileList = os.listdir(path)
300 300
301 301 # 0 1234 567 89A BCDE
302 302 # H YYYY DDD SSS .ext
303 303
304 304 for thisFile in fileList:
305 305 try:
306 306 year = int(thisFile[1:5])
307 307 doy = int(thisFile[5:8])
308 308 except:
309 309 continue
310 310
311 311 if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
312 312 continue
313 313
314 314 validFilelist.append(thisFile)
315 315
316 316 myfile = fnmatch.filter(
317 317 validFilelist, '*%4.4d%3.3d%3.3d*' % (year, doy, set))
318 318
319 319 if len(myfile) != 0:
320 320 return myfile[0]
321 321 else:
322 322 filename = '*%4.4d%3.3d%3.3d%s' % (year, doy, set, ext.lower())
323 323 print('the filename %s does not exist' % filename)
324 324 print('...going to the last file: ')
325 325
326 326 if validFilelist:
327 327 validFilelist = sorted(validFilelist, key=str.lower)
328 328 return validFilelist[-1]
329 329
330 330 return None
331 331
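Annotation (not part of the diff): the fnmatch pattern built above is just the zero-padded year, day-of-year and set glued together. With made-up values (the variable names mirror the originals, which shadow the built-in set):

    year, doy, set = 2019, 307, 1
    print('*%4.4d%3.3d%3.3d*' % (year, doy, set))   # *2019307001*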
332 332
333 333 def getlastFileFromPath(path, ext):
334 334 """
335 335     Filters the fileList, keeping only the files that match the "PYYYYDDDSSS.ext" format,
336 336     and at the end returns the last file of the remaining list.
337 337
338 338     Input:
339 339     fileList : list containing all the files (without path) inside a given folder
340 340     ext : extension of the files contained in the folder
341 341
342 342     Return:
343 343     The last file of the given folder; the path is not included.
344 344 """
345 345 validFilelist = []
346 346 fileList = os.listdir(path)
347 347
348 348 # 0 1234 567 89A BCDE
349 349 # H YYYY DDD SSS .ext
350 350
351 351 for thisFile in fileList:
352 352
353 353 year = thisFile[1:5]
354 354 if not isNumber(year):
355 355 continue
356 356
357 357 doy = thisFile[5:8]
358 358 if not isNumber(doy):
359 359 continue
360 360
361 361 year = int(year)
362 362 doy = int(doy)
363 363
364 364 if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
365 365 continue
366 366
367 367 validFilelist.append(thisFile)
368 368
369 369 if validFilelist:
370 370 validFilelist = sorted(validFilelist, key=str.lower)
371 371 return validFilelist[-1]
372 372
373 373 return None
374 374
375 375
376 376 def isRadarFolder(folder):
377 377 try:
378 378 year = int(folder[1:5])
379 379 doy = int(folder[5:8])
380 380 except:
381 381 return 0
382 382
383 383 return 1
384 384
385 385
386 386 def isRadarFile(file):
387 387 try:
388 388 year = int(file[1:5])
389 389 doy = int(file[5:8])
390 390 set = int(file[8:11])
391 391 except:
392 392 return 0
393 393
394 394 return 1
395 395
396 396
397 397 def getDateFromRadarFile(file):
398 398 try:
399 399 year = int(file[1:5])
400 400 doy = int(file[5:8])
401 401 set = int(file[8:11])
402 402 except:
403 403 return None
404 404
405 405 thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy - 1)
406 406 return thisDate
407 407
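For example (annotation, not part of the diff), using the ?YYYYDDDsss naming convention described in the docstrings above; the concrete filename is made up:

    print(getDateFromRadarFile('D2019307001.r'))   # 2019-11-03 (day 307 of 2019)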
408 408
409 409 def getDateFromRadarFolder(folder):
410 410 try:
411 411 year = int(folder[1:5])
412 412 doy = int(folder[5:8])
413 413 except:
414 414 return None
415 415
416 416 thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy - 1)
417 417 return thisDate
418 418
419 419 def parse_format(s, fmt):
420 420
421 421 for i in range(fmt.count('%')):
422 422 x = fmt.index('%')
423 423 d = DT_DIRECTIVES[fmt[x:x+2]]
424 424 fmt = fmt.replace(fmt[x:x+2], s[x:x+d])
425 425 return fmt
426 426
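Annotation (not part of the diff): parse_format substitutes each strptime directive in `fmt` with the slice of matching width (from DT_DIRECTIVES) taken at the same position of the actual name, so the result parses cleanly with the same format. For instance, with the default folderfmt of JRODataReader below ('*%Y%j') and a hypothetical dYYYYDDD folder:

    import datetime

    expanded = parse_format('d2019307', '*%Y%j')                  # -> '*2019307'
    print(datetime.datetime.strptime(expanded, '*%Y%j').date())   # 2019-11-03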
427 427 class Reader(object):
428 428
429 429 c = 3E8
430 430 isConfig = False
431 431 dtype = None
432 432 pathList = []
433 433 filenameList = []
434 434 datetimeList = []
435 435 filename = None
436 436 ext = None
437 437 flagIsNewFile = 1
438 438 flagDiscontinuousBlock = 0
439 439 flagIsNewBlock = 0
440 440 flagNoMoreFiles = 0
441 441 fp = None
442 442 firstHeaderSize = 0
443 443 basicHeaderSize = 24
444 444 versionFile = 1103
445 445 fileSize = None
446 446 fileSizeByHeader = None
447 447 fileIndex = -1
448 448 profileIndex = None
449 449 blockIndex = 0
450 450 nTotalBlocks = 0
451 451 maxTimeStep = 30
452 452 lastUTTime = None
453 453 datablock = None
454 454 dataOut = None
455 455 getByBlock = False
456 456 path = None
457 457 startDate = None
458 458 endDate = None
459 459 startTime = datetime.time(0, 0, 0)
460 460 endTime = datetime.time(23, 59, 59)
461 461 set = None
462 462 expLabel = ""
463 463 online = False
464 464 delay = 60
465 465 nTries = 3 # quantity tries
466 466 nFiles = 3 # number of files for searching
467 467 walk = True
468 468 getblock = False
469 469 nTxs = 1
470 470 realtime = False
471 471 blocksize = 0
472 472 blocktime = None
473 473 warnings = True
474 474 verbose = True
475 475 server = None
476 476 format = None
477 477 oneDDict = None
478 478 twoDDict = None
479 479 independentParam = None
480 480 filefmt = None
481 481 folderfmt = None
482 482 open_file = open
483 483 open_mode = 'rb'
484 484
485 485 def run(self):
486 486
487 487 raise NotImplementedError
488 488
489 489 def getAllowedArgs(self):
490 490 if hasattr(self, '__attrs__'):
491 491 return self.__attrs__
492 492 else:
493 493 return inspect.getargspec(self.run).args
494 494
495 495 def set_kwargs(self, **kwargs):
496 496
497 497 for key, value in kwargs.items():
498 498 setattr(self, key, value)
499 499
500 500 def find_folders(self, path, startDate, endDate, folderfmt, last=False):
501 501
502 502 folders = [x for f in path.split(',')
503 503 for x in os.listdir(f) if os.path.isdir(os.path.join(f, x))]
504 504 folders.sort()
505 505
506 506 if last:
507 507 folders = [folders[-1]]
508 508
509 509 for folder in folders:
510 510 try:
511 511 dt = datetime.datetime.strptime(parse_format(folder, folderfmt), folderfmt).date()
512 512 if dt >= startDate and dt <= endDate:
513 513 yield os.path.join(path, folder)
514 514 else:
515 515                     log.log('Skipping folder {}'.format(folder), self.name)
516 516 except Exception as e:
517 517                 log.log('Skipping folder {}'.format(folder), self.name)
518 518 continue
519 519 return
520 520
521 521 def find_files(self, folders, ext, filefmt, startDate=None, endDate=None,
522 522 expLabel='', last=False):
523 523
524 524 for path in folders:
525 525 files = glob.glob1(path, '*{}'.format(ext))
526 526 files.sort()
527 527 if last:
528 528 if files:
529 529 fo = files[-1]
530 530 try:
531 531 dt = datetime.datetime.strptime(parse_format(fo, filefmt), filefmt).date()
532 532 yield os.path.join(path, expLabel, fo)
533 533 except Exception as e:
534 534 pass
535 535 return
536 536 else:
537 537 return
538 538
539 539 for fo in files:
540 540 try:
541 541 dt = datetime.datetime.strptime(parse_format(fo, filefmt), filefmt).date()
542 542 if dt >= startDate and dt <= endDate:
543 543 yield os.path.join(path, expLabel, fo)
544 544 else:
545 545                     log.log('Skipping file {}'.format(fo), self.name)
546 546 except Exception as e:
547 547                     log.log('Skipping file {}'.format(fo), self.name)
548 548 continue
549 549
550 550 def searchFilesOffLine(self, path, startDate, endDate,
551 551 expLabel, ext, walk,
552 552 filefmt, folderfmt):
553 553 """Search files in offline mode for the given arguments
554 554
555 555 Return:
556 556 Generator of files
557 557 """
558 558
559 559 if walk:
560 560 folders = self.find_folders(
561 561 path, startDate, endDate, folderfmt)
562 562 else:
563 563 folders = path.split(',')
564 564
565 565 return self.find_files(
566 566 folders, ext, filefmt, startDate, endDate, expLabel)
567 567
568 568 def searchFilesOnLine(self, path, startDate, endDate,
569 569 expLabel, ext, walk,
570 570 filefmt, folderfmt):
571 571 """Search for the last file of the last folder
572 572
573 573 Arguments:
574 574             path : folder that contains the data files
575 575             expLabel : name of the sub-experiment (subfolder)
576 576             ext : extension of the files
577 577             walk : if enabled, searches are not performed inside the subdirectories (doypath)
578 578
579 579         Return:
580 580             generator with the full path of the last filename
581 581 """
582 582
583 583 if walk:
584 584 folders = self.find_folders(
585 585 path, startDate, endDate, folderfmt, last=True)
586 586 else:
587 587 folders = path.split(',')
588
588
589 589 return self.find_files(
590 590 folders, ext, filefmt, startDate, endDate, expLabel, last=True)
591 591
592 592 def setNextFile(self):
593 593         """Set the next file to be read, open it and parse the file header"""
594 594
595 595 while True:
596 596 if self.fp != None:
597 597 self.fp.close()
598 598
599 599 if self.online:
600 600 newFile = self.setNextFileOnline()
601 601 else:
602 602 newFile = self.setNextFileOffline()
603 603
604 604 if not(newFile):
605 605 if self.online:
606 606                     raise schainpy.admin.SchainError('Time to wait for new files reached')
607 607 else:
608 608 if self.fileIndex == -1:
609 609 raise schainpy.admin.SchainWarning('No files found in the given path')
610 610 else:
611 611 raise schainpy.admin.SchainWarning('No more files to read')
612 612
613 613 if self.verifyFile(self.filename):
614 614 break
615 615
616 616 log.log('Opening file: %s' % self.filename, self.name)
617 617
618 618 self.readFirstHeader()
619 619 self.nReadBlocks = 0
620 620
621 621 def setNextFileOnline(self):
622 622         """Check for the next file to be read in online mode.
623 623
624 624 Set:
625 625 self.filename
626 626 self.fp
627 627 self.filesize
628 628
629 629 Return:
630 630 boolean
631 631
632 632 """
633 633 nextFile = True
634 634 nextDay = False
635 635
636 636 for nFiles in range(self.nFiles+1):
637 637 for nTries in range(self.nTries):
638 638 fullfilename, filename = self.checkForRealPath(nextFile, nextDay)
639 639 if fullfilename is not None:
640 640 break
641 641 log.warning(
642 642 "Waiting %0.2f sec for the next file: \"%s\" , try %02d ..." % (self.delay, filename, nTries + 1),
643 643 self.name)
644 644 time.sleep(self.delay)
645 645 nextFile = False
646 646 continue
647 647
648 648 if fullfilename is not None:
649 649 break
650 650
651 651 self.nTries = 1
652 652 nextFile = True
653 653
654 654 if nFiles == (self.nFiles - 1):
655 655 log.log('Trying with next day...', self.name)
656 656 nextDay = True
657 657 self.nTries = 3
658 658
659 659 if fullfilename:
660 660 self.fileSize = os.path.getsize(fullfilename)
661 661 self.filename = fullfilename
662 662 self.flagIsNewFile = 1
663 663 if self.fp != None:
664 664 self.fp.close()
665 665 self.fp = self.open_file(fullfilename, self.open_mode)
666 666 self.flagNoMoreFiles = 0
667 667 self.fileIndex += 1
668 668 return 1
669 669 else:
670 670 return 0
671 671
672 672 def setNextFileOffline(self):
673 673         """Open the next file to be read in offline mode"""
674 674
675 675 try:
676 676 filename = next(self.filenameList)
677 677 self.fileIndex +=1
678 678 except StopIteration:
679 679 self.flagNoMoreFiles = 1
680 680 return 0
681 681
682 682 self.filename = filename
683 683 self.fileSize = os.path.getsize(filename)
684 684 self.fp = self.open_file(filename, self.open_mode)
685 685 self.flagIsNewFile = 1
686 686
687 687 return 1
688 688
689 689 @staticmethod
690 690 def isDateTimeInRange(dt, startDate, endDate, startTime, endTime):
691 691 """Check if the given datetime is in range"""
692 692 startDateTime= datetime.datetime.combine(startDate,startTime)
693 693 endDateTime = datetime.datetime.combine(endDate,endTime)
694 694 if startDateTime <= dt <= endDateTime:
695 695 return True
696 696 return False
697 697
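Annotation (not part of the diff): note that this check builds a single continuous interval from startDate+startTime to endDate+endTime; it is not a per-day time-of-day filter. A quick example with made-up values:

    import datetime

    ok = Reader.isDateTimeInRange(datetime.datetime(2020, 1, 15, 12, 30),
                                  datetime.date(2020, 1, 1), datetime.date(2020, 1, 31),
                                  datetime.time(6, 0, 0), datetime.time(18, 0, 0))
    print(ok)   # True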
698 698 def verifyFile(self, filename):
699 699 """Check for a valid file
700 700
701 701 Arguments:
702 702 filename -- full path filename
703 703
704 704 Return:
705 705 boolean
706 706 """
707 707
708 708 return True
709 709
710 710 def checkForRealPath(self, nextFile, nextDay):
711 711         """Check if the next file to be read exists"""
712 712
713 713 raise NotImplementedError
714 714
715 715 def readFirstHeader(self):
716 716 """Parse the file header"""
717 717
718 718 pass
719 719
720 720 def waitDataBlock(self, pointer_location, blocksize=None):
721 721         """Wait for the current file to grow until a full block is available after pointer_location.
722 722         Returns 1 when enough data is ready, 0 after exhausting the retries."""
723 723
724 724 currentPointer = pointer_location
725 725 if blocksize is None:
726 726 neededSize = self.processingHeaderObj.blockSize # + self.basicHeaderSize
727 727 else:
728 728 neededSize = blocksize
729 729
730 730 for nTries in range(self.nTries):
731 731 self.fp.close()
732 732 self.fp = open(self.filename, 'rb')
733 733 self.fp.seek(currentPointer)
734 734
735 735 self.fileSize = os.path.getsize(self.filename)
736 736 currentSize = self.fileSize - currentPointer
737 737
738 738 if (currentSize >= neededSize):
739 739 return 1
740 740
741 741 log.warning(
742 742 "Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries + 1),
743 743 self.name
744 744 )
745 745 time.sleep(self.delay)
746 746
747 747 return 0
748 748
749 749 class JRODataReader(Reader):
750 750
751 751 utc = 0
752 752 nReadBlocks = 0
753 753 foldercounter = 0
754 754 firstHeaderSize = 0
755 755 basicHeaderSize = 24
756 756 __isFirstTimeOnline = 1
757 757 filefmt = "*%Y%j***"
758 758 folderfmt = "*%Y%j"
759 759 __attrs__ = ['path', 'startDate', 'endDate', 'startTime', 'endTime', 'online', 'delay', 'walk']
760 760
761 761 def getDtypeWidth(self):
762 762
763 763 dtype_index = get_dtype_index(self.dtype)
764 764 dtype_width = get_dtype_width(dtype_index)
765 765
766 766 return dtype_width
767 767
768 768 def checkForRealPath(self, nextFile, nextDay):
769 769         """Check if the next file to be read exists.
770 770
771 771 Example :
772 772             the correct file name is .../.../D2009307/P2009307367.ext
773 773
774 774             The function then tries the following combinations
775 775 .../.../y2009307367.ext
776 776 .../.../Y2009307367.ext
777 777 .../.../x2009307/y2009307367.ext
778 778 .../.../x2009307/Y2009307367.ext
779 779 .../.../X2009307/y2009307367.ext
780 780 .../.../X2009307/Y2009307367.ext
781 781             where, in this case, the last letter combination is identical to the file being searched for
782 782
783 783 Return:
784 784 str -- fullpath of the file
785 785 """
786 786
787 787
788 788 if nextFile:
789 789 self.set += 1
790 790 if nextDay:
791 791 self.set = 0
792 792 self.doy += 1
793 793 foldercounter = 0
794 794 prefixDirList = [None, 'd', 'D']
795 795 if self.ext.lower() == ".r": # voltage
796 796 prefixFileList = ['d', 'D']
797 797 elif self.ext.lower() == ".pdata": # spectra
798 798 prefixFileList = ['p', 'P']
799 799
800 800         # sweep through the possible combinations
801 801 for prefixDir in prefixDirList:
802 802 thispath = self.path
803 803 if prefixDir != None:
804 804                 # build the directory name xYYYYDDD (x=d or x=D)
805 805 if foldercounter == 0:
806 806 thispath = os.path.join(self.path, "%s%04d%03d" %
807 807 (prefixDir, self.year, self.doy))
808 808 else:
809 809 thispath = os.path.join(self.path, "%s%04d%03d_%02d" % (
810 810 prefixDir, self.year, self.doy, foldercounter))
811 811             for prefixFile in prefixFileList:  # sweep through the two possible combinations of "D"
812 812                 # build the file name xYYYYDDDSSS.ext
813 813 filename = "%s%04d%03d%03d%s" % (prefixFile, self.year, self.doy, self.set, self.ext)
814 814 fullfilename = os.path.join(
815 815 thispath, filename)
816 816
817 817 if os.path.exists(fullfilename):
818 818 return fullfilename, filename
819 819
820 820 return None, filename
821 821
822 822 def __waitNewBlock(self):
823 823 """
824 824 Returns 1 if a new data block was found, 0 otherwise.
825 825
826 826 If the reading mode is offline, it always returns 0.
827 827 """
828 828 if not self.online:
829 829 return 0
830 830
831 831 if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
832 832 return 0
833 833
834 834 currentPointer = self.fp.tell()
835 835
836 836 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
837 837
838 838 for nTries in range(self.nTries):
839 839
840 840 self.fp.close()
841 841 self.fp = open(self.filename, 'rb')
842 842 self.fp.seek(currentPointer)
843 843
844 844 self.fileSize = os.path.getsize(self.filename)
845 845 currentSize = self.fileSize - currentPointer
846 846
847 847 if (currentSize >= neededSize):
848 848 self.basicHeaderObj.read(self.fp)
849 849 return 1
850 850
851 851 if self.fileSize == self.fileSizeByHeader:
852 852 # self.flagEoF = True
853 853 return 0
854 854
855 855 print("[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries + 1))
856 856 time.sleep(self.delay)
857 857
858 858 return 0
859 859
860 860 def __setNewBlock(self):
861 861
862 862 if self.fp == None:
863 863 return 0
864 864
865 865 if self.flagIsNewFile:
866 866 self.lastUTTime = self.basicHeaderObj.utc
867 867 return 1
868 868
869 869 if self.realtime:
870 870 self.flagDiscontinuousBlock = 1
871 871 if not(self.setNextFile()):
872 872 return 0
873 873 else:
874 874 return 1
875 875
876 876 currentSize = self.fileSize - self.fp.tell()
877 877 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
878 878
879 879 if (currentSize >= neededSize):
880 880 self.basicHeaderObj.read(self.fp)
881 881 self.lastUTTime = self.basicHeaderObj.utc
882 882 return 1
883 883
884 884 if self.__waitNewBlock():
885 885 self.lastUTTime = self.basicHeaderObj.utc
886 886 return 1
887 887
888 888 if not(self.setNextFile()):
889 889 return 0
890 890
891 891 deltaTime = self.basicHeaderObj.utc - self.lastUTTime
892 892 self.lastUTTime = self.basicHeaderObj.utc
893 893
894 894 self.flagDiscontinuousBlock = 0
895 895
896 896 if deltaTime > self.maxTimeStep:
897 897 self.flagDiscontinuousBlock = 1
898 898
899 899 return 1
900 900
901 901 def readNextBlock(self):
902 902
903 903 while True:
904 904 if not(self.__setNewBlock()):
905 905 continue
906 906
907 907 if not(self.readBlock()):
908 908 return 0
909 909
910 910 self.getBasicHeader()
911 911
912 912 if not self.isDateTimeInRange(self.dataOut.datatime, self.startDate, self.endDate, self.startTime, self.endTime):
913 913 print("[Reading] Block No. %d/%d -> %s [Skipping]" % (self.nReadBlocks,
914 914 self.processingHeaderObj.dataBlocksPerFile,
915 915 self.dataOut.datatime.ctime()))
916 916 continue
917 917
918 918 break
919 919
920 920 if self.verbose:
921 921 print("[Reading] Block No. %d/%d -> %s" % (self.nReadBlocks,
922 922 self.processingHeaderObj.dataBlocksPerFile,
923 923 self.dataOut.datatime.ctime()))
924 924 return 1
925 925
926 926 def readFirstHeader(self):
927 927
928 928 self.basicHeaderObj.read(self.fp)
929 929 self.systemHeaderObj.read(self.fp)
930 930 self.radarControllerHeaderObj.read(self.fp)
931 931 self.processingHeaderObj.read(self.fp)
932 932 self.firstHeaderSize = self.basicHeaderObj.size
933 933
934 934 datatype = int(numpy.log2((self.processingHeaderObj.processFlags &
935 935 PROCFLAG.DATATYPE_MASK)) - numpy.log2(PROCFLAG.DATATYPE_CHAR))
936 936 if datatype == 0:
937 937 datatype_str = numpy.dtype([('real', '<i1'), ('imag', '<i1')])
938 938 elif datatype == 1:
939 939 datatype_str = numpy.dtype([('real', '<i2'), ('imag', '<i2')])
940 940 elif datatype == 2:
941 941 datatype_str = numpy.dtype([('real', '<i4'), ('imag', '<i4')])
942 942 elif datatype == 3:
943 943 datatype_str = numpy.dtype([('real', '<i8'), ('imag', '<i8')])
944 944 elif datatype == 4:
945 945 datatype_str = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
946 946 elif datatype == 5:
947 947 datatype_str = numpy.dtype([('real', '<f8'), ('imag', '<f8')])
948 948 else:
949 949 raise ValueError('Data type was not defined')
950 950
951 951 self.dtype = datatype_str
952 952 #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
953 953 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + \
954 954 self.firstHeaderSize + self.basicHeaderSize * \
955 955 (self.processingHeaderObj.dataBlocksPerFile - 1)
956 956 # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
957 957 # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
958 958 self.getBlockDimension()
959 959
960 960 def verifyFile(self, filename):
961 961
962 962 flag = True
963 963
964 964 try:
965 965 fp = open(filename, 'rb')
966 966 except IOError:
967 967 log.error("File {} can't be opened".format(filename), self.name)
968 968 return False
969 969
970 970 if self.online and self.waitDataBlock(0):
971 971 pass
972 972
973 973 basicHeaderObj = BasicHeader(LOCALTIME)
974 974 systemHeaderObj = SystemHeader()
975 975 radarControllerHeaderObj = RadarControllerHeader()
976 976 processingHeaderObj = ProcessingHeader()
977 977
978 978 if not(basicHeaderObj.read(fp)):
979 979 flag = False
980 980 if not(systemHeaderObj.read(fp)):
981 981 flag = False
982 982 if not(radarControllerHeaderObj.read(fp)):
983 983 flag = False
984 984 if not(processingHeaderObj.read(fp)):
985 985 flag = False
986 986 if not self.online:
987 987 dt1 = basicHeaderObj.datatime
988 988 pos = self.fileSize-processingHeaderObj.blockSize-24
989 989 if pos<0:
990 990 flag = False
991 991 log.error('Invalid size for file: {}'.format(self.filename), self.name)
992 992 else:
993 993 fp.seek(pos)
994 994 if not(basicHeaderObj.read(fp)):
995 995 flag = False
996 996 dt2 = basicHeaderObj.datatime
997 997 if not self.isDateTimeInRange(dt1, self.startDate, self.endDate, self.startTime, self.endTime) and not \
998 998 self.isDateTimeInRange(dt2, self.startDate, self.endDate, self.startTime, self.endTime):
999 999 flag = False
1000 1000
1001 1001 fp.close()
1002 1002 return flag
1003 1003
1004 1004 def findDatafiles(self, path, startDate=None, endDate=None, expLabel='', ext='.r', walk=True, include_path=False):
1005 1005
1006 1006 path_empty = True
1007 1007
1008 1008 dateList = []
1009 1009 pathList = []
1010 1010
1011 1011 multi_path = path.split(',')
1012 1012
1013 1013 if not walk:
1014 1014
1015 1015 for single_path in multi_path:
1016 1016
1017 1017 if not os.path.isdir(single_path):
1018 1018 continue
1019 1019
1020 1020 fileList = glob.glob1(single_path, "*" + ext)
1021 1021
1022 1022 if not fileList:
1023 1023 continue
1024 1024
1025 1025 path_empty = False
1026 1026
1027 1027 fileList.sort()
1028 1028
1029 1029 for thisFile in fileList:
1030 1030
1031 1031 if not os.path.isfile(os.path.join(single_path, thisFile)):
1032 1032 continue
1033 1033
1034 1034 if not isRadarFile(thisFile):
1035 1035 continue
1036 1036
1037 1037 if not isFileInDateRange(thisFile, startDate, endDate):
1038 1038 continue
1039 1039
1040 1040 thisDate = getDateFromRadarFile(thisFile)
1041 1041
1042 1042 if thisDate in dateList or single_path in pathList:
1043 1043 continue
1044 1044
1045 1045 dateList.append(thisDate)
1046 1046 pathList.append(single_path)
1047 1047
1048 1048 else:
1049 1049 for single_path in multi_path:
1050 1050
1051 1051 if not os.path.isdir(single_path):
1052 1052 continue
1053 1053
1054 1054 dirList = []
1055 1055
1056 1056 for thisPath in os.listdir(single_path):
1057 1057
1058 1058 if not os.path.isdir(os.path.join(single_path, thisPath)):
1059 1059 continue
1060 1060
1061 1061 if not isRadarFolder(thisPath):
1062 1062 continue
1063 1063
1064 1064 if not isFolderInDateRange(thisPath, startDate, endDate):
1065 1065 continue
1066 1066
1067 1067 dirList.append(thisPath)
1068 1068
1069 1069 if not dirList:
1070 1070 continue
1071 1071
1072 1072 dirList.sort()
1073 1073
1074 1074 for thisDir in dirList:
1075 1075
1076 1076 datapath = os.path.join(single_path, thisDir, expLabel)
1077 1077 fileList = glob.glob1(datapath, "*" + ext)
1078 1078
1079 1079 if not fileList:
1080 1080 continue
1081 1081
1082 1082 path_empty = False
1083 1083
1084 1084 thisDate = getDateFromRadarFolder(thisDir)
1085 1085
1086 1086 pathList.append(datapath)
1087 1087 dateList.append(thisDate)
1088 1088
1089 1089 dateList.sort()
1090 1090
1091 1091 if walk:
1092 1092 pattern_path = os.path.join(multi_path[0], "[dYYYYDDD]", expLabel)
1093 1093 else:
1094 1094 pattern_path = multi_path[0]
1095 1095
1096 1096 if path_empty:
1097 1097 raise schainpy.admin.SchainError("[Reading] No *%s files in %s for %s to %s" % (ext, pattern_path, startDate, endDate))
1098 1098 else:
1099 1099 if not dateList:
1100 1100 raise schainpy.admin.SchainError("[Reading] Date range selected invalid [%s - %s]: No *%s files in %s)" % (startDate, endDate, ext, path))
1101 1101
1102 1102 if include_path:
1103 1103 return dateList, pathList
1104 1104
1105 1105 return dateList
1106 1106
1107 1107 def setup(self, **kwargs):
1108 1108
1109 1109 self.set_kwargs(**kwargs)
1110 1110 if not self.ext.startswith('.'):
1111 1111 self.ext = '.{}'.format(self.ext)
1112 1112
1113 1113 if self.server is not None:
1114 1114 if 'tcp://' in self.server:
1115 1115 address = self.server
1116 1116 else:
1117 1117 address = 'ipc:///tmp/%s' % self.server
1118 1118 self.server = address
1119 1119 self.context = zmq.Context()
1120 1120 self.receiver = self.context.socket(zmq.PULL)
1121 1121 self.receiver.connect(self.server)
1122 1122 time.sleep(0.5)
1123 1123 print('[Starting] ReceiverData from {}'.format(self.server))
1124 1124 else:
1125 1125 self.server = None
1126 1126 if self.path == None:
1127 1127 raise ValueError("[Reading] The path is not valid")
1128 1128
1129 1129 if self.online:
1130 1130 log.log("[Reading] Searching files in online mode...", self.name)
1131 1131
1132 1132 for nTries in range(self.nTries):
1133 1133 fullpath = self.searchFilesOnLine(self.path, self.startDate,
1134 1134 self.endDate, self.expLabel, self.ext, self.walk,
1135 1135 self.filefmt, self.folderfmt)
1136 1136
1137 1137 try:
1138 1138 fullpath = next(fullpath)
1139 1139 except:
1140 1140 fullpath = None
1141 1141
1142 1142 if fullpath:
1143 1143 break
1144 1144
1145 1145 log.warning(
1146 1146 'Waiting {} sec for a valid file in {}: try {} ...'.format(
1147 1147 self.delay, self.path, nTries + 1),
1148 1148 self.name)
1149 1149 time.sleep(self.delay)
1150 1150
1151 1151 if not(fullpath):
1152 1152 raise schainpy.admin.SchainError(
1153 1153 'There isn\'t any valid file in {}'.format(self.path))
1154 1154
1155 1155 pathname, filename = os.path.split(fullpath)
1156 1156 self.year = int(filename[1:5])
1157 1157 self.doy = int(filename[5:8])
1158 1158 self.set = int(filename[8:11]) - 1
1159 1159 else:
1160 1160 log.log("Searching files in {}".format(self.path), self.name)
1161 1161 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
1162 1162 self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt)
1163 1163
1164 1164 self.setNextFile()
1165 1165
1166 1166 return
1167 1167
1168 1168 def getBasicHeader(self):
1169 1169
1170 1170 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond / \
1171 1171 1000. + self.profileIndex * self.radarControllerHeaderObj.ippSeconds
1172 1172
1173 1173 self.dataOut.flagDiscontinuousBlock = self.flagDiscontinuousBlock
1174 1174
1175 1175 self.dataOut.timeZone = self.basicHeaderObj.timeZone
1176 1176
1177 1177 self.dataOut.dstFlag = self.basicHeaderObj.dstFlag
1178 1178
1179 1179 self.dataOut.errorCount = self.basicHeaderObj.errorCount
1180 1180
1181 1181 self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime
1182 1182
1183 1183 self.dataOut.ippSeconds = self.radarControllerHeaderObj.ippSeconds / self.nTxs
1184 1184
1185 1185 def getFirstHeader(self):
1186 1186
1187 1187 raise NotImplementedError
1188 1188
1189 1189 def getData(self):
1190 1190
1191 1191 raise NotImplementedError
1192 1192
1193 1193 def hasNotDataInBuffer(self):
1194 1194
1195 1195 raise NotImplementedError
1196 1196
1197 1197 def readBlock(self):
1198 1198
1199 1199 raise NotImplementedError
1200 1200
1201 1201 def isEndProcess(self):
1202 1202
1203 1203 return self.flagNoMoreFiles
1204 1204
1205 1205 def printReadBlocks(self):
1206 1206
1207 1207 print("[Reading] Number of read blocks per file %04d" % self.nReadBlocks)
1208 1208
1209 1209 def printTotalBlocks(self):
1210 1210
1211 1211 print("[Reading] Number of read blocks %04d" % self.nTotalBlocks)
1212 1212
1213 1213 def run(self, **kwargs):
1214 1214 """
1215 1215
1216 1216 Arguments:
1217 1217 path :
1218 1218 startDate :
1219 1219 endDate :
1220 1220 startTime :
1221 1221 endTime :
1222 1222 set :
1223 1223 expLabel :
1224 1224 ext :
1225 1225 online :
1226 1226 delay :
1227 1227 walk :
1228 1228 getblock :
1229 1229 nTxs :
1230 1230 realtime :
1231 1231 blocksize :
1232 1232 blocktime :
1233 1233 skip :
1234 1234 cursor :
1235 1235 warnings :
1236 1236 server :
1237 1237 verbose :
1238 1238 format :
1239 1239 oneDDict :
1240 1240 twoDDict :
1241 1241 independentParam :
1242 1242 """
1243 1243
1244 1244 if not(self.isConfig):
1245 1245 self.setup(**kwargs)
1246 1246 self.isConfig = True
1247 1247 if self.server is None:
1248 1248 self.getData()
1249 1249 else:
1250 1250 self.getFromServer()
1251 1251
1252 1252
1253 1253 class JRODataWriter(Reader):
1254 1254
1255 1255 """
1256 1256 This class writes processed data to files (.r or .pdata). The data is always
1257 1257 written in blocks.
1258 1258 """
1259 1259
1260 1260 setFile = None
1261 1261 profilesPerBlock = None
1262 1262 blocksPerFile = None
1263 1263 nWriteBlocks = 0
1264 1264 fileDate = None
1265 1265
1266 1266 def __init__(self, dataOut=None):
1267 1267 raise NotImplementedError
1268 1268
1269 1269 def hasAllDataInBuffer(self):
1270 1270 raise NotImplementedError
1271 1271
1272 1272 def setBlockDimension(self):
1273 1273 raise NotImplementedError
1274 1274
1275 1275 def writeBlock(self):
1276 1276 raise NotImplementedError
1277 1277
1278 1278 def putData(self):
1279 1279 raise NotImplementedError
1280 1280
1281 1281 def getDtypeWidth(self):
1282 1282
1283 1283 dtype_index = get_dtype_index(self.dtype)
1284 1284 dtype_width = get_dtype_width(dtype_index)
1285 1285
1286 1286 return dtype_width
1287 1287
1288 1288 def getProcessFlags(self):
1289 1289
1290 1290 processFlags = 0
1291 1291
1292 1292 dtype_index = get_dtype_index(self.dtype)
1293 1293 procflag_dtype = get_procflag_dtype(dtype_index)
1294 1294
1295 1295 processFlags += procflag_dtype
1296 1296
1297 1297 if self.dataOut.flagDecodeData:
1298 1298 processFlags += PROCFLAG.DECODE_DATA
1299 1299
1300 1300 if self.dataOut.flagDeflipData:
1301 1301 processFlags += PROCFLAG.DEFLIP_DATA
1302 1302
1303 1303 if self.dataOut.code is not None:
1304 1304 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
1305 1305
1306 1306 if self.dataOut.nCohInt > 1:
1307 1307 processFlags += PROCFLAG.COHERENT_INTEGRATION
1308 1308
1309 1309 if self.dataOut.type == "Spectra":
1310 1310 if self.dataOut.nIncohInt > 1:
1311 1311 processFlags += PROCFLAG.INCOHERENT_INTEGRATION
1312 1312
1313 1313 if self.dataOut.data_dc is not None:
1314 1314 processFlags += PROCFLAG.SAVE_CHANNELS_DC
1315 1315
1316 1316 if self.dataOut.flagShiftFFT:
1317 1317 processFlags += PROCFLAG.SHIFT_FFT_DATA
1318 1318
1319 1319 return processFlags
1320 1320
1321 1321 def setBasicHeader(self):
1322 1322
1323 1323 self.basicHeaderObj.size = self.basicHeaderSize # bytes
1324 1324 self.basicHeaderObj.version = self.versionFile
1325 1325 self.basicHeaderObj.dataBlock = self.nTotalBlocks
1326 1326 utc = numpy.floor(self.dataOut.utctime)
1327 1327 milisecond = (self.dataOut.utctime - utc) * 1000.0
1328 1328 self.basicHeaderObj.utc = utc
1329 1329 self.basicHeaderObj.miliSecond = milisecond
1330 1330 self.basicHeaderObj.timeZone = self.dataOut.timeZone
1331 1331 self.basicHeaderObj.dstFlag = self.dataOut.dstFlag
1332 1332 self.basicHeaderObj.errorCount = self.dataOut.errorCount
1333 1333
1334 1334 def setFirstHeader(self):
1335 1335 """
1336 1336 Gets a copy of the First Header
1337 1337
1338 1338 Affected:
1339 1339
1340 1340 self.basicHeaderObj
1341 1341 self.systemHeaderObj
1342 1342 self.radarControllerHeaderObj
1343 1343 self.processingHeaderObj
1344 1344
1345 1345 Return:
1346 1346 None
1347 1347 """
1348 1348
1349 1349 raise NotImplementedError
1350 1350
1351 1351 def __writeFirstHeader(self):
1352 1352 """
1353 1353 Writes the first header of the file, i.e. the Basic header and the Long header (SystemHeader, RadarControllerHeader, ProcessingHeader)
1354 1354
1355 1355 Affected:
1356 1356 __dataType
1357 1357
1358 1358 Return:
1359 1359 None
1360 1360 """
1361 1361
1362 1362 # COMPUTE PARAMETERS
1363 1363
1364 1364 sizeLongHeader = self.systemHeaderObj.size + \
1365 1365 self.radarControllerHeaderObj.size + self.processingHeaderObj.size
1366 1366 self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader
1367 1367
1368 1368 self.basicHeaderObj.write(self.fp)
1369 1369 self.systemHeaderObj.write(self.fp)
1370 1370 self.radarControllerHeaderObj.write(self.fp)
1371 1371 self.processingHeaderObj.write(self.fp)
1372 1372
1373 1373 def __setNewBlock(self):
1374 1374 """
1375 1375 If this is a new file it writes the First Header, otherwise it writes only the Basic Header
1376 1376
1377 1377 Return:
1378 1378 0 : if nothing could be written
1379 1379 1 : if the Basic or the First Header was written
1380 1380 """
1381 1381 if self.fp == None:
1382 1382 self.setNextFile()
1383 1383
1384 1384 if self.flagIsNewFile:
1385 1385 return 1
1386 1386
1387 1387 if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
1388 1388 self.basicHeaderObj.write(self.fp)
1389 1389 return 1
1390 1390
1391 1391 if not(self.setNextFile()):
1392 1392 return 0
1393 1393
1394 1394 return 1
1395 1395
1396 1396 def writeNextBlock(self):
1397 1397 """
1398 1398 Selects the next data block and writes it to a file
1399 1399
1400 1400 Return:
1401 1401 0 : if the data block could not be written
1402 1402 1 : if the data block was written
1403 1403 """
1404 1404 if not(self.__setNewBlock()):
1405 1405 return 0
1406 1406
1407 1407 self.writeBlock()
1408 1408
1409 1409 print("[Writing] Block No. %d/%d" % (self.blockIndex,
1410 1410 self.processingHeaderObj.dataBlocksPerFile))
1411 1411
1412 1412 return 1
1413 1413
1414 1414 def setNextFile(self):
1415 1415 """Determina el siguiente file que sera escrito
1416 1416
1417 1417 Affected:
1418 1418 self.filename
1419 1419 self.subfolder
1420 1420 self.fp
1421 1421 self.setFile
1422 1422 self.flagIsNewFile
1423 1423
1424 1424 Return:
1425 1425 0 : if the file cannot be written
1426 1426 1 : if the file is ready to be written
1427 1427 """
1428 1428 ext = self.ext
1429 1429 path = self.path
1430 1430
1431 1431 if self.fp != None:
1432 1432 self.fp.close()
1433 1433
1434
1434 1435 if not os.path.exists(path):
1435 1436 os.mkdir(path)
1436 1437
1437 1438 timeTuple = time.localtime(self.dataOut.utctime)
1438 1439 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year, timeTuple.tm_yday)
1439 1440
1440 1441 fullpath = os.path.join(path, subfolder)
1441 1442 setFile = self.setFile
1442 1443
1443 1444 if not(os.path.exists(fullpath)):
1444 1445 os.mkdir(fullpath)
1445 1446 setFile = -1 # initialize the file-set counter
1446 1447 else:
1447 1448 filesList = os.listdir(fullpath)
1448 1449 if len(filesList) > 0:
1449 1450 filesList = sorted(filesList, key=str.lower)
1450 1451 filen = filesList[-1]
1451 1452 # the filename must have the following format
1452 1453 # 0 1234 567 89A BCDE (hex)
1453 1454 # x YYYY DDD SSS .ext
1454 1455 if isNumber(filen[8:11]):
1455 1456 # initialize the set counter from the last file's set number
1456 1457 setFile = int(filen[8:11])
1457 1458 else:
1458 1459 setFile = -1
1459 1460 else:
1460 1461 setFile = -1 # initialize the file-set counter
1461 1462
1462 1463 setFile += 1
1463 1464
1464 1465 # If this is a new day it resets some values
1465 1466 if self.dataOut.datatime.date() > self.fileDate:
1466 1467 setFile = 0
1467 1468 self.nTotalBlocks = 0
1468 1469
1469 1470 filen = '{}{:04d}{:03d}{:03d}{}'.format(
1470 1471 self.optchar, timeTuple.tm_year, timeTuple.tm_yday, setFile, ext)
1471 1472
1472 1473 filename = os.path.join(path, subfolder, filen)
1473 1474
1474 1475 fp = open(filename, 'wb')
1475 1476
1476 1477 self.blockIndex = 0
1477 1478 self.filename = filename
1478 1479 self.subfolder = subfolder
1479 1480 self.fp = fp
1480 1481 self.setFile = setFile
1481 1482 self.flagIsNewFile = 1
1482 1483 self.fileDate = self.dataOut.datatime.date()
1483 1484 self.setFirstHeader()
1484 1485
1485 1486 print('[Writing] Opening file: %s' % self.filename)
1486 1487
1487 1488 self.__writeFirstHeader()
1488 1489
1489 1490 return 1
1490 1491
1491 1492 def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=64, set=None, ext=None, datatype=4):
1492 1493 """
1493 1494 Sets the format in which the data will be saved and writes the First Header
1494 1495
1495 1496 Inputs:
1496 1497 path : directory where data will be saved
1497 1498 profilesPerBlock : number of profiles per block
1498 1499 set : initial file set
1499 1500 datatype : An integer number that defines data type:
1500 1501 0 : int8 (1 byte)
1501 1502 1 : int16 (2 bytes)
1502 1503 2 : int32 (4 bytes)
1503 1504 3 : int64 (8 bytes)
1504 1505 4 : float32 (4 bytes)
1505 1506 5 : double64 (8 bytes)
1506 1507
1507 1508 Return:
1508 1509 0 : if the setup was not successful
1509 1510 1 : if the setup was successful
1510 1511 """
1511 1512
1512 1513 if ext == None:
1513 1514 ext = self.ext
1514 1515
1515 1516 self.ext = ext.lower()
1516 1517
1517 1518 self.path = path
1518 1519
1519 1520 if set is None:
1520 1521 self.setFile = -1
1521 1522 else:
1522 1523 self.setFile = set - 1
1523 1524
1524 1525 self.blocksPerFile = blocksPerFile
1525 1526 self.profilesPerBlock = profilesPerBlock
1526 1527 self.dataOut = dataOut
1527 1528 self.fileDate = self.dataOut.datatime.date()
1528 1529 self.dtype = self.dataOut.dtype
1529 1530
1530 1531 if datatype is not None:
1531 1532 self.dtype = get_numpy_dtype(datatype)
1532 1533
1533 1534 if not(self.setNextFile()):
1534 1535 print("[Writing] There isn't a next file")
1535 1536 return 0
1536 1537
1537 1538 self.setBlockDimension()
1538 1539
1539 1540 return 1
1540 1541
1541 1542 def run(self, dataOut, path, blocksPerFile=100, profilesPerBlock=64, set=None, ext=None, datatype=4, **kwargs):
1542 1543
1543 1544 if not(self.isConfig):
1544 1545
1545 1546 self.setup(dataOut, path, blocksPerFile, profilesPerBlock=profilesPerBlock,
1546 1547 set=set, ext=ext, datatype=datatype, **kwargs)
1547 1548 self.isConfig = True
1548 1549
1549 1550 self.dataOut = dataOut
1550 1551 self.putData()
1551 1552 return self.dataOut
1552 1553
1553 1554 @MPDecorator
1554 1555 class printInfo(Operation):
1555 1556
1556 1557 def __init__(self):
1557 1558
1558 1559 Operation.__init__(self)
1559 1560 self.__printInfo = True
1560 1561
1561 1562 def run(self, dataOut, headers = ['systemHeaderObj', 'radarControllerHeaderObj', 'processingHeaderObj']):
1562 1563 if self.__printInfo == False:
1563 1564 return
1564 1565
1565 1566 for header in headers:
1566 1567 if hasattr(dataOut, header):
1567 1568 obj = getattr(dataOut, header)
1568 1569 if hasattr(obj, 'printInfo'):
1569 1570 obj.printInfo()
1570 1571 else:
1571 1572 print(obj)
1572 1573 else:
1573 1574 log.warning('Header {} Not found in object'.format(header))
1574 1575
1575 1576 self.__printInfo = False
@@ -1,661 +1,659
1 '''
1 '''
2 2 Created on Set 9, 2015
3 3
4 4 @author: roj-idl71 Karim Kuyeng
5 5
6 6 @update: 2021, Joab Apaza
7 7 '''
8 8
9 9 import os
10 10 import sys
11 11 import glob
12 12 import fnmatch
13 13 import datetime
14 14 import time
15 15 import re
16 16 import h5py
17 17 import numpy
18 18
19 19 try:
20 20 from gevent import sleep
21 21 except:
22 22 from time import sleep
23 23
24 24 from schainpy.model.data.jroheaderIO import RadarControllerHeader, SystemHeader
25 25 from schainpy.model.data.jrodata import Voltage
26 26 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
27 27 from numpy import imag
28 28
29 29
30 30 class AMISRReader(ProcessingUnit):
31 31 '''
32 32 classdocs
33 33 '''
34 34
35 35 def __init__(self):
36 36 '''
37 37 Constructor
38 38 '''
39 39
40 40 ProcessingUnit.__init__(self)
41 41
42 42 self.set = None
43 43 self.subset = None
44 44 self.extension_file = '.h5'
45 45 self.dtc_str = 'dtc'
46 46 self.dtc_id = 0
47 47 self.status = True
48 48 self.isConfig = False
49 49 self.dirnameList = []
50 50 self.filenameList = []
51 51 self.fileIndex = None
52 52 self.flagNoMoreFiles = False
53 53 self.flagIsNewFile = 0
54 54 self.filename = ''
55 55 self.amisrFilePointer = None
56 56 self.realBeamCode = []
57 57 self.beamCodeMap = None
58 58 self.azimuthList = []
59 59 self.elevationList = []
60 60 self.dataShape = None
61 61
62 62
63 63
64 64 self.profileIndex = 0
65 65
66 66
67 67 self.beamCodeByFrame = None
68 68 self.radacTimeByFrame = None
69 69
70 70 self.dataset = None
71 71
72 72 self.__firstFile = True
73 73
74 74 self.buffer = None
75 75
76 76 self.timezone = 'ut'
77 77
78 78 self.__waitForNewFile = 20
79 79 self.__filename_online = None
80 80 #Is it really necessary to create the output object in the initializer?
81 81 self.dataOut = Voltage()
82 82 self.dataOut.error=False
83 83
84 84
85 85 def setup(self,path=None,
86 86 startDate=None,
87 87 endDate=None,
88 88 startTime=None,
89 89 endTime=None,
90 90 walk=True,
91 91 timezone='ut',
92 92 all=0,
93 93 code = None,
94 94 nCode = 0,
95 95 nBaud = 0,
96 96 online=False):
97 97
98 98
99 99
100 100 self.timezone = timezone
101 101 self.all = all
102 102 self.online = online
103 103
104 104 self.code = code
105 105 self.nCode = int(nCode)
106 106 self.nBaud = int(nBaud)
107 107
108 108
109 109
110 110 #self.findFiles()
111 111 if not(online):
112 112 #Busqueda de archivos offline
113 113 self.searchFilesOffLine(path, startDate, endDate, startTime, endTime, walk)
114 114 else:
115 115 self.searchFilesOnLine(path, startDate, endDate, startTime,endTime,walk)
116 116
117 117 if not(self.filenameList):
118 118 print("There is no files into the folder: %s"%(path))
119 119 sys.exit()
120 120
121 121 self.fileIndex = 0
122 122
123 123 self.readNextFile(online)
124 124
125 125 '''
126 126 Add code
127 127 '''
128 128 self.isConfig = True
129 129 # print("Setup Done")
130 130 pass
131 131
132 132
133 133 def readAMISRHeader(self,fp):
134 134
135 135 if self.isConfig and (not self.flagNoMoreFiles):
136 136 newShape = fp.get('Raw11/Data/Samples/Data').shape[1:]
137 137 if self.dataShape != newShape and newShape != None:
138 138 print("\nNEW FILE HAS A DIFFERENT SHAPE")
139 139 print(self.dataShape,newShape,"\n")
140 140 return 0
141 141 else:
142 142 self.dataShape = fp.get('Raw11/Data/Samples/Data').shape[1:]
143 143
144 144
145 145 header = 'Raw11/Data/RadacHeader'
146 146 self.beamCodeByPulse = fp.get(header+'/BeamCode') # LIST OF BEAMS PER PROFILE, TO BE USED ON REARRANGE
147 147 if (self.startDate> datetime.date(2021, 7, 15)): #The beam-pointing extraction format changed on the 17th
148 148 self.beamcodeFile = fp['Setup/Beamcodefile'][()].decode()
149 149 self.trueBeams = self.beamcodeFile.split("\n")
150 150 self.trueBeams.pop()#remove last
151 151 [self.realBeamCode.append(x) for x in self.trueBeams if x not in self.realBeamCode]
152 152 self.beamCode = [int(x, 16) for x in self.realBeamCode]
153 153 else:
154 154 _beamCode= fp.get('Raw11/Data/Beamcodes') #use the method prior to the pointing-format change
155 155 self.beamCode = _beamCode[0,:]
156 156
157 157 if self.beamCodeMap == None:
158 158 self.beamCodeMap = fp['Setup/BeamcodeMap']
159 159 for beam in self.beamCode:
160 160 beamAziElev = numpy.where(self.beamCodeMap[:,0]==beam)
161 161 beamAziElev = beamAziElev[0].squeeze()
162 162 self.azimuthList.append(self.beamCodeMap[beamAziElev,1])
163 163 self.elevationList.append(self.beamCodeMap[beamAziElev,2])
164 164 #print("Beamssss: ",self.beamCodeMap[beamAziElev,1],self.beamCodeMap[beamAziElev,2])
165 165 #print(self.beamCode)
166 166 #self.code = fp.get(header+'/Code') # NOT USE FOR THIS
167 167 self.frameCount = fp.get(header+'/FrameCount')# NOT USE FOR THIS
168 168 self.modeGroup = fp.get(header+'/ModeGroup')# NOT USE FOR THIS
169 169 self.nsamplesPulse = fp.get(header+'/NSamplesPulse')# TO GET NSA OR USING DATA FOR THAT
170 170 self.pulseCount = fp.get(header+'/PulseCount')# NOT USE FOR THIS
171 171 self.radacTime = fp.get(header+'/RadacTime')# 1st TIME ON FILE ANDE CALCULATE THE REST WITH IPP*nindexprofile
172 172 self.timeCount = fp.get(header+'/TimeCount')# NOT USE FOR THIS
173 173 self.timeStatus = fp.get(header+'/TimeStatus')# NOT USE FOR THIS
174 174 self.rangeFromFile = fp.get('Raw11/Data/Samples/Range')
175 175 self.frequency = fp.get('Rx/Frequency')
176 176 txAus = fp.get('Raw11/Data/Pulsewidth')
177 177
178 178
179 179 self.nblocks = self.pulseCount.shape[0] #nblocks
180 180
181 181 self.nprofiles = self.pulseCount.shape[1] #nprofile
182 182 self.nsa = self.nsamplesPulse[0,0] #ngates
183 183 self.nchannels = len(self.beamCode)
184 184 self.ippSeconds = (self.radacTime[0][1] -self.radacTime[0][0]) #Ipp in seconds
185 185 #self.__waitForNewFile = self.nblocks # wait depending on the number of blocks since each block is 1 sec
186 186 self.__waitForNewFile = self.nblocks * self.nprofiles * self.ippSeconds # wait until new file is created
187 187
188 188 #filling radar controller header parameters
189 189 self.__ippKm = self.ippSeconds *.15*1e6 # in km
190 190 self.__txA = (txAus.value)*.15 #(ipp[us]*.15km/1us) in km
191 191 self.__txB = 0
192 192 nWindows=1
193 193 self.__nSamples = self.nsa
194 194 self.__firstHeight = self.rangeFromFile[0][0]/1000 #in km
195 195 self.__deltaHeight = (self.rangeFromFile[0][1] - self.rangeFromFile[0][0])/1000
196 196
197 197 #for now, until we understand why the saved code is different (code is included even though it is not in the tuf file)
198 198 #self.__codeType = 0
199 199 # self.__nCode = None
200 200 # self.__nBaud = None
201 201 self.__code = self.code
202 202 self.__codeType = 0
203 203 if self.code != None:
204 204 self.__codeType = 1
205 205 self.__nCode = self.nCode
206 206 self.__nBaud = self.nBaud
207 207 #self.__code = 0
208 208
209 209 #filling system header parameters
210 210 self.__nSamples = self.nsa
211 211 self.newProfiles = self.nprofiles/self.nchannels
212 212 self.__channelList = list(range(self.nchannels))
213 213
214 214 self.__frequency = self.frequency[0][0]
215 215
216 216
217 217 return 1
218 218
219 219
220 220 def createBuffers(self):
221 221
222 222 pass
223 223
224 224 def __setParameters(self,path='', startDate='',endDate='',startTime='', endTime='', walk=''):
225 225 self.path = path
226 226 self.startDate = startDate
227 227 self.endDate = endDate
228 228 self.startTime = startTime
229 229 self.endTime = endTime
230 230 self.walk = walk
231 231
232 232 def __checkPath(self):
233 233 if os.path.exists(self.path):
234 234 self.status = 1
235 235 else:
236 236 self.status = 0
237 237 print('Path: %s does not exist'%self.path)
238 238
239 239 return
240 240
241 241
242 242 def __selDates(self, amisr_dirname_format):
243 243 try:
244 244 year = int(amisr_dirname_format[0:4])
245 245 month = int(amisr_dirname_format[4:6])
246 246 dom = int(amisr_dirname_format[6:8])
247 247 thisDate = datetime.date(year,month,dom)
248
249 if (thisDate>=self.startDate and thisDate <= self.endDate):
248 #one extra day of margin; the results are filtered by date and time later anyway
249 if (thisDate>=(self.startDate - datetime.timedelta(days=1)) and thisDate <= (self.endDate)+ datetime.timedelta(days=1)):
250 250 return amisr_dirname_format
251 251 except:
252 252 return None
253 253
254 254
255 255 def __findDataForDates(self,online=False):
256 256
257 257 if not(self.status):
258 258 return None
259 259
260 260 pat = '\d+.\d+'
261 261 dirnameList = [re.search(pat,x) for x in os.listdir(self.path)]
262 262 dirnameList = [x for x in dirnameList if x!=None]
263 263 dirnameList = [x.string for x in dirnameList]
264 264 if not(online):
265 265 dirnameList = [self.__selDates(x) for x in dirnameList]
266 266 dirnameList = [x for x in dirnameList if x!=None]
267 267 if len(dirnameList)>0:
268 268 self.status = 1
269 269 self.dirnameList = dirnameList
270 270 self.dirnameList.sort()
271 271 else:
272 272 self.status = 0
273 273 return None
274 274
275 275 def __getTimeFromData(self):
276 276 startDateTime_Reader = datetime.datetime.combine(self.startDate,self.startTime)
277 277 endDateTime_Reader = datetime.datetime.combine(self.endDate,self.endTime)
278 278
279 279 print('Filtering Files from %s to %s'%(startDateTime_Reader, endDateTime_Reader))
280 280 print('........................................')
281 281 filter_filenameList = []
282 282 self.filenameList.sort()
283 283 #for i in range(len(self.filenameList)-1):
284 284 for i in range(len(self.filenameList)):
285 285 filename = self.filenameList[i]
286 286 fp = h5py.File(filename,'r')
287 287 time_str = fp.get('Time/RadacTimeString')
288 288
289 289 startDateTimeStr_File = time_str[0][0].decode('UTF-8').split('.')[0]
290 290 #startDateTimeStr_File = "2019-12-16 09:21:11"
291 291 junk = time.strptime(startDateTimeStr_File, '%Y-%m-%d %H:%M:%S')
292 292 startDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec)
293 293
294 294 #endDateTimeStr_File = "2019-12-16 11:10:11"
295 295 endDateTimeStr_File = time_str[-1][-1].decode('UTF-8').split('.')[0]
296 296 junk = time.strptime(endDateTimeStr_File, '%Y-%m-%d %H:%M:%S')
297 297 endDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec)
298 298
299 299 fp.close()
300 300
301 301 #print("check time", startDateTime_File)
302 302 if self.timezone == 'lt':
303 303 startDateTime_File = startDateTime_File - datetime.timedelta(minutes = 300)
304 304 endDateTime_File = endDateTime_File - datetime.timedelta(minutes = 300)
305 if (endDateTime_File>=startDateTime_Reader and endDateTime_File<=endDateTime_Reader):
305 if (startDateTime_File >=startDateTime_Reader and endDateTime_File<=endDateTime_Reader):
306 306 filter_filenameList.append(filename)
307 307
308 if (endDateTime_File>endDateTime_Reader):
308 if (startDateTime_File>endDateTime_Reader):
309 309 break
310 310
311 311
312 312 filter_filenameList.sort()
313 313 self.filenameList = filter_filenameList
314
314 315 return 1
315 316
316 317 def __filterByGlob1(self, dirName):
317 318 filter_files = glob.glob1(dirName, '*.*%s'%self.extension_file)
318 319 filter_files.sort()
319 320 filterDict = {}
320 321 filterDict.setdefault(dirName)
321 322 filterDict[dirName] = filter_files
322 323 return filterDict
323 324
324 325 def __getFilenameList(self, fileListInKeys, dirList):
325 326 for value in fileListInKeys:
326 327 dirName = list(value.keys())[0]
327 328 for file in value[dirName]:
328 329 filename = os.path.join(dirName, file)
329 330 self.filenameList.append(filename)
330 331
331 332
332 333 def __selectDataForTimes(self, online=False):
334 335 #the time filter is not implemented here yet
334 335 if not(self.status):
335 336 return None
336 337
337 338 dirList = [os.path.join(self.path,x) for x in self.dirnameList]
338
339 339 fileListInKeys = [self.__filterByGlob1(x) for x in dirList]
340
341 340 self.__getFilenameList(fileListInKeys, dirList)
342 341 if not(online):
344 343 #filter by time
344 343 if not(self.all):
345 344 self.__getTimeFromData()
346 345
347 346 if len(self.filenameList)>0:
348 347 self.status = 1
349 348 self.filenameList.sort()
350 349 else:
351 350 self.status = 0
352 351 return None
353 352
354 353 else:
355 354 #get the last file - 1
356 355 self.filenameList = [self.filenameList[-2]]
357 356 new_dirnameList = []
358 357 for dirname in self.dirnameList:
359 358 junk = numpy.array([dirname in x for x in self.filenameList])
360 359 junk_sum = junk.sum()
361 360 if junk_sum > 0:
362 361 new_dirnameList.append(dirname)
363 362 self.dirnameList = new_dirnameList
364 363 return 1
365 364
366 365 def searchFilesOnLine(self, path, startDate, endDate, startTime=datetime.time(0,0,0),
367 366 endTime=datetime.time(23,59,59),walk=True):
368 367
369 368 if endDate ==None:
370 369 startDate = datetime.datetime.utcnow().date()
371 370 endDate = datetime.datetime.utcnow().date()
372 371
373 372 self.__setParameters(path=path, startDate=startDate, endDate=endDate,startTime = startTime,endTime=endTime, walk=walk)
374 373
375 374 self.__checkPath()
376 375
377 376 self.__findDataForDates(online=True)
378 377
379 378 self.dirnameList = [self.dirnameList[-1]]
380 379
381 380 self.__selectDataForTimes(online=True)
382 381
383 382 return
384 383
385 384
386 385 def searchFilesOffLine(self,
387 386 path,
388 387 startDate,
389 388 endDate,
390 389 startTime=datetime.time(0,0,0),
391 390 endTime=datetime.time(23,59,59),
392 391 walk=True):
393 392
394 393 self.__setParameters(path, startDate, endDate, startTime, endTime, walk)
395 394
396 395 self.__checkPath()
397 396
398 397 self.__findDataForDates()
399 398
400 399 self.__selectDataForTimes()
401 400
402 401 for i in range(len(self.filenameList)):
403 402 print("%s" %(self.filenameList[i]))
404 403
405 404 return
406 405
407 406 def __setNextFileOffline(self):
408 407
409 408 try:
410 409 self.filename = self.filenameList[self.fileIndex]
411 410 self.amisrFilePointer = h5py.File(self.filename,'r')
412 411 self.fileIndex += 1
413 412 except:
414 413 self.flagNoMoreFiles = 1
415 414 print("No more Files")
416 415 return 0
417 416
418 417 self.flagIsNewFile = 1
419 418 print("Setting the file: %s"%self.filename)
420 419
421 420 return 1
422 421
423 422
424 423 def __setNextFileOnline(self):
425 424 filename = self.filenameList[0]
426 425 if self.__filename_online != None:
427 426 self.__selectDataForTimes(online=True)
428 427 filename = self.filenameList[0]
429 428 wait = 0
430 429 self.__waitForNewFile=300 ## DEBUG:
431 430 while self.__filename_online == filename:
432 431 print('waiting %d seconds to get a new file...'%(self.__waitForNewFile))
433 432 if wait == 5:
434 433 self.flagNoMoreFiles = 1
435 434 return 0
436 435 sleep(self.__waitForNewFile)
437 436 self.__selectDataForTimes(online=True)
438 437 filename = self.filenameList[0]
439 438 wait += 1
440 439
441 440 self.__filename_online = filename
442 441
443 442 self.amisrFilePointer = h5py.File(filename,'r')
444 443 self.flagIsNewFile = 1
445 444 self.filename = filename
446 445 print("Setting the file: %s"%self.filename)
447 446 return 1
448 447
449 448
450 449 def readData(self):
451 450 buffer = self.amisrFilePointer.get('Raw11/Data/Samples/Data')
452 451 re = buffer[:,:,:,0]
453 452 im = buffer[:,:,:,1]
454 453 dataset = re + im*1j
455 454
456 455 self.radacTime = self.amisrFilePointer.get('Raw11/Data/RadacHeader/RadacTime')
457 456 timeset = self.radacTime[:,0]
458 457
459 458 return dataset,timeset
460 459
461 460 def reshapeData(self):
462 461 #self.beamCodeByPulse, self.beamCode, self.nblocks, self.nprofiles, self.nsa,
463 462 channels = self.beamCodeByPulse[0,:]
464 463 nchan = self.nchannels
465 464 #self.newProfiles = self.nprofiles/nchan #must be defined on filljroheader
466 465 nblocks = self.nblocks
467 466 nsamples = self.nsa
468 467
469 468 #Dimensions : nChannels, nProfiles, nSamples
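# Shape bookkeeping (descriptive only): self.dataset is (nblocks, nprofiles, nsa);
# it is regrouped per beam into (nblocks, nchan, newProfiles, nsa), transposed so
# channels come first, and finally flattened to (nchan, nblocks*newProfiles, nsa).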
470 469 new_block = numpy.empty((nblocks, nchan, numpy.int_(self.newProfiles), nsamples), dtype="complex64")
471 470 ############################################
472 471
473 472 for thisChannel in range(nchan):
474 473 new_block[:,thisChannel,:,:] = self.dataset[:,numpy.where(channels==self.beamCode[thisChannel])[0],:]
475 474
476 475
477 476 new_block = numpy.transpose(new_block, (1,0,2,3))
478 477 new_block = numpy.reshape(new_block, (nchan,-1, nsamples))
479 478
480 479 return new_block
481 480
482 481 def updateIndexes(self):
483 482
484 483 pass
485 484
486 485 def fillJROHeader(self):
487 486
488 487 #fill radar controller header
489 488 self.dataOut.radarControllerHeaderObj = RadarControllerHeader(ipp=self.__ippKm,
490 489 txA=self.__txA,
491 490 txB=0,
492 491 nWindows=1,
493 492 nHeights=self.__nSamples,
494 493 firstHeight=self.__firstHeight,
495 494 deltaHeight=self.__deltaHeight,
496 495 codeType=self.__codeType,
497 496 nCode=self.__nCode, nBaud=self.__nBaud,
498 497 code = self.__code,
499 498 fClock=1)
500 499
501 500 #fill system header
502 501 self.dataOut.systemHeaderObj = SystemHeader(nSamples=self.__nSamples,
503 502 nProfiles=self.newProfiles,
504 503 nChannels=len(self.__channelList),
505 504 adcResolution=14,
506 505 pciDioBusWidth=32)
507 506
508 507 self.dataOut.type = "Voltage"
509 508 self.dataOut.data = None
510 509 self.dataOut.dtype = numpy.dtype([('real','<i8'),('imag','<i8')])
511 510 # self.dataOut.nChannels = 0
512 511
513 512 # self.dataOut.nHeights = 0
514 513
515 514 self.dataOut.nProfiles = self.newProfiles*self.nblocks
516 515 #self.dataOut.heightList = self.__firstHeigth + numpy.arange(self.__nSamples, dtype = numpy.float)*self.__deltaHeigth
517 516 ranges = numpy.reshape(self.rangeFromFile.value,(-1))
518 517 self.dataOut.heightList = ranges/1000.0 #km
519 518 self.dataOut.channelList = self.__channelList
520 519 self.dataOut.blocksize = self.dataOut.nChannels * self.dataOut.nHeights
521 520
522 521 # self.dataOut.channelIndexList = None
523 522
524 523
525 524 self.dataOut.azimuthList = numpy.array(self.azimuthList)
526 525 self.dataOut.elevationList = numpy.array(self.elevationList)
527 526 self.dataOut.codeList = numpy.array(self.beamCode)
528 527 #print(self.dataOut.elevationList)
529 528 self.dataOut.flagNoData = True
530 529
531 530 #Set to TRUE if the data is discontinuous
532 531 self.dataOut.flagDiscontinuousBlock = False
533 532
534 533 self.dataOut.utctime = None
535 534
536 535 #self.dataOut.timeZone = -5 #self.__timezone/60 #timezone like jroheader, difference in minutes between UTC and localtime
537 536 if self.timezone == 'lt':
538 537 self.dataOut.timeZone = time.timezone / 60. #get the timezone in minutes
539 538 else:
540 539 self.dataOut.timeZone = 0 #by default time is UTC
541 540
542 541 self.dataOut.dstFlag = 0
543 542 self.dataOut.errorCount = 0
544 543 self.dataOut.nCohInt = 1
545 544 self.dataOut.flagDecodeData = False #assume the data is already decoded
546 545 self.dataOut.flagDeflipData = False #assume the data has not been flipped
547 546 self.dataOut.flagShiftFFT = False
548 547 self.dataOut.ippSeconds = self.ippSeconds
549 548
550 549 #Time interval between profiles
551 550 #self.dataOut.timeInterval = self.dataOut.ippSeconds * self.dataOut.nCohInt
552 551
553 552 self.dataOut.frequency = self.__frequency
554 553 self.dataOut.realtime = self.online
555 554 pass
556 555
557 556 def readNextFile(self,online=False):
558 557
559 558 if not(online):
560 559 newFile = self.__setNextFileOffline()
561 560 else:
562 561 newFile = self.__setNextFileOnline()
563 562
564 563 if not(newFile):
565 564 self.dataOut.error = True
566 565 return 0
567 566
568 567 if not self.readAMISRHeader(self.amisrFilePointer):
569 568 self.dataOut.error = True
570 569 return 0
571 570
572 571 self.createBuffers()
573 572 self.fillJROHeader()
574 573
575 574 #self.__firstFile = False
576 575
577 576
578 577
579 578 self.dataset,self.timeset = self.readData()
580 579
581 580 if self.endDate!=None:
582 581 endDateTime_Reader = datetime.datetime.combine(self.endDate,self.endTime)
583 582 time_str = self.amisrFilePointer.get('Time/RadacTimeString')
584 583 startDateTimeStr_File = time_str[0][0].decode('UTF-8').split('.')[0]
585 584 junk = time.strptime(startDateTimeStr_File, '%Y-%m-%d %H:%M:%S')
586 585 startDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec)
587 586 if self.timezone == 'lt':
588 587 startDateTime_File = startDateTime_File - datetime.timedelta(minutes = 300)
589 588 if (startDateTime_File>endDateTime_Reader):
590 589 return 0
591 590
592 591 self.jrodataset = self.reshapeData()
593 592 #----self.updateIndexes()
594 593 self.profileIndex = 0
595 594
596 595 return 1
597 596
598 597
599 598 def __hasNotDataInBuffer(self):
600 599 if self.profileIndex >= (self.newProfiles*self.nblocks):
601 600 return 1
602 601 return 0
603 602
604 603
605 604 def getData(self):
606 605
607 606 if self.flagNoMoreFiles:
608 607 self.dataOut.flagNoData = True
609 608 return 0
610 609
611 610 if self.__hasNotDataInBuffer():
612 611 if not (self.readNextFile(self.online)):
613 612 return 0
614 613
615 614
616 615 if self.dataset is None: # set this condition when there is no data left to read
617 616 self.dataOut.flagNoData = True
618 617 return 0
619 618
620 619 #self.dataOut.data = numpy.reshape(self.jrodataset[self.profileIndex,:],(1,-1))
621 620
622 621 self.dataOut.data = self.jrodataset[:,self.profileIndex,:]
623 622
624 623 #print("R_t",self.timeset)
625 624
626 625 #self.dataOut.utctime = self.jrotimeset[self.profileIndex]
627 626 #check the JRO data basic header to see whether it is compatible with this value
628 627 #self.dataOut.utctime = self.timeset + (self.profileIndex * self.ippSeconds * self.nchannels)
629 628 indexprof = numpy.mod(self.profileIndex, self.newProfiles)
630 629 indexblock = self.profileIndex/self.newProfiles
631 630 #print (indexblock, indexprof)
632 diffUTC = 1.8e4 #UTC diference from peru in seconds --Joab
633 631 diffUTC = 0
634 632 t_comp = (indexprof * self.ippSeconds * self.nchannels) + diffUTC #
635 633
636 634 #print("utc :",indexblock," __ ",t_comp)
637 635 #print(numpy.shape(self.timeset))
638 636 self.dataOut.utctime = self.timeset[numpy.int_(indexblock)] + t_comp
639 637 #self.dataOut.utctime = self.timeset[self.profileIndex] + t_comp
640 #print(self.dataOut.utctime)
638
641 639 self.dataOut.profileIndex = self.profileIndex
642 640 #print("N profile:",self.profileIndex,self.newProfiles,self.nblocks,self.dataOut.utctime)
643 641 self.dataOut.flagNoData = False
644 # if indexprof == 0:
645 # print self.dataOut.utctime
642 # if indexprof == 0:
643 # print("kamisr: ",self.dataOut.utctime)
646 644
647 645 self.profileIndex += 1
648 646
649 647 return self.dataOut.data
650 648
651 649
652 650 def run(self, **kwargs):
653 651 '''
654 652 This method will be called many times so here you should put all your code
655 653 '''
656 654 #print("running kamisr")
657 655 if not self.isConfig:
658 656 self.setup(**kwargs)
659 657 self.isConfig = True
660 658
661 659 self.getData()
@@ -1,651 +1,651
1 1 import os
2 2 import time
3 3 import datetime
4 4
5 5 import numpy
6 6 import h5py
7 7
8 8 import schainpy.admin
9 9 from schainpy.model.data.jrodata import *
10 10 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
11 11 from schainpy.model.io.jroIO_base import *
12 12 from schainpy.utils import log
13 13
14 14
15 15 class HDFReader(Reader, ProcessingUnit):
16 16 """Processing unit to read HDF5 format files
17 17
18 18 This unit reads HDF5 files created with the `HDFWriter` operation. By default the
19 19 file contains two groups, Data and Metadata, and all variables are loaded as `dataOut`
20 20 attributes.
21 21 It is possible to read any HDF5 file by giving its structure in the `description`
22 22 parameter; you can also add extra values to the metadata with the `extras` parameter.
23 23
24 24 Parameters:
25 25 -----------
26 26 path : str
27 27 Path where files are located.
28 28 startDate : date
29 29 Start date of the files
30 30 endDate : date
31 31 End date of the files
32 32 startTime : time
33 33 Start time of the files
34 34 endTime : time
35 35 End time of the files
36 36 description : dict, optional
37 37 Dictionary with the description of the HDF5 file
38 38 extras : dict, optional
39 39 Dictionary with extra metadata to be added to `dataOut`
40 40
41 41 Examples
42 42 --------
43 43
44 44 desc = {
45 45 'Data': {
46 46 'data_output': ['u', 'v', 'w'],
47 47 'utctime': 'timestamps',
48 48 } ,
49 49 'Metadata': {
50 50 'heightList': 'heights'
51 51 }
52 52 }
53 53
54 54 desc = {
55 55 'Data': {
56 56 'data_output': 'winds',
57 57 'utctime': 'timestamps'
58 58 },
59 59 'Metadata': {
60 60 'heightList': 'heights'
61 61 }
62 62 }
63 63
64 64 extras = {
65 65 'timeZone': 300
66 66 }
67 67
68 68 reader = project.addReadUnit(
69 69 name='HDFReader',
70 70 path='/path/to/files',
71 71 startDate='2019/01/01',
72 72 endDate='2019/01/31',
73 73 startTime='00:00:00',
74 74 endTime='23:59:59',
75 75 # description=json.dumps(desc),
76 76 # extras=json.dumps(extras),
77 77 )
78 78
79 79 """
80 80
81 81 __attrs__ = ['path', 'startDate', 'endDate', 'startTime', 'endTime', 'description', 'extras']
82 82
83 83 def __init__(self):
84 84 ProcessingUnit.__init__(self)
85 85 self.dataOut = Parameters()
86 86 self.ext = ".hdf5"
87 87 self.optchar = "D"
88 88 self.meta = {}
89 89 self.data = {}
90 90 self.open_file = h5py.File
91 91 self.open_mode = 'r'
92 92 self.description = {}
93 93 self.extras = {}
94 94 self.filefmt = "*%Y%j***"
95 95 self.folderfmt = "*%Y%j"
96 96 self.utcoffset = 0
97 97
98 98 def setup(self, **kwargs):
99 99
100 100 self.set_kwargs(**kwargs)
101 101 if not self.ext.startswith('.'):
102 102 self.ext = '.{}'.format(self.ext)
103 103
104 104 if self.online:
105 105 log.log("Searching files in online mode...", self.name)
106 106
107 107 for nTries in range(self.nTries):
108 108 fullpath = self.searchFilesOnLine(self.path, self.startDate,
109 109 self.endDate, self.expLabel, self.ext, self.walk,
110 110 self.filefmt, self.folderfmt)
111 111 #pathname, filename = os.path.split(fullpath) # fullpath is still a generator here; it is split after next(fullpath) below
112 print(pathname,filename)
112 #print(pathname,filename)
113 113 try:
114 114 fullpath = next(fullpath)
115 115
116 116 except:
117 117 fullpath = None
118 118
119 119 if fullpath:
120 120 break
121 121
122 122 log.warning(
123 123 'Waiting {} sec for a valid file in {}: try {} ...'.format(
124 124 self.delay, self.path, nTries + 1),
125 125 self.name)
126 126 time.sleep(self.delay)
127 127
128 128 if not(fullpath):
129 129 raise schainpy.admin.SchainError(
130 130 'There isn\'t any valid file in {}'.format(self.path))
131 131
132 132 pathname, filename = os.path.split(fullpath)
133 133 self.year = int(filename[1:5])
134 134 self.doy = int(filename[5:8])
135 135 self.set = int(filename[8:11]) - 1
136 136 else:
137 137 log.log("Searching files in {}".format(self.path), self.name)
138 138 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
139 139 self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt)
140 140
141 141 self.setNextFile()
142 142
143 143 return
144 144
145 145
146 146 def readFirstHeader(self):
147 147 '''Read metadata and data'''
148 148
149 149 self.__readMetadata()
150 150 self.__readData()
151 151 self.__setBlockList()
152 152
153 153 if 'type' in self.meta:
154 154 self.dataOut = eval(self.meta['type'])()
155 155
156 156 for attr in self.meta:
157 157 print("attr: ", attr)
158 158 setattr(self.dataOut, attr, self.meta[attr])
159 159
160 160
161 161 self.blockIndex = 0
162 162
163 163 return
164 164
165 165 def __setBlockList(self):
166 166 '''
167 167 Selects the data within the times defined
168 168
169 169 self.fp
170 170 self.startTime
171 171 self.endTime
172 172 self.blockList
173 173 self.blocksPerFile
174 174
175 175 '''
176 176
177 177 startTime = self.startTime
178 178 endTime = self.endTime
179 179 thisUtcTime = self.data['utctime'] + self.utcoffset
180 180 self.interval = numpy.min(thisUtcTime[1:] - thisUtcTime[:-1])
181 181 thisDatetime = datetime.datetime.utcfromtimestamp(thisUtcTime[0])
182 182 self.startFileDatetime = thisDatetime
183 print("datee ",self.startFileDatetime)
184 183 thisDate = thisDatetime.date()
185 184 thisTime = thisDatetime.time()
186 185
187 186 startUtcTime = (datetime.datetime.combine(thisDate, startTime) - datetime.datetime(1970, 1, 1)).total_seconds()
188 187 endUtcTime = (datetime.datetime.combine(thisDate, endTime) - datetime.datetime(1970, 1, 1)).total_seconds()
189 188
190 189 ind = numpy.where(numpy.logical_and(thisUtcTime >= startUtcTime, thisUtcTime < endUtcTime))[0]
191 190
192 191 self.blockList = ind
193 192 self.blocksPerFile = len(ind)
194 193 self.blocksPerFile = len(thisUtcTime)
195 194 return
196 195
197 196 def __readMetadata(self):
198 197 '''
199 198 Reads Metadata
200 199 '''
201 200
202 201 meta = {}
203 202
204 203 if self.description:
205 204 for key, value in self.description['Metadata'].items():
206 205 meta[key] = self.fp[value][()]
207 206 else:
208 207 grp = self.fp['Metadata']
209 208 for name in grp:
210 209 meta[name] = grp[name][()]
211 210
212 211 if self.extras:
213 212 for key, value in self.extras.items():
214 213 meta[key] = value
215 214 self.meta = meta
216 215
217 216 return
218 217
219 218
220 219
221 220 def checkForRealPath(self, nextFile, nextDay):
222 221
223 222 # print("check FRP")
224 223 # dt = self.startFileDatetime + datetime.timedelta(1)
225 224 # filename = '{}.{}{}'.format(self.path, dt.strftime('%Y%m%d'), self.ext)
226 225 # fullfilename = os.path.join(self.path, filename)
227 226 # print("check Path ",fullfilename,filename)
228 227 # if os.path.exists(fullfilename):
229 228 # return fullfilename, filename
230 229 # return None, filename
231 230 return None,None
232 231
233 232 def __readData(self):
234 233
235 234 data = {}
236 235
237 236 if self.description:
238 237 for key, value in self.description['Data'].items():
239 238 if isinstance(value, str):
240 239 if isinstance(self.fp[value], h5py.Dataset):
241 240 data[key] = self.fp[value][()]
242 241 elif isinstance(self.fp[value], h5py.Group):
243 242 array = []
244 243 for ch in self.fp[value]:
245 244 array.append(self.fp[value][ch][()])
246 245 data[key] = numpy.array(array)
247 246 elif isinstance(value, list):
248 247 array = []
249 248 for ch in value:
250 249 array.append(self.fp[ch][()])
251 250 data[key] = numpy.array(array)
252 251 else:
253 252 grp = self.fp['Data']
254 253 for name in grp:
255 254 if isinstance(grp[name], h5py.Dataset):
256 255 array = grp[name][()]
257 256 elif isinstance(grp[name], h5py.Group):
258 257 array = []
259 258 for ch in grp[name]:
260 259 array.append(grp[name][ch][()])
261 260 array = numpy.array(array)
262 261 else:
263 262 log.warning('Unknown type: {}'.format(name))
264 263
265 264 if name in self.description:
266 265 key = self.description[name]
267 266 else:
268 267 key = name
269 268 data[key] = array
270 269
271 270 self.data = data
272 271 return
273 272
274 273 def getData(self):
275 274 if not self.isDateTimeInRange(self.startFileDatetime, self.startDate, self.endDate, self.startTime, self.endTime):
276 275 self.dataOut.flagNoData = True
277 276 self.dataOut.error = True
278 277 return
279 278 for attr in self.data:
280 279 if self.data[attr].ndim == 1:
281 280 setattr(self.dataOut, attr, self.data[attr][self.blockIndex])
282 281 else:
283 282 setattr(self.dataOut, attr, self.data[attr][:, self.blockIndex])
284 283
285 284 self.dataOut.flagNoData = False
286 285 self.blockIndex += 1
287 286
288 287 log.log("Block No. {}/{} -> {}".format(
289 288 self.blockIndex,
290 289 self.blocksPerFile,
291 290 self.dataOut.datatime.ctime()), self.name)
292 291
293 292 return
294 293
295 294 def run(self, **kwargs):
296 295
297 296 if not(self.isConfig):
298 297 self.setup(**kwargs)
299 298 self.isConfig = True
300 299
301 300 if self.blockIndex == self.blocksPerFile:
302 301 self.setNextFile()
303 302
304 303 self.getData()
305 304
306 305 return
307 306
308 307 @MPDecorator
309 308 class HDFWriter(Operation):
310 309 """Operation to write HDF5 files.
311 310
312 311 By default the HDF5 file contains two groups, Data and Metadata, where
313 312 you can save any `dataOut` attribute specified by the `dataList` and `metadataList`
314 313 parameters; data attributes are normally time dependent whereas the metadata
315 314 are not.
316 315 It is possible to customize the structure of the HDF5 file with the
317 316 optional `description` parameter; see the examples.
318 317
319 318 Parameters:
320 319 -----------
321 320 path : str
322 321 Path where files will be saved.
323 322 blocksPerFile : int
324 323 Number of blocks per file
325 324 metadataList : list
326 325 List of the dataOut attributes that will be saved as metadata
328 327 dataList : list
328 327 List of the dataOut attributes that will be saved as data
329 328 setType : bool
330 329 If True the name of the files corresponds to the timestamp of the data
331 330 description : dict, optional
332 331 Dictionary with the desired description of the HDF5 file
333 332
334 333 Examples
335 334 --------
336 335
337 336 desc = {
338 337 'data_output': {'winds': ['z', 'w', 'v']},
339 338 'utctime': 'timestamps',
340 339 'heightList': 'heights'
341 340 }
342 341 desc = {
343 342 'data_output': ['z', 'w', 'v'],
344 343 'utctime': 'timestamps',
345 344 'heightList': 'heights'
346 345 }
347 346 desc = {
348 347 'Data': {
349 348 'data_output': 'winds',
350 349 'utctime': 'timestamps'
351 350 },
352 351 'Metadata': {
353 352 'heightList': 'heights'
354 353 }
355 354 }
356 355
357 356 writer = proc_unit.addOperation(name='HDFWriter')
358 357 writer.addParameter(name='path', value='/path/to/file')
359 358 writer.addParameter(name='blocksPerFile', value='32')
360 359 writer.addParameter(name='metadataList', value='heightList,timeZone')
361 360 writer.addParameter(name='dataList',value='data_output,utctime')
362 361 # writer.addParameter(name='description',value=json.dumps(desc))
363 362
364 363 """
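    # Illustrative read-back sketch (not part of the original class): with the
    # default layout and no `description`, the writer creates `Metadata` and
    # `Data` groups whose dataset names match the attribute names. The path and
    # key names below are assumptions for illustration only.
    #
    #   import h5py
    #   with h5py.File('/path/to/file/d2020123/D2020123000.hdf5', 'r') as fp:
    #       print(list(fp['Metadata'].keys()))   # e.g. heightList, timeZone
    #       print(list(fp['Data'].keys()))       # e.g. data_output, utctime
    #       utctime = fp['Data']['utctime'][()]  # one value per stored block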
365 364
366 365 ext = ".hdf5"
367 366 optchar = "D"
368 367 filename = None
369 368 path = None
370 369 setFile = None
371 370 fp = None
372 371 firsttime = True
373 372 #Configurations
374 373 blocksPerFile = None
375 374 blockIndex = None
376 375 dataOut = None
377 376 #Data Arrays
378 377 dataList = None
379 378 metadataList = None
380 379 currentDay = None
381 380 lastTime = None
382 381
383 382 def __init__(self):
384 383
385 384 Operation.__init__(self)
386 385 return
387 386
388 387 def setup(self, path=None, blocksPerFile=10, metadataList=None, dataList=None, setType=None, description=None):
389 388 self.path = path
390 389 self.blocksPerFile = blocksPerFile
391 390 self.metadataList = metadataList
392 391 self.dataList = [s.strip() for s in dataList]
393 392 self.setType = setType
394 393 self.description = description
395 394
396 395 if self.metadataList is None:
397 396 self.metadataList = self.dataOut.metadata_list
398 397
399 398 tableList = []
400 399 dsList = []
401 400
402 401 for i in range(len(self.dataList)):
403 402 dsDict = {}
404 403 if hasattr(self.dataOut, self.dataList[i]):
405 404 dataAux = getattr(self.dataOut, self.dataList[i])
406 405 dsDict['variable'] = self.dataList[i]
407 406 else:
408 407 log.warning('Attribute {} not found in dataOut'.format(self.dataList[i]), self.name)
409 408 continue
410 409
411 410 if dataAux is None:
412 411 continue
413 412 elif isinstance(dataAux, (int, float, numpy.integer, numpy.float)):
414 413 dsDict['nDim'] = 0
415 414 else:
416 415 dsDict['nDim'] = len(dataAux.shape)
417 416 dsDict['shape'] = dataAux.shape
418 417 dsDict['dsNumber'] = dataAux.shape[0]
419 418 dsDict['dtype'] = dataAux.dtype
420 419
421 420 dsList.append(dsDict)
422 421
423 422 self.dsList = dsList
424 423 self.currentDay = self.dataOut.datatime.date()
425 424
426 425 def timeFlag(self):
427 426 currentTime = self.dataOut.utctime
428 427 timeTuple = time.localtime(currentTime)
429 428 dataDay = timeTuple.tm_yday
430
429 #print("time UTC: ",currentTime, self.dataOut.datatime)
431 430 if self.lastTime is None:
432 431 self.lastTime = currentTime
433 432 self.currentDay = dataDay
434 433 return False
435 434
436 435 timeDiff = currentTime - self.lastTime
437 436
438 437 #If the day changes or the gap between consecutive samples exceeds the time limit (3 hours below)
439 438 if dataDay != self.currentDay:
440 439 self.currentDay = dataDay
441 440 return True
442 441 elif timeDiff > 3*60*60:
443 442 self.lastTime = currentTime
444 443 return True
445 444 else:
446 445 self.lastTime = currentTime
447 446 return False
448 447
449 448 def run(self, dataOut, path, blocksPerFile=10, metadataList=None,
450 449 dataList=[], setType=None, description={}):
451 450
452 451 self.dataOut = dataOut
453 452 if not(self.isConfig):
454 453 self.setup(path=path, blocksPerFile=blocksPerFile,
455 454 metadataList=metadataList, dataList=dataList,
456 455 setType=setType, description=description)
457 456
458 457 self.isConfig = True
459 458 self.setNextFile()
460 459
461 460 self.putData()
462 461 return
463 462
464 463 def setNextFile(self):
465 464
466 465 ext = self.ext
467 466 path = self.path
468 467 setFile = self.setFile
469 468
470 timeTuple = time.localtime(self.dataOut.utctime)
469 timeTuple = time.gmtime(self.dataOut.utctime)
470 #print("path: ",timeTuple)
471 471 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
472 472 fullpath = os.path.join(path, subfolder)
473 473
474 474 if os.path.exists(fullpath):
475 475 filesList = os.listdir(fullpath)
476 476 filesList = [k for k in filesList if k.startswith(self.optchar)]
477 477 if len( filesList ) > 0:
478 478 filesList = sorted(filesList, key=str.lower)
479 479 filen = filesList[-1]
480 480 # the filename must have the following format
481 481 # 0 1234 567 89A BCDE (hex)
482 482 # x YYYY DDD SSS .ext
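# e.g. D2020123000.hdf5 -> optchar 'D', year 2020, day-of-year 123, set 000 (illustrative example, not from the original code)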
483 483 if isNumber(filen[8:11]):
484 484 setFile = int(filen[8:11]) #initialize the set counter to the set number of the last file
485 485 else:
486 486 setFile = -1
487 487 else:
488 488 setFile = -1 #initialize the set counter
489 489 else:
490 490 os.makedirs(fullpath)
491 491 setFile = -1 #initialize the set counter
492 492
493 493 if self.setType is None:
494 494 setFile += 1
495 495 file = '%s%4.4d%3.3d%03d%s' % (self.optchar,
496 496 timeTuple.tm_year,
497 497 timeTuple.tm_yday,
498 498 setFile,
499 499 ext )
500 500 else:
501 501 setFile = timeTuple.tm_hour*60+timeTuple.tm_min
502 502 file = '%s%4.4d%3.3d%04d%s' % (self.optchar,
503 503 timeTuple.tm_year,
504 504 timeTuple.tm_yday,
505 505 setFile,
506 506 ext )
507 507
508 508 self.filename = os.path.join( path, subfolder, file )
509 509
510 510 #Setting HDF5 File
511 511 self.fp = h5py.File(self.filename, 'w')
512 512 #write metadata
513 513 self.writeMetadata(self.fp)
514 514 #Write data
515 515 self.writeData(self.fp)
516 516
517 517 def getLabel(self, name, x=None):
518 518
519 519 if x is None:
520 520 if 'Data' in self.description:
521 521 data = self.description['Data']
522 522 if 'Metadata' in self.description:
523 523 data.update(self.description['Metadata'])
524 524 else:
525 525 data = self.description
526 526 if name in data:
527 527 if isinstance(data[name], str):
528 528 return data[name]
529 529 elif isinstance(data[name], list):
530 530 return None
531 531 elif isinstance(data[name], dict):
532 532 for key, value in data[name].items():
533 533 return key
534 534 return name
535 535 else:
536 536 if 'Metadata' in self.description:
537 537 meta = self.description['Metadata']
538 538 else:
539 539 meta = self.description
540 540 if name in meta:
541 541 if isinstance(meta[name], list):
542 542 return meta[name][x]
543 543 elif isinstance(meta[name], dict):
544 544 for key, value in meta[name].items():
545 545 return value[x]
546 546 if 'cspc' in name:
547 547 return 'pair{:02d}'.format(x)
548 548 else:
549 549 return 'channel{:02d}'.format(x)
550 550
551 551 def writeMetadata(self, fp):
552 552
553 553 if self.description:
554 554 if 'Metadata' in self.description:
555 555 grp = fp.create_group('Metadata')
556 556 else:
557 557 grp = fp
558 558 else:
559 559 grp = fp.create_group('Metadata')
560 560
561 561 for i in range(len(self.metadataList)):
562 562 if not hasattr(self.dataOut, self.metadataList[i]):
563 563 log.warning('Metadata: `{}` not found'.format(self.metadataList[i]), self.name)
564 564 continue
565 565 value = getattr(self.dataOut, self.metadataList[i])
566 566 if isinstance(value, bool):
567 567 if value is True:
568 568 value = 1
569 569 else:
570 570 value = 0
571 571 grp.create_dataset(self.getLabel(self.metadataList[i]), data=value)
572 572 return
573 573
574 574 def writeData(self, fp):
575 575
576 576 if self.description:
577 577 if 'Data' in self.description:
578 578 grp = fp.create_group('Data')
579 579 else:
580 580 grp = fp
581 581 else:
582 582 grp = fp.create_group('Data')
583 583
584 584 dtsets = []
585 585 data = []
586 586
587 587 for dsInfo in self.dsList:
588 588 if dsInfo['nDim'] == 0:
589 589 ds = grp.create_dataset(
590 590 self.getLabel(dsInfo['variable']),
591 591 (self.blocksPerFile, ),
592 592 chunks=True,
593 593 dtype=numpy.float64)
594 594 dtsets.append(ds)
595 595 data.append((dsInfo['variable'], -1))
596 596 else:
597 597 label = self.getLabel(dsInfo['variable'])
598 598 if label is not None:
599 599 sgrp = grp.create_group(label)
600 600 else:
601 601 sgrp = grp
602 602 for i in range(dsInfo['dsNumber']):
603 603 ds = sgrp.create_dataset(
604 604 self.getLabel(dsInfo['variable'], i),
605 605 (self.blocksPerFile, ) + dsInfo['shape'][1:],
606 606 chunks=True,
607 607 dtype=dsInfo['dtype'])
608 608 dtsets.append(ds)
609 609 data.append((dsInfo['variable'], i))
610 610 fp.flush()
611 611
612 612 log.log('Creating file: {}'.format(fp.filename), self.name)
613 613
614 614 self.ds = dtsets
615 615 self.data = data
616 616 self.firsttime = True
617 617 self.blockIndex = 0
618 618 return
619 619
620 620 def putData(self):
621 621
622 622 if (self.blockIndex == self.blocksPerFile) or self.timeFlag():
623 623 self.closeFile()
624 624 self.setNextFile()
625 625
626 626 for i, ds in enumerate(self.ds):
627 627 attr, ch = self.data[i]
628 628 if ch == -1:
629 629 ds[self.blockIndex] = getattr(self.dataOut, attr)
630 630 else:
631 631 ds[self.blockIndex] = getattr(self.dataOut, attr)[ch]
632 632
633 633 self.fp.flush()
634 634 self.blockIndex += 1
635 635 log.log('Block No. {}/{}'.format(self.blockIndex, self.blocksPerFile), self.name)
636 636
637 637 return
638 638
639 639 def closeFile(self):
640 640
641 641 if self.blockIndex != self.blocksPerFile:
642 642 for ds in self.ds:
643 643 ds.resize(self.blockIndex, axis=0)
644 644
645 645 if self.fp:
646 646 self.fp.flush()
647 647 self.fp.close()
648 648
649 649 def close(self):
650 650
651 651 self.closeFile()
1 NO CONTENT: modified file
@@ -1,1439 +1,1357
1 1 # Copyright (c) 2012-2020 Jicamarca Radio Observatory
2 2 # All rights reserved.
3 3 #
4 4 # Distributed under the terms of the BSD 3-clause license.
5 5 """Spectra processing Unit and operations
6 6
7 7 Here you will find the processing unit `SpectraProc` and several operations
8 8 to work with Spectra data type
9 9 """
10 10
11 11 import time
12 12 import itertools
13 13
14 14 import numpy
15 15 import math
16 16
17 17 from schainpy.model.proc.jroproc_base import ProcessingUnit, MPDecorator, Operation
18 18 from schainpy.model.data.jrodata import Spectra
19 19 from schainpy.model.data.jrodata import hildebrand_sekhon
20 20 from schainpy.utils import log
21 21
22 22 from scipy.optimize import curve_fit
23 23
24 24
25 25 class SpectraProc(ProcessingUnit):
26 26
27 27 def __init__(self):
28 28
29 29 ProcessingUnit.__init__(self)
30 30
31 31 self.buffer = None
32 32 self.firstdatatime = None
33 33 self.profIndex = 0
34 34 self.dataOut = Spectra()
35 35 self.id_min = None
36 36 self.id_max = None
37 37 self.setupReq = False #Agregar a todas las unidades de proc
38 38
39 39 def __updateSpecFromVoltage(self):
40 40
41 41 self.dataOut.timeZone = self.dataIn.timeZone
42 42 self.dataOut.dstFlag = self.dataIn.dstFlag
43 43 self.dataOut.errorCount = self.dataIn.errorCount
44 44 self.dataOut.useLocalTime = self.dataIn.useLocalTime
45 45 try:
46 46 self.dataOut.processingHeaderObj = self.dataIn.processingHeaderObj.copy()
47 47 except:
48 48 pass
49 49 self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()
50 50 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()
51 51 self.dataOut.channelList = self.dataIn.channelList
52 52 self.dataOut.heightList = self.dataIn.heightList
53 53 self.dataOut.dtype = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
54 54 self.dataOut.nProfiles = self.dataOut.nFFTPoints
55 55 self.dataOut.flagDiscontinuousBlock = self.dataIn.flagDiscontinuousBlock
56 56 self.dataOut.utctime = self.firstdatatime
57 57 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData
58 58 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData
59 59 self.dataOut.flagShiftFFT = False
60 60 self.dataOut.nCohInt = self.dataIn.nCohInt
61 61 self.dataOut.nIncohInt = 1
62 62 self.dataOut.windowOfFilter = self.dataIn.windowOfFilter
63 63 self.dataOut.frequency = self.dataIn.frequency
64 64 self.dataOut.realtime = self.dataIn.realtime
65 65 self.dataOut.azimuth = self.dataIn.azimuth
66 66 self.dataOut.zenith = self.dataIn.zenith
67 67 self.dataOut.codeList = self.dataIn.codeList
68 68 self.dataOut.azimuthList = self.dataIn.azimuthList
69 69 self.dataOut.elevationList = self.dataIn.elevationList
70 70
71 71 def __getFft(self):
72 72 """
73 73 Converts Voltage values to Spectra
74 74
75 75 Affected:
76 76 self.dataOut.data_spc
77 77 self.dataOut.data_cspc
78 78 self.dataOut.data_dc
79 79 self.dataOut.heightList
80 80 self.profIndex
81 81 self.buffer
82 82 self.dataOut.flagNoData
83 83 """
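        # Illustrative sketch only (not part of SpectraProc): the self-spectra
        # computation below mirrors the steps of this method on a toy buffer of
        # shape (channels, profiles, heights); all values are assumptions.
        #
        #   import numpy
        #   buf = numpy.random.randn(2, 8, 4) + 1j*numpy.random.randn(2, 8, 4)
        #   fft_volt = numpy.fft.fftshift(numpy.fft.fft(buf, n=8, axis=1), axes=(1,))
        #   spc = (fft_volt * numpy.conjugate(fft_volt)).real   # power per FFT bin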
84 84 fft_volt = numpy.fft.fft(
85 85 self.buffer, n=self.dataOut.nFFTPoints, axis=1)
86 86 fft_volt = fft_volt.astype(numpy.dtype('complex'))
87 87 dc = fft_volt[:, 0, :]
88 88
89 89 # self-spectra computation
90 90 fft_volt = numpy.fft.fftshift(fft_volt, axes=(1,))
91 91 spc = fft_volt * numpy.conjugate(fft_volt)
92 92 spc = spc.real
93 93
94 94 blocksize = 0
95 95 blocksize += dc.size
96 96 blocksize += spc.size
97 97
98 98 cspc = None
99 99 pairIndex = 0
100 100 if self.dataOut.pairsList != None:
101 101 # cross-spectra computation
102 102 cspc = numpy.zeros(
103 103 (self.dataOut.nPairs, self.dataOut.nFFTPoints, self.dataOut.nHeights), dtype='complex')
104 104 for pair in self.dataOut.pairsList:
105 105 if pair[0] not in self.dataOut.channelList:
106 106 raise ValueError("Error getting CrossSpectra: pair 0 of %s is not in channelList = %s" % (
107 107 str(pair), str(self.dataOut.channelList)))
108 108 if pair[1] not in self.dataOut.channelList:
109 109 raise ValueError("Error getting CrossSpectra: pair 1 of %s is not in channelList = %s" % (
110 110 str(pair), str(self.dataOut.channelList)))
111 111
112 112 cspc[pairIndex, :, :] = fft_volt[pair[0], :, :] * \
113 113 numpy.conjugate(fft_volt[pair[1], :, :])
114 114 pairIndex += 1
115 115 blocksize += cspc.size
116 116
117 117 self.dataOut.data_spc = spc
118 118 self.dataOut.data_cspc = cspc
119 119 self.dataOut.data_dc = dc
120 120 self.dataOut.blockSize = blocksize
121 121 self.dataOut.flagShiftFFT = False
122 122
123 123 def run(self, nProfiles=None, nFFTPoints=None, pairsList=None, ippFactor=None, shift_fft=False):
124 124
125 125 if self.dataIn.type == "Spectra":
126 126 self.dataOut.copy(self.dataIn)
127 127 if shift_fft:
128 128 #shift a given number of positions to the right along the second axis
129 129 shift = int(self.dataOut.nFFTPoints/2)
130 130 self.dataOut.data_spc = numpy.roll(self.dataOut.data_spc, shift , axis=1)
131 131
132 132 if self.dataOut.data_cspc is not None:
133 133 #shift a given number of positions to the right along the second axis
134 134 self.dataOut.data_cspc = numpy.roll(self.dataOut.data_cspc, shift, axis=1)
135 135 if pairsList:
136 136 self.__selectPairs(pairsList)
137 137
138 138 elif self.dataIn.type == "Voltage":
139 139
140 140 self.dataOut.flagNoData = True
141 141
142 142 if nFFTPoints == None:
143 143 raise ValueError("This SpectraProc.run() need nFFTPoints input variable")
144 144
145 145 if nProfiles == None:
146 146 nProfiles = nFFTPoints
147 147
148 148 if ippFactor == None:
149 149 self.dataOut.ippFactor = 1
150 150
151 151 self.dataOut.nFFTPoints = nFFTPoints
152 152
153 153 if self.buffer is None:
154 154 self.buffer = numpy.zeros((self.dataIn.nChannels,
155 155 nProfiles,
156 156 self.dataIn.nHeights),
157 157 dtype='complex')
158 158
159 159 if self.dataIn.flagDataAsBlock:
160 160 nVoltProfiles = self.dataIn.data.shape[1]
161 161
162 162 if nVoltProfiles == nProfiles:
163 163 self.buffer = self.dataIn.data.copy()
164 164 self.profIndex = nVoltProfiles
165 165
166 166 elif nVoltProfiles < nProfiles:
167 167
168 168 if self.profIndex == 0:
169 169 self.id_min = 0
170 170 self.id_max = nVoltProfiles
171 171
172 172 self.buffer[:, self.id_min:self.id_max,
173 173 :] = self.dataIn.data
174 174 self.profIndex += nVoltProfiles
175 175 self.id_min += nVoltProfiles
176 176 self.id_max += nVoltProfiles
177 177 else:
178 178 raise ValueError("The type object %s has %d profiles, it should have just %d profiles" % (
179 179 self.dataIn.type, self.dataIn.data.shape[1], nProfiles))
180 180 self.dataOut.flagNoData = True
181 181 else:
182 182 self.buffer[:, self.profIndex, :] = self.dataIn.data.copy()
183 183 self.profIndex += 1
184 184
185 185 if self.firstdatatime == None:
186 186 self.firstdatatime = self.dataIn.utctime
187 187
188 188 if self.profIndex == nProfiles:
189 189 self.__updateSpecFromVoltage()
190 190 if pairsList == None:
191 191 self.dataOut.pairsList = [pair for pair in itertools.combinations(self.dataOut.channelList, 2)]
192 192 else:
193 193 self.dataOut.pairsList = pairsList
194 194 self.__getFft()
195 195 self.dataOut.flagNoData = False
196 196 self.firstdatatime = None
197 197 self.profIndex = 0
198 198 else:
199 199 raise ValueError("The type of input object '{}' is not valid".format(
200 200 self.dataIn.type))
201 201
202 202 def __selectPairs(self, pairsList):
203 203
204 204 if not pairsList:
205 205 return
206 206
207 207 pairs = []
208 208 pairsIndex = []
209 209
210 210 for pair in pairsList:
211 211 if pair[0] not in self.dataOut.channelList or pair[1] not in self.dataOut.channelList:
212 212 continue
213 213 pairs.append(pair)
214 214 pairsIndex.append(pairs.index(pair))
215 215
216 216 self.dataOut.data_cspc = self.dataOut.data_cspc[pairsIndex]
217 217 self.dataOut.pairsList = pairs
218 218
219 219 return
220 220
221 221 def selectFFTs(self, minFFT, maxFFT ):
222 222 """
223 223 Selects a block of data based on a set of FFT-point values within the range
224 224 minFFT<= FFT <= maxFFT
225 225 """
226 226
227 227 if (minFFT > maxFFT):
228 228 raise ValueError("Error selecting FFT points: FFT range (%d,%d) is not valid" % (minFFT, maxFFT))
229 229
230 230 if (minFFT < self.dataOut.getFreqRange()[0]):
231 231 minFFT = self.dataOut.getFreqRange()[0]
232 232
233 233 if (maxFFT > self.dataOut.getFreqRange()[-1]):
234 234 maxFFT = self.dataOut.getFreqRange()[-1]
235 235
236 236 minIndex = 0
237 237 maxIndex = 0
238 238 FFTs = self.dataOut.getFreqRange()
239 239
240 240 inda = numpy.where(FFTs >= minFFT)
241 241 indb = numpy.where(FFTs <= maxFFT)
242 242
243 243 try:
244 244 minIndex = inda[0][0]
245 245 except:
246 246 minIndex = 0
247 247
248 248 try:
249 249 maxIndex = indb[0][-1]
250 250 except:
251 251 maxIndex = len(FFTs)
252 252
253 253 self.selectFFTsByIndex(minIndex, maxIndex)
254 254
255 255 return 1
256 256
257 257 def getBeaconSignal(self, tauindex=0, channelindex=0, hei_ref=None):
258 258 newheis = numpy.where(
259 259 self.dataOut.heightList > self.dataOut.radarControllerHeaderObj.Taus[tauindex])
260 260
261 261 if hei_ref != None:
262 262 newheis = numpy.where(self.dataOut.heightList > hei_ref)
263 263
264 264 minIndex = min(newheis[0])
265 265 maxIndex = max(newheis[0])
266 266 data_spc = self.dataOut.data_spc[:, :, minIndex:maxIndex + 1]
267 267 heightList = self.dataOut.heightList[minIndex:maxIndex + 1]
268 268
269 269 # determine indices
270 270 nheis = int(self.dataOut.radarControllerHeaderObj.txB /
271 271 (self.dataOut.heightList[1] - self.dataOut.heightList[0]))
272 272 avg_dB = 10 * \
273 273 numpy.log10(numpy.sum(data_spc[channelindex, :, :], axis=0))
274 274 beacon_dB = numpy.sort(avg_dB)[-nheis:]
275 275 beacon_heiIndexList = []
276 276 for val in avg_dB.tolist():
277 277 if val >= beacon_dB[0]:
278 278 beacon_heiIndexList.append(avg_dB.tolist().index(val))
279 279
280 280 #data_spc = data_spc[:,:,beacon_heiIndexList]
281 281 data_cspc = None
282 282 if self.dataOut.data_cspc is not None:
283 283 data_cspc = self.dataOut.data_cspc[:, :, minIndex:maxIndex + 1]
284 284 #data_cspc = data_cspc[:,:,beacon_heiIndexList]
285 285
286 286 data_dc = None
287 287 if self.dataOut.data_dc is not None:
288 288 data_dc = self.dataOut.data_dc[:, minIndex:maxIndex + 1]
289 289 #data_dc = data_dc[:,beacon_heiIndexList]
290 290
291 291 self.dataOut.data_spc = data_spc
292 292 self.dataOut.data_cspc = data_cspc
293 293 self.dataOut.data_dc = data_dc
294 294 self.dataOut.heightList = heightList
295 295 self.dataOut.beacon_heiIndexList = beacon_heiIndexList
296 296
297 297 return 1
298 298
299 299 def selectFFTsByIndex(self, minIndex, maxIndex):
300 300 """
301 301
302 302 """
303 303
304 304 if (minIndex < 0) or (minIndex > maxIndex):
305 305 raise ValueError("Error selecting FFT points: Index range (%d,%d) is not valid" % (minIndex, maxIndex))
306 306
307 307 if (maxIndex >= self.dataOut.nProfiles):
308 308 maxIndex = self.dataOut.nProfiles-1
309 309
310 310 #Spectra
311 311 data_spc = self.dataOut.data_spc[:,minIndex:maxIndex+1,:]
312 312
313 313 data_cspc = None
314 314 if self.dataOut.data_cspc is not None:
315 315 data_cspc = self.dataOut.data_cspc[:,minIndex:maxIndex+1,:]
316 316
317 317 data_dc = None
318 318 if self.dataOut.data_dc is not None:
319 319 data_dc = self.dataOut.data_dc[minIndex:maxIndex+1,:]
320 320
321 321 self.dataOut.data_spc = data_spc
322 322 self.dataOut.data_cspc = data_cspc
323 323 self.dataOut.data_dc = data_dc
324 324
325 325 self.dataOut.ippSeconds = self.dataOut.ippSeconds*(self.dataOut.nFFTPoints / numpy.shape(data_cspc)[1])
326 326 self.dataOut.nFFTPoints = numpy.shape(data_cspc)[1]
327 327 self.dataOut.profilesPerBlock = numpy.shape(data_cspc)[1]
328 328
329 329 return 1
330 330
331 331 def getNoise(self, minHei=None, maxHei=None, minVel=None, maxVel=None):
332 332 # height range validation
333 333 if minHei == None:
334 334 minHei = self.dataOut.heightList[0]
335 335
336 336 if maxHei == None:
337 337 maxHei = self.dataOut.heightList[-1]
338 338
339 339 if (minHei < self.dataOut.heightList[0]) or (minHei > maxHei):
340 340 print('minHei: %.2f is out of the height range' % (minHei))
341 341 print('minHei is set to %.2f' % (self.dataOut.heightList[0]))
342 342 minHei = self.dataOut.heightList[0]
343 343
344 344 if (maxHei > self.dataOut.heightList[-1]) or (maxHei < minHei):
345 345 print('maxHei: %.2f is out of the height range' % (maxHei))
346 346 print('maxHei is set to %.2f' % (self.dataOut.heightList[-1]))
347 347 maxHei = self.dataOut.heightList[-1]
348 348
349 349 # velocity range validation
350 350 velrange = self.dataOut.getVelRange(1)
351 351
352 352 if minVel == None:
353 353 minVel = velrange[0]
354 354
355 355 if maxVel == None:
356 356 maxVel = velrange[-1]
357 357
358 358 if (minVel < velrange[0]) or (minVel > maxVel):
359 359 print('minVel: %.2f is out of the velocity range' % (minVel))
360 360 print('minVel is set to %.2f' % (velrange[0]))
361 361 minVel = velrange[0]
362 362
363 363 if (maxVel > velrange[-1]) or (maxVel < minVel):
364 364 print('maxVel: %.2f is out of the velocity range' % (maxVel))
365 365 print('maxVel is set to %.2f' % (velrange[-1]))
366 366 maxVel = velrange[-1]
367 367
368 368 # index selection for the height range
369 369 minIndex = 0
370 370 maxIndex = 0
371 371 heights = self.dataOut.heightList
372 372
373 373 inda = numpy.where(heights >= minHei)
374 374 indb = numpy.where(heights <= maxHei)
375 375
376 376 try:
377 377 minIndex = inda[0][0]
378 378 except:
379 379 minIndex = 0
380 380
381 381 try:
382 382 maxIndex = indb[0][-1]
383 383 except:
384 384 maxIndex = len(heights)
385 385
386 386 if (minIndex < 0) or (minIndex > maxIndex):
387 387 raise ValueError("some value in (%d,%d) is not valid" % (
388 388 minIndex, maxIndex))
389 389
390 390 if (maxIndex >= self.dataOut.nHeights):
391 391 maxIndex = self.dataOut.nHeights - 1
392 392
393 393 # index selection for velocities
394 394 indminvel = numpy.where(velrange >= minVel)
395 395 indmaxvel = numpy.where(velrange <= maxVel)
396 396 try:
397 397 minIndexVel = indminvel[0][0]
398 398 except:
399 399 minIndexVel = 0
400 400
401 401 try:
402 402 maxIndexVel = indmaxvel[0][-1]
403 403 except:
404 404 maxIndexVel = len(velrange)
405 405
406 406 # spectrum selection
407 407 data_spc = self.dataOut.data_spc[:,
408 408 minIndexVel:maxIndexVel + 1, minIndex:maxIndex + 1]
409 409 # noise estimation
410 410 noise = numpy.zeros(self.dataOut.nChannels)
411 411
412 412 for channel in range(self.dataOut.nChannels):
413 413 daux = data_spc[channel, :, :]
414 414 sortdata = numpy.sort(daux, axis=None)
415 415 noise[channel] = hildebrand_sekhon(sortdata, self.dataOut.nIncohInt)
416 416
417 417 self.dataOut.noise_estimation = noise.copy()
418 418
419 419 return 1
420 420
421 421 class removeDC(Operation):
422 422
423 423 def run(self, dataOut, mode=2):
424 424 self.dataOut = dataOut
425 425 jspectra = self.dataOut.data_spc
426 426 jcspectra = self.dataOut.data_cspc
427 427
428 428 num_chan = jspectra.shape[0]
429 429 num_hei = jspectra.shape[2]
430 430
431 431 if jcspectra is not None:
432 432 jcspectraExist = True
433 433 num_pairs = jcspectra.shape[0]
434 434 else:
435 435 jcspectraExist = False
436 436
437 437 freq_dc = int(jspectra.shape[1] / 2)
438 438 ind_vel = numpy.array([-2, -1, 1, 2]) + freq_dc
439 439 ind_vel = ind_vel.astype(int)
440 440
441 441 if ind_vel[0] < 0:
442 442 ind_vel[list(range(0, 1))] = ind_vel[list(range(0, 1))] + self.num_prof
443 443
444 444 if mode == 1:
445 445 jspectra[:, freq_dc, :] = (
446 446 jspectra[:, ind_vel[1], :] + jspectra[:, ind_vel[2], :]) / 2 # CORRECTION
447 447
448 448 if jcspectraExist:
449 449 jcspectra[:, freq_dc, :] = (
450 450 jcspectra[:, ind_vel[1], :] + jcspectra[:, ind_vel[2], :]) / 2
451 451
452 452 if mode == 2:
453 453
454 454 vel = numpy.array([-2, -1, 1, 2])
455 455 xx = numpy.zeros([4, 4])
456 456
457 457 for fil in range(4):
458 458 xx[fil, :] = vel[fil]**numpy.asarray(list(range(4)))
459 459
460 460 xx_inv = numpy.linalg.inv(xx)
461 461 xx_aux = xx_inv[0, :]
462 462
463 463 for ich in range(num_chan):
464 464 yy = jspectra[ich, ind_vel, :]
465 465 jspectra[ich, freq_dc, :] = numpy.dot(xx_aux, yy)
466 466
467 467 junkid = jspectra[ich, freq_dc, :] <= 0
468 468 cjunkid = sum(junkid)
469 469
470 470 if cjunkid.any():
471 471 jspectra[ich, freq_dc, junkid.nonzero()] = (
472 472 jspectra[ich, ind_vel[1], junkid] + jspectra[ich, ind_vel[2], junkid]) / 2
473 473
474 474 if jcspectraExist:
475 475 for ip in range(num_pairs):
476 476 yy = jcspectra[ip, ind_vel, :]
477 477 jcspectra[ip, freq_dc, :] = numpy.dot(xx_aux, yy)
478 478
479 479 self.dataOut.data_spc = jspectra
480 480 self.dataOut.data_cspc = jcspectra
481 481
482 482 return self.dataOut
483 483
484 484 # import matplotlib.pyplot as plt
485 485
486 486 def fit_func( x, a0, a1, a2): #, a3, a4, a5):
487 487 z = (x - a1) / a2
488 488 y = a0 * numpy.exp(-z**2 / a2) #+ a3 + a4 * x + a5 * x**2
489 489 return y
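# Illustrative sketch (not part of the original module) of how fit_func is used
# by CleanRayleigh below: histogram a series of power values in dB, fit the
# curve with scipy's curve_fit, and flag samples farther than factor_stdv times
# the fitted width from the fitted mode; the sample data and factor are assumptions.
#
#   import numpy
#   from scipy.optimize import curve_fit
#   samples_dB = 10*numpy.log10(numpy.random.rayleigh(1.0, 300))
#   edges = numpy.arange(numpy.floor(samples_dB.min()),
#                        numpy.ceil(samples_dB.max()) + 2, 1)
#   y_dist, binstep = numpy.histogram(samples_dB, bins=edges)
#   x_dist = 0.5*(binstep[:-1] + binstep[1:])
#   p0 = [y_dist.max(), samples_dB.mean(), samples_dB.std()]
#   try:
#       gauss_fit, _ = curve_fit(fit_func, x_dist, y_dist, p0=p0)
#       mode, stdv = gauss_fit[1], gauss_fit[2]
#   except Exception:                     # fall back to sample statistics
#       mode, stdv = p0[1], p0[2]
#   outliers = numpy.abs(samples_dB - mode) >= 2.5*stdv   # factor_stdv = 2.5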
490 490
491 491
492 492 class CleanRayleigh(Operation):
493 493
494 494 def __init__(self):
495 495
496 496 Operation.__init__(self)
497 497 self.i=0
498 498 self.isConfig = False
499 499 self.__dataReady = False
500 500 self.__profIndex = 0
501 501 self.byTime = False
502 502 self.byProfiles = False
503 503
504 504 self.bloques = None
505 505 self.bloque0 = None
506 506
507 507 self.index = 0
508 508
509 509 self.buffer = 0
510 510 self.buffer2 = 0
511 511 self.buffer3 = 0
512 512
513 513
514 514 def setup(self,dataOut,min_hei,max_hei,n, timeInterval,factor_stdv):
515 515
516 516 self.nChannels = dataOut.nChannels
517 517 self.nProf = dataOut.nProfiles
518 518 self.nPairs = dataOut.data_cspc.shape[0]
519 519 self.pairsArray = numpy.array(dataOut.pairsList)
520 520 self.spectra = dataOut.data_spc
521 521 self.cspectra = dataOut.data_cspc
522 522 self.heights = dataOut.heightList #all heights
523 523 self.nHeights = len(self.heights)
524 524 self.min_hei = min_hei
525 525 self.max_hei = max_hei
526 526 if (self.min_hei == None):
527 527 self.min_hei = 0
528 528 if (self.max_hei == None):
529 529 self.max_hei = dataOut.heightList[-1]
530 530 self.hval = ((self.max_hei>=self.heights) & (self.heights >= self.min_hei)).nonzero()
531 531 self.heightsClean = self.heights[self.hval] #filtered heights
532 532 self.hval = self.hval[0] # shape (N,), only N elements -> height indices
533 533 self.nHeightsClean = len(self.heightsClean)
534 534 self.channels = dataOut.channelList
535 535 self.nChan = len(self.channels)
536 536 self.nIncohInt = dataOut.nIncohInt
537 537 self.__initime = dataOut.utctime
538 538 self.maxAltInd = self.hval[-1]+1
539 539 self.minAltInd = self.hval[0]
540 540
541 541 self.crosspairs = dataOut.pairsList
542 542 self.nPairs = len(self.crosspairs)
543 543 self.normFactor = dataOut.normFactor
544 544 self.nFFTPoints = dataOut.nFFTPoints
545 545 self.ippSeconds = dataOut.ippSeconds
546 546 self.currentTime = self.__initime
547 547 self.pairsArray = numpy.array(dataOut.pairsList)
548 548 self.factor_stdv = factor_stdv
549 549 #print("CHANNELS: ",[x for x in self.channels])
550 550
551 551 if n != None :
552 552 self.byProfiles = True
553 553 self.nIntProfiles = n
554 554 else:
555 555 self.__integrationtime = timeInterval
556 556
557 557 self.__dataReady = False
558 558 self.isConfig = True
559 559
560 560
561 561
562 562 def run(self, dataOut,min_hei=None,max_hei=None, n=None, timeInterval=10,factor_stdv=2.5):
563 563 #print (dataOut.utctime)
564 564 if not self.isConfig :
565 565 #print("Setting config")
566 566 self.setup(dataOut, min_hei,max_hei,n,timeInterval,factor_stdv)
567 567 #print("Config Done")
568 568 tini=dataOut.utctime
569 569
570 570 if self.byProfiles:
571 571 if self.__profIndex == self.nIntProfiles:
572 572 self.__dataReady = True
573 573 else:
574 574 if (tini - self.__initime) >= self.__integrationtime:
575 575 #print(tini - self.__initime,self.__profIndex)
576 576 self.__dataReady = True
577 577 self.__initime = tini
578 578
579 579 #if (tini.tm_min % 2) == 0 and (tini.tm_sec < 5 and self.fint==0):
580 580
581 581 if self.__dataReady:
582 582 #print("Data ready",self.__profIndex)
583 583 self.__profIndex = 0
584 584 jspc = self.buffer
585 585 jcspc = self.buffer2
586 586 #jnoise = self.buffer3
587 587 self.buffer = dataOut.data_spc
588 588 self.buffer2 = dataOut.data_cspc
589 589 #self.buffer3 = dataOut.noise
590 590 self.currentTime = dataOut.utctime
591 591 if numpy.any(jspc) :
592 592 #print( jspc.shape, jcspc.shape)
593 593 jspc = numpy.reshape(jspc,(int(len(jspc)/self.nChannels),self.nChannels,self.nFFTPoints,self.nHeights))
594 594 jcspc= numpy.reshape(jcspc,(int(len(jcspc)/self.nPairs),self.nPairs,self.nFFTPoints,self.nHeights))
595 595 self.__dataReady = False
596 596 #print( jspc.shape, jcspc.shape)
597 597 dataOut.flagNoData = False
598 598 else:
599 599 dataOut.flagNoData = True
600 600 self.__dataReady = False
601 601 return dataOut
602 602 else:
603 603 #print( len(self.buffer))
604 604 if numpy.any(self.buffer):
605 605 self.buffer = numpy.concatenate((self.buffer,dataOut.data_spc), axis=0)
606 606 self.buffer2 = numpy.concatenate((self.buffer2,dataOut.data_cspc), axis=0)
607 607 self.buffer3 += dataOut.data_dc
608 608 else:
609 609 self.buffer = dataOut.data_spc
610 610 self.buffer2 = dataOut.data_cspc
611 611 self.buffer3 = dataOut.data_dc
612 612 #print self.index, self.fint
613 613 #print self.buffer2.shape
614 614 dataOut.flagNoData = True ## NOTE: ?? review LATER
615 615 self.__profIndex += 1
616 616 return dataOut ## NOTE: REV
617 617
618 618
619 619 #index = tini.tm_hour*12+tini.tm_min/5
620 620 '''REVIEW THIS'''
621 621 # jspc = jspc/self.nFFTPoints/self.normFactor
622 622 # jcspc = jcspc/self.nFFTPoints/self.normFactor
623 623
624 624
625 625
626 626 tmp_spectra,tmp_cspectra = self.cleanRayleigh(dataOut,jspc,jcspc,self.factor_stdv)
627 627 dataOut.data_spc = tmp_spectra
628 628 dataOut.data_cspc = tmp_cspectra
629 629
630 630 #dataOut.data_spc,dataOut.data_cspc = self.cleanRayleigh(dataOut,jspc,jcspc,self.factor_stdv)
631 631
632 632 dataOut.data_dc = self.buffer3
633 633 dataOut.nIncohInt *= self.nIntProfiles
634 634 dataOut.utctime = self.currentTime #averaged time
635 635 #print("Time: ",time.localtime(dataOut.utctime))
636 636 # dataOut.data_spc = sat_spectra
637 637 # dataOut.data_cspc = sat_cspectra
638 638 self.buffer = 0
639 639 self.buffer2 = 0
640 640 self.buffer3 = 0
641 641
642 642 return dataOut
643 643
644 644 def cleanRayleigh(self,dataOut,spectra,cspectra,factor_stdv):
645 645 #print("OP cleanRayleigh")
646 import matplotlib.pyplot as plt
646 #import matplotlib.pyplot as plt
647 647 #for k in range(149):
648 channelsProcssd = []
649 channelA_ok = False
650 rfunc = cspectra.copy() #self.bloques
648 #channelsProcssd = []
649 #channelA_ok = False
650 #rfunc = cspectra.copy() #self.bloques
651 rfunc = spectra.copy()
651 652 #rfunc = cspectra
652 653 #val_spc = spectra*0.0 #self.bloque0*0.0
653 654 #val_cspc = cspectra*0.0 #self.bloques*0.0
654 655 #in_sat_spectra = spectra.copy() #self.bloque0
655 656 #in_sat_cspectra = cspectra.copy() #self.bloques
656 657
657 658
658 659 ###ONLY FOR TEST:
659 660 raxs = math.ceil(math.sqrt(self.nPairs))
660 661 caxs = math.ceil(self.nPairs/raxs)
661 662 if self.nPairs <4:
662 663 raxs = 2
663 664 caxs = 2
664 665 #print(raxs, caxs)
665 666 fft_rev = 14 #nFFT to plot
666 667 hei_rev = ((self.heights >= 550) & (self.heights <= 551)).nonzero() #hei to plot
667 668 hei_rev = hei_rev[0]
668 669 #print(hei_rev)
669 670
670 671 #print numpy.absolute(rfunc[:,0,0,14])
671 672
672 673 gauss_fit, covariance = None, None
673 674 for ih in range(self.minAltInd,self.maxAltInd):
674 675 for ifreq in range(self.nFFTPoints):
676 '''
675 677 ###ONLY FOR TEST:
676 678 if ifreq ==fft_rev and ih==hei_rev: #TO VIEW A SIGNLE FREQUENCY
677 679 fig, axs = plt.subplots(raxs, caxs)
678 680 fig2, axs2 = plt.subplots(raxs, caxs)
679 681 col_ax = 0
680 682 row_ax = 0
683 '''
681 684 #print(self.nPairs)
682 for ii in range(self.nPairs): #PARES DE CANALES SELF y CROSS
683 if self.crosspairs[ii][1]-self.crosspairs[ii][0] > 1: # APLICAR SOLO EN PARES CONTIGUOS
684 continue
685 if not self.crosspairs[ii][0] in channelsProcssd:
686 channelA_ok = True
685 for ii in range(self.nChan): #SELF and CROSS channel pairs
686 # if self.crosspairs[ii][1]-self.crosspairs[ii][0] > 1: # APPLY ONLY TO CONTIGUOUS PAIRS
687 # continue
688 # if not self.crosspairs[ii][0] in channelsProcssd:
689 # channelA_ok = True
687 690 #print("pair: ",self.crosspairs[ii])
688 if (col_ax%caxs==0 and col_ax!=0 and self.nPairs !=1): ###ONLY FOR TEST:
691 '''
692 ###ONLY FOR TEST:
693 if (col_ax%caxs==0 and col_ax!=0 and self.nPairs !=1):
689 694 col_ax = 0
690 695 row_ax += 1
696 '''
691 697 func2clean = 10*numpy.log10(numpy.absolute(rfunc[:,ii,ifreq,ih])) #Power?
692 698 #print(func2clean.shape)
693 699 val = (numpy.isfinite(func2clean)==True).nonzero()
694 700
695 701 if len(val)>0: #limitador
696 702 min_val = numpy.around(numpy.amin(func2clean)-2) #> (-40)
697 703 if min_val <= -40 :
698 704 min_val = -40
699 705 max_val = numpy.around(numpy.amax(func2clean)+2) #< 200
700 706 if max_val >= 200 :
701 707 max_val = 200
702 708 #print min_val, max_val
703 709 step = 1
704 710 #print("Getting bins and the histogram")
705 711 x_dist = min_val + numpy.arange(1 + ((max_val-(min_val))/step))*step
706 712 y_dist,binstep = numpy.histogram(func2clean,bins=range(int(min_val),int(max_val+2),step))
707 713 #print(len(y_dist),len(binstep[:-1]))
708 714 #print(row_ax,col_ax, " ..")
709 715 #print(self.pairsArray[ii][0],self.pairsArray[ii][1])
710 716 mean = numpy.sum(x_dist * y_dist) / numpy.sum(y_dist)
711 717 sigma = numpy.sqrt(numpy.sum(y_dist * (x_dist - mean)**2) / numpy.sum(y_dist))
712 718 parg = [numpy.amax(y_dist),mean,sigma]
713 719
714 720 newY = None
715 721
716 722 try :
717 723 gauss_fit, covariance = curve_fit(fit_func, x_dist, y_dist,p0=parg)
718 724 mode = gauss_fit[1]
719 725 stdv = gauss_fit[2]
720 726 #print(" FIT OK",gauss_fit)
721
727 '''
722 728 ###ONLY FOR TEST:
723 729 if ifreq ==fft_rev and ih==hei_rev: #TO VIEW A SIGNLE FREQUENCY
724 730 newY = fit_func(x_dist,gauss_fit[0],gauss_fit[1],gauss_fit[2])
725 731 axs[row_ax,col_ax].plot(binstep[:-1],y_dist,color='green')
726 732 axs[row_ax,col_ax].plot(binstep[:-1],newY,color='red')
727 axs[row_ax,col_ax].set_title("Pair "+str(self.crosspairs[ii]))
728
733 axs[row_ax,col_ax].set_title("CH "+str(self.channels[ii]))
734 '''
729 735 except:
730 736 mode = mean
731 737 stdv = sigma
732 738 #print("FIT FAIL")
733 continue
739 #continue
734 740
735 741
736 742 #print(mode,stdv)
737 743 #Removing echoes greater than mode + std_factor*stdv
738 744 noval = (abs(func2clean - mode)>=(factor_stdv*stdv)).nonzero()
739 745 #noval holds the indices to be removed
740 #print("Pair ",ii," novals: ",len(noval[0]))
746 #print("Chan ",ii," novals: ",len(noval[0]))
741 747 if len(noval[0]) > 0: #an array of shape (N,) has length (N)
742 748 novall = ((func2clean - mode) >= (factor_stdv*stdv)).nonzero()
743 749 #print(novall)
744 750 #print(" ",self.pairsArray[ii])
745 cross_pairs = self.pairsArray[ii]
751 #cross_pairs = self.pairsArray[ii]
746 752 #Getting coherent echoes which are removed.
747 753 # if len(novall[0]) > 0:
748 754 #
749 755 # val_spc[novall[0],cross_pairs[0],ifreq,ih] = 1
750 756 # val_spc[novall[0],cross_pairs[1],ifreq,ih] = 1
751 757 # val_cspc[novall[0],ii,ifreq,ih] = 1
752 758 #print("OUT NOVALL 1")
753
754 new_a = numpy.delete(cspectra[:,ii,ifreq,ih], noval[0])
755 cspectra[noval,ii,ifreq,ih] = numpy.mean(new_a) #mean CrossSpectra
756
757 if channelA_ok:
758 chA = self.channels.index(cross_pairs[0])
759 new_b = numpy.delete(spectra[:,chA,ifreq,ih], noval[0])
760 spectra[noval,chA,ifreq,ih] = numpy.mean(new_b) #mean Spectra Pair A
761 channelA_ok = False
762 chB = self.channels.index(cross_pairs[1])
763 new_c = numpy.delete(spectra[:,chB,ifreq,ih], noval[0])
764 spectra[noval,chB,ifreq,ih] = numpy.mean(new_c) #mean Spectra Pair B
765
766 channelsProcssd.append(self.crosspairs[ii][0]) # save channel A
767 channelsProcssd.append(self.crosspairs[ii][1]) # save channel B
768
759 try:
760 pair = (self.channels[ii],self.channels[ii + 1])
761 except:
762 pair = (99,99)
763 #print("par ", pair)
764 if ( pair in self.crosspairs):
765 q = self.crosspairs.index(pair)
766 #print("está aqui: ", q, (ii,ii + 1))
767 new_a = numpy.delete(cspectra[:,q,ifreq,ih], noval[0])
768 cspectra[noval,q,ifreq,ih] = numpy.mean(new_a) #mean CrossSpectra
769
770 #if channelA_ok:
771 #chA = self.channels.index(cross_pairs[0])
772 new_b = numpy.delete(spectra[:,ii,ifreq,ih], noval[0])
773 spectra[noval,ii,ifreq,ih] = numpy.mean(new_b) #mean Spectra Pair A
774 #channelA_ok = False
775
776 # chB = self.channels.index(cross_pairs[1])
777 # new_c = numpy.delete(spectra[:,chB,ifreq,ih], noval[0])
778 # spectra[noval,chB,ifreq,ih] = numpy.mean(new_c) #mean Spectra Pair B
779 #
780 # channelsProcssd.append(self.crosspairs[ii][0]) # save channel A
781 # channelsProcssd.append(self.crosspairs[ii][1]) # save channel B
782 '''
769 783 ###ONLY FOR TEST:
770 784 if ifreq ==fft_rev and ih==hei_rev: #TO VIEW A SIGNLE FREQUENCY
771 func2clean = 10*numpy.log10(numpy.absolute(cspectra[:,ii,ifreq,ih]))
785 func2clean = 10*numpy.log10(numpy.absolute(spectra[:,ii,ifreq,ih]))
772 786 y_dist,binstep = numpy.histogram(func2clean,bins=range(int(min_val),int(max_val+2),step))
773 787 axs2[row_ax,col_ax].plot(binstep[:-1],newY,color='red')
774 788 axs2[row_ax,col_ax].plot(binstep[:-1],y_dist,color='green')
775 axs2[row_ax,col_ax].set_title("Pair "+str(self.crosspairs[ii]))
776
789 axs2[row_ax,col_ax].set_title("CH "+str(self.channels[ii]))
790 '''
791 '''
777 792 ###ONLY FOR TEST:
778 793 col_ax += 1 #contador de ploteo columnas
779 794 ##print(col_ax)
780 795 ###ONLY FOR TEST:
781 796 if ifreq ==fft_rev and ih==hei_rev: #TO VIEW A SIGNLE FREQUENCY
782 797 title = str(dataOut.datatime)+" nFFT: "+str(ifreq)+" Alt: "+str(self.heights[ih])+ " km"
783 798 title2 = str(dataOut.datatime)+" nFFT: "+str(ifreq)+" Alt: "+str(self.heights[ih])+ " km CLEANED"
784 799 fig.suptitle(title)
785 800 fig2.suptitle(title2)
786 801 plt.show()
787
788
789 '''
790
791 channels = channels
792 cross_pairs = cross_pairs
793 #print("OUT NOVALL 2")
794
795 vcross0 = (cross_pairs[0] == channels[ii]).nonzero()
796 vcross1 = (cross_pairs[1] == channels[ii]).nonzero()
797 vcross = numpy.concatenate((vcross0,vcross1),axis=None)
798 #print('vcros =', vcross)
799
800 #Getting coherent echoes which are removed.
801 if len(novall) > 0:
802 #val_spc[novall,ii,ifreq,ih] = 1
803 val_spc[ii,ifreq,ih,novall] = 1
804 if len(vcross) > 0:
805 val_cspc[vcross,ifreq,ih,novall] = 1
806
807 #Removing coherent from ISR data.
808 self.bloque0[ii,ifreq,ih,noval] = numpy.nan
809 if len(vcross) > 0:
810 self.bloques[vcross,ifreq,ih,noval] = numpy.nan
811 802 '''
803 ##################################################################################################
812 804
813 805 #print("Getting average of the spectra and cross-spectra from incoherent echoes.")
814 806 out_spectra = numpy.zeros([self.nChan,self.nFFTPoints,self.nHeights], dtype=float) #+numpy.nan
815 807 out_cspectra = numpy.zeros([self.nPairs,self.nFFTPoints,self.nHeights], dtype=complex) #+numpy.nan
816 808 for ih in range(self.nHeights):
817 809 for ifreq in range(self.nFFTPoints):
818 810 for ich in range(self.nChan):
819 811 tmp = spectra[:,ich,ifreq,ih]
820 812 valid = (numpy.isfinite(tmp[:])==True).nonzero()
821 813
822 814 if len(valid[0]) >0 :
823 815 out_spectra[ich,ifreq,ih] = numpy.nansum(tmp)#/len(valid[0])
824 816
825 817 for icr in range(self.nPairs):
826 818 tmp = numpy.squeeze(cspectra[:,icr,ifreq,ih])
827 819 valid = (numpy.isfinite(tmp)==True).nonzero()
828 820 if len(valid[0]) > 0:
829 821 out_cspectra[icr,ifreq,ih] = numpy.nansum(tmp)#/len(valid[0])
830 '''
831 # print('##########################################################')
832 print("Removing fake coherent echoes (at least 4 points around the point)")
833
834 val_spectra = numpy.sum(val_spc,0)
835 val_cspectra = numpy.sum(val_cspc,0)
836
837 val_spectra = self.REM_ISOLATED_POINTS(val_spectra,4)
838 val_cspectra = self.REM_ISOLATED_POINTS(val_cspectra,4)
839
840 for i in range(nChan):
841 for j in range(nProf):
842 for k in range(nHeights):
843 if numpy.isfinite(val_spectra[i,j,k]) and val_spectra[i,j,k] < 1 :
844 val_spc[:,i,j,k] = 0.0
845 for i in range(nPairs):
846 for j in range(nProf):
847 for k in range(nHeights):
848 if numpy.isfinite(val_cspectra[i,j,k]) and val_cspectra[i,j,k] < 1 :
849 val_cspc[:,i,j,k] = 0.0
850
851 # val_spc = numpy.reshape(val_spc, (len(spectra[:,0,0,0]),nProf*nHeights*nChan))
852 # if numpy.isfinite(val_spectra)==str(True):
853 # noval = (val_spectra<1).nonzero()
854 # if len(noval) > 0:
855 # val_spc[:,noval] = 0.0
856 # val_spc = numpy.reshape(val_spc, (149,nChan,nProf,nHeights))
857
858 #val_cspc = numpy.reshape(val_spc, (149,nChan*nHeights*nProf))
859 #if numpy.isfinite(val_cspectra)==str(True):
860 # noval = (val_cspectra<1).nonzero()
861 # if len(noval) > 0:
862 # val_cspc[:,noval] = 0.0
863 # val_cspc = numpy.reshape(val_cspc, (149,nChan,nProf,nHeights))
864 tmp_sat_spectra = spectra.copy()
865 tmp_sat_spectra = tmp_sat_spectra*numpy.nan
866 tmp_sat_cspectra = cspectra.copy()
867 tmp_sat_cspectra = tmp_sat_cspectra*numpy.nan
868 '''
869 # fig = plt.figure(figsize=(6,5))
870 # left, bottom, width, height = 0.1, 0.1, 0.8, 0.8
871 # ax = fig.add_axes([left, bottom, width, height])
872 # cp = ax.contour(10*numpy.log10(numpy.absolute(spectra[0,0,:,:])))
873 # ax.clabel(cp, inline=True,fontsize=10)
874 # plt.show()
875 '''
876 val = (val_spc > 0).nonzero()
877 if len(val[0]) > 0:
878 tmp_sat_spectra[val] = in_sat_spectra[val]
879 val = (val_cspc > 0).nonzero()
880 if len(val[0]) > 0:
881 tmp_sat_cspectra[val] = in_sat_cspectra[val]
882
883 print("Getting average of the spectra and cross-spectra from incoherent echoes 2")
884 sat_spectra = numpy.zeros((nChan,nProf,nHeights), dtype=float)
885 sat_cspectra = numpy.zeros((nPairs,nProf,nHeights), dtype=complex)
886 for ih in range(nHeights):
887 for ifreq in range(nProf):
888 for ich in range(nChan):
889 tmp = numpy.squeeze(tmp_sat_spectra[:,ich,ifreq,ih])
890 valid = (numpy.isfinite(tmp)).nonzero()
891 if len(valid[0]) > 0:
892 sat_spectra[ich,ifreq,ih] = numpy.nansum(tmp)/len(valid[0])
893 822
894 for icr in range(nPairs):
895 tmp = numpy.squeeze(tmp_sat_cspectra[:,icr,ifreq,ih])
896 valid = (numpy.isfinite(tmp)).nonzero()
897 if len(valid[0]) > 0:
898 sat_cspectra[icr,ifreq,ih] = numpy.nansum(tmp)/len(valid[0])
899 '''
900 #self.__dataReady= True
901 #sat_spectra, sat_cspectra= sat_spectra, sat_cspectra
902 #if not self.__dataReady:
903 #return None, None
904 #return out_spectra, out_cspectra ,sat_spectra,sat_cspectra
905 823 return out_spectra, out_cspectra
906 824
907 825 def REM_ISOLATED_POINTS(self,array,rth):
908 826 # import matplotlib.pyplot as plt
909 827 if rth == None :
910 828 rth = 4
911 print("REM ISO")
829 #print("REM ISO")
912 830 num_prof = len(array[0,:,0])
913 831 num_hei = len(array[0,0,:])
914 832 n2d = len(array[:,0,0])
915 833
916 834 for ii in range(n2d) :
917 835 #print ii,n2d
918 836 tmp = array[ii,:,:]
919 837 #print tmp.shape, array[ii,101,:],array[ii,102,:]
920 838
921 839 # fig = plt.figure(figsize=(6,5))
922 840 # left, bottom, width, height = 0.1, 0.1, 0.8, 0.8
923 841 # ax = fig.add_axes([left, bottom, width, height])
924 842 # x = range(num_prof)
925 843 # y = range(num_hei)
926 844 # cp = ax.contour(y,x,tmp)
927 845 # ax.clabel(cp, inline=True,fontsize=10)
928 846 # plt.show()
929 847
930 848 #indxs = WHERE(FINITE(tmp) AND tmp GT 0,cindxs)
931 849 tmp = numpy.reshape(tmp,num_prof*num_hei)
932 850 indxs1 = (numpy.isfinite(tmp)==True).nonzero()
933 851 indxs2 = (tmp > 0).nonzero()
934 852
935 853 indxs1 = (indxs1[0])
936 854 indxs2 = indxs2[0]
937 855 #indxs1 = numpy.array(indxs1[0])
938 856 #indxs2 = numpy.array(indxs2[0])
939 857 indxs = None
940 858 #print indxs1 , indxs2
941 859 for iv in range(len(indxs2)):
942 860 indv = numpy.array((indxs1 == indxs2[iv]).nonzero())
943 861 #print len(indxs2), indv
944 862 if len(indv[0]) > 0 :
945 863 indxs = numpy.concatenate((indxs,indxs2[iv]), axis=None)
946 864 # print indxs
947 865 indxs = indxs[1:]
948 866 #print(indxs, len(indxs))
949 867 if len(indxs) < 4 :
950 868 array[ii,:,:] = 0.
951 869 return
952 870
953 871 xpos = numpy.mod(indxs ,num_hei)
954 872 ypos = (indxs / num_hei)
955 873 sx = numpy.argsort(xpos) # Ordering respect to "x" (time)
956 874 #print sx
957 875 xpos = xpos[sx]
958 876 ypos = ypos[sx]
959 877
960 878 # *********************************** Cleaning isolated points **********************************
961 879 ic = 0
962 880 while True :
963 881 r = numpy.sqrt(list(numpy.power((xpos[ic]-xpos),2)+ numpy.power((ypos[ic]-ypos),2)))
964 882 #no_coh = WHERE(FINITE(r) AND (r LE rth),cno_coh)
965 883 #plt.plot(r)
966 884 #plt.show()
967 885 no_coh1 = (numpy.isfinite(r)==True).nonzero()
968 886 no_coh2 = (r <= rth).nonzero()
969 887 #print r, no_coh1, no_coh2
970 888 no_coh1 = numpy.array(no_coh1[0])
971 889 no_coh2 = numpy.array(no_coh2[0])
972 890 no_coh = None
973 891 #print valid1 , valid2
974 892 for iv in range(len(no_coh2)):
975 893 indv = numpy.array((no_coh1 == no_coh2[iv]).nonzero())
976 894 if len(indv[0]) > 0 :
977 895 no_coh = numpy.concatenate((no_coh,no_coh2[iv]), axis=None)
978 896 no_coh = no_coh[1:]
979 897 #print len(no_coh), no_coh
980 898 if len(no_coh) < 4 :
981 899 #print xpos[ic], ypos[ic], ic
982 900 # plt.plot(r)
983 901 # plt.show()
984 902 xpos[ic] = numpy.nan
985 903 ypos[ic] = numpy.nan
986 904
987 905 ic = ic + 1
988 906 if (ic == len(indxs)) :
989 907 break
990 908 #print( xpos, ypos)
991 909
992 910 indxs = (numpy.isfinite(list(xpos))==True).nonzero()
993 911 #print indxs[0]
994 912 if len(indxs[0]) < 4 :
995 913 array[ii,:,:] = 0.
996 914 return
997 915
998 916 xpos = xpos[indxs[0]]
999 917 ypos = ypos[indxs[0]]
1000 918 for i in range(0,len(ypos)):
1001 919 ypos[i]=int(ypos[i])
1002 920 junk = tmp
1003 921 tmp = junk*0.0
1004 922
1005 923 tmp[list(xpos + (ypos*num_hei))] = junk[list(xpos + (ypos*num_hei))]
1006 924 array[ii,:,:] = numpy.reshape(tmp,(num_prof,num_hei))
1007 925
1008 926 #print array.shape
1009 927 #tmp = numpy.reshape(tmp,(num_prof,num_hei))
1010 928 #print tmp.shape
1011 929
1012 930 # fig = plt.figure(figsize=(6,5))
1013 931 # left, bottom, width, height = 0.1, 0.1, 0.8, 0.8
1014 932 # ax = fig.add_axes([left, bottom, width, height])
1015 933 # x = range(num_prof)
1016 934 # y = range(num_hei)
1017 935 # cp = ax.contour(y,x,array[ii,:,:])
1018 936 # ax.clabel(cp, inline=True,fontsize=10)
1019 937 # plt.show()
1020 938 return array
1021 939
1022 940 class removeInterference(Operation):
1023 941
1024 942 def removeInterference2(self):
1025 943
1026 944 cspc = self.dataOut.data_cspc
1027 945 spc = self.dataOut.data_spc
1028 946 Heights = numpy.arange(cspc.shape[2])
1029 947 realCspc = numpy.abs(cspc)
1030 948
1031 949 for i in range(cspc.shape[0]):
1032 950 LinePower= numpy.sum(realCspc[i], axis=0)
1033 951 Threshold = numpy.amax(LinePower)-numpy.sort(LinePower)[len(Heights)-int(len(Heights)*0.1)]
1034 952 SelectedHeights = Heights[ numpy.where( LinePower < Threshold ) ]
1035 953 InterferenceSum = numpy.sum( realCspc[i,:,SelectedHeights], axis=0 )
1036 954 InterferenceThresholdMin = numpy.sort(InterferenceSum)[int(len(InterferenceSum)*0.98)]
1037 955 InterferenceThresholdMax = numpy.sort(InterferenceSum)[int(len(InterferenceSum)*0.99)]
1038 956
1039 957
1040 958 InterferenceRange = numpy.where( ([InterferenceSum > InterferenceThresholdMin]))# , InterferenceSum < InterferenceThresholdMax]) )
1041 959 #InterferenceRange = numpy.where( ([InterferenceRange < InterferenceThresholdMax]))
1042 960 if len(InterferenceRange)<int(cspc.shape[1]*0.3):
1043 961 cspc[i,InterferenceRange,:] = numpy.NaN
1044 962
1045 963 self.dataOut.data_cspc = cspc
1046 964
1047 965 def removeInterference(self, interf = 2, hei_interf = None, nhei_interf = None, offhei_interf = None):
1048 966
1049 967 jspectra = self.dataOut.data_spc
1050 968 jcspectra = self.dataOut.data_cspc
1051 969 jnoise = self.dataOut.getNoise()
1052 970 num_incoh = self.dataOut.nIncohInt
1053 971
1054 972 num_channel = jspectra.shape[0]
1055 973 num_prof = jspectra.shape[1]
1056 974 num_hei = jspectra.shape[2]
1057 975
1058 976 # hei_interf
1059 977 if hei_interf is None:
1060 978 count_hei = int(num_hei / 2)
1061 979 hei_interf = numpy.asmatrix(list(range(count_hei))) + num_hei - count_hei
1062 980 hei_interf = numpy.asarray(hei_interf)[0]
1063 981 # nhei_interf
1064 982 if (nhei_interf == None):
1065 983 nhei_interf = 5
1066 984 if (nhei_interf < 1):
1067 985 nhei_interf = 1
1068 986 if (nhei_interf > count_hei):
1069 987 nhei_interf = count_hei
1070 988 if (offhei_interf == None):
1071 989 offhei_interf = 0
1072 990
1073 991 ind_hei = list(range(num_hei))
1074 992 # mask_prof = numpy.asarray(range(num_prof - 2)) + 1
1075 993 # mask_prof[range(num_prof/2 - 1,len(mask_prof))] += 1
1076 994 mask_prof = numpy.asarray(list(range(num_prof)))
1077 995 num_mask_prof = mask_prof.size
1078 996 comp_mask_prof = [0, num_prof / 2]
1079 997
1080 998 # noise_exist: determines whether the jnoise variable has been defined and holds the noise information for each channel
1081 999 if (jnoise.size < num_channel or numpy.isnan(jnoise).any()):
1082 1000 jnoise = numpy.nan
1083 1001 noise_exist = jnoise[0] < numpy.Inf
1084 1002
1085 1003 # Interference removal subroutine
1086 1004 for ich in range(num_channel):
1087 1005 # Sort the spectra by power (ascending)
1088 1006 power = jspectra[ich, mask_prof, :]
1089 1007 power = power[:, hei_interf]
1090 1008 power = power.sum(axis=0)
1091 1009 psort = power.ravel().argsort()
1092 1010
1093 1011 # Estimate the average interference in the power spectra using
1094 1012 junkspc_interf = jspectra[ich, :, hei_interf[psort[list(range(
1095 1013 offhei_interf, nhei_interf + offhei_interf))]]]
1096 1014
1097 1015 if noise_exist:
1098 1016 # tmp_noise = jnoise[ich] / num_prof
1099 1017 tmp_noise = jnoise[ich]
1100 1018 junkspc_interf = junkspc_interf - tmp_noise
1101 1019 #junkspc_interf[:,comp_mask_prof] = 0
1102 1020
1103 1021 jspc_interf = junkspc_interf.sum(axis=0) / nhei_interf
1104 1022 jspc_interf = jspc_interf.transpose()
1105 1023 # Computing the average interference spectrum
1106 1024 noiseid = numpy.where(
1107 1025 jspc_interf <= tmp_noise / numpy.sqrt(num_incoh))
1108 1026 noiseid = noiseid[0]
1109 1027 cnoiseid = noiseid.size
1110 1028 interfid = numpy.where(
1111 1029 jspc_interf > tmp_noise / numpy.sqrt(num_incoh))
1112 1030 interfid = interfid[0]
1113 1031 cinterfid = interfid.size
1114 1032
1115 1033 if (cnoiseid > 0):
1116 1034 jspc_interf[noiseid] = 0
1117 1035
1118 1036 # Expanding the profiles to be cleaned
1119 1037 if (cinterfid > 0):
1120 1038 new_interfid = (
1121 1039 numpy.r_[interfid - 1, interfid, interfid + 1] + num_prof) % num_prof
1122 1040 new_interfid = numpy.asarray(new_interfid)
1123 1041 new_interfid = {x for x in new_interfid}
1124 1042 new_interfid = numpy.array(list(new_interfid))
1125 1043 new_cinterfid = new_interfid.size
1126 1044 else:
1127 1045 new_cinterfid = 0
1128 1046
1129 1047 for ip in range(new_cinterfid):
1130 1048 ind = junkspc_interf[:, new_interfid[ip]].ravel().argsort()
1131 1049 jspc_interf[new_interfid[ip]
1132 1050 ] = junkspc_interf[ind[nhei_interf // 2], new_interfid[ip]]
1133 1051
1134 1052 jspectra[ich, :, ind_hei] = jspectra[ich, :,
1135 1053 ind_hei] - jspc_interf # Fix indices
1136 1054
1137 1055 # Removing the interference at the point of strongest interference
1138 1056 ListAux = jspc_interf[mask_prof].tolist()
1139 1057 maxid = ListAux.index(max(ListAux))
1140 1058
1141 1059 if cinterfid > 0:
1142 1060 for ip in range(cinterfid * (interf == 2) - 1):
1143 1061 ind = (jspectra[ich, interfid[ip], :] < tmp_noise *
1144 1062 (1 + 1 / numpy.sqrt(num_incoh))).nonzero()
1145 1063 cind = len(ind)
1146 1064
1147 1065 if (cind > 0):
1148 1066 jspectra[ich, interfid[ip], ind] = tmp_noise * \
1149 1067 (1 + (numpy.random.uniform(cind) - 0.5) /
1150 1068 numpy.sqrt(num_incoh))
1151 1069
1152 1070 ind = numpy.array([-2, -1, 1, 2])
1153 1071 xx = numpy.zeros([4, 4])
1154 1072
1155 1073 for id1 in range(4):
1156 1074 xx[:, id1] = ind[id1]**numpy.asarray(list(range(4)))
1157 1075
1158 1076 xx_inv = numpy.linalg.inv(xx)
1159 1077 xx = xx_inv[:, 0]
1160 1078 ind = (ind + maxid + num_mask_prof) % num_mask_prof
1161 1079 yy = jspectra[ich, mask_prof[ind], :]
1162 1080 jspectra[ich, mask_prof[maxid], :] = numpy.dot(
1163 1081 yy.transpose(), xx)
1164 1082
1165 1083 indAux = (jspectra[ich, :, :] < tmp_noise *
1166 1084 (1 - 1 / numpy.sqrt(num_incoh))).nonzero()
1167 1085 jspectra[ich, indAux[0], indAux[1]] = tmp_noise * \
1168 1086 (1 - 1 / numpy.sqrt(num_incoh))
1169 1087
1170 1088 # Interference removal in the cross-spectra
1171 1089 if jcspectra is None:
1172 1090 return jspectra, jcspectra
1173 1091 num_pairs = int(jcspectra.size / (num_prof * num_hei))
1174 1092 jcspectra = jcspectra.reshape(num_pairs, num_prof, num_hei)
1175 1093
1176 1094 for ip in range(num_pairs):
1177 1095
1178 1096 #-------------------------------------------
1179 1097
1180 1098 cspower = numpy.abs(jcspectra[ip, mask_prof, :])
1181 1099 cspower = cspower[:, hei_interf]
1182 1100 cspower = cspower.sum(axis=0)
1183 1101
1184 1102 cspsort = cspower.ravel().argsort()
1185 1103 junkcspc_interf = jcspectra[ip, :, hei_interf[cspsort[list(range(
1186 1104 offhei_interf, nhei_interf + offhei_interf))]]]
1187 1105 junkcspc_interf = junkcspc_interf.transpose()
1188 1106 jcspc_interf = junkcspc_interf.sum(axis=1) / nhei_interf
1189 1107
1190 1108 ind = numpy.abs(jcspc_interf[mask_prof]).ravel().argsort()
1191 1109
1192 1110 median_real = int(numpy.median(numpy.real(
1193 1111 junkcspc_interf[mask_prof[ind[list(range(3 * num_prof // 4))]], :])))
1194 1112 median_imag = int(numpy.median(numpy.imag(
1195 1113 junkcspc_interf[mask_prof[ind[list(range(3 * num_prof // 4))]], :])))
1196 1114 comp_mask_prof = [int(e) for e in comp_mask_prof]
1197 1115 junkcspc_interf[comp_mask_prof, :] = numpy.complex(
1198 1116 median_real, median_imag)
1199 1117
1200 1118 for iprof in range(num_prof):
1201 1119 ind = numpy.abs(junkcspc_interf[iprof, :]).ravel().argsort()
1202 1120 jcspc_interf[iprof] = junkcspc_interf[iprof, ind[nhei_interf // 2]]
1203 1121
1204 1122 # Removing the interference
1205 1123 jcspectra[ip, :, ind_hei] = jcspectra[ip,
1206 1124 :, ind_hei] - jcspc_interf
1207 1125
1208 1126 ListAux = numpy.abs(jcspc_interf[mask_prof]).tolist()
1209 1127 maxid = ListAux.index(max(ListAux))
1210 1128
1211 1129 ind = numpy.array([-2, -1, 1, 2])
1212 1130 xx = numpy.zeros([4, 4])
1213 1131
1214 1132 for id1 in range(4):
1215 1133 xx[:, id1] = ind[id1]**numpy.asarray(list(range(4)))
1216 1134
1217 1135 xx_inv = numpy.linalg.inv(xx)
1218 1136 xx = xx_inv[:, 0]
1219 1137
1220 1138 ind = (ind + maxid + num_mask_prof) % num_mask_prof
1221 1139 yy = jcspectra[ip, mask_prof[ind], :]
1222 1140 jcspectra[ip, mask_prof[maxid], :] = numpy.dot(yy.transpose(), xx)
1223 1141
1224 1142 # Save results
1225 1143 self.dataOut.data_spc = jspectra
1226 1144 self.dataOut.data_cspc = jcspectra
1227 1145
1228 1146 return 1
1229 1147
1230 1148 def run(self, dataOut, interf = 2,hei_interf = None, nhei_interf = None, offhei_interf = None, mode=1):
1231 1149
1232 1150 self.dataOut = dataOut
1233 1151
1234 1152 if mode == 1:
1235 1153 self.removeInterference(interf=interf, hei_interf=hei_interf, nhei_interf=nhei_interf, offhei_interf=offhei_interf)
1236 1154 elif mode == 2:
1237 1155 self.removeInterference2()
1238 1156
1239 1157 return self.dataOut
1240 1158
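The interference removal above patches the most contaminated Doppler bin by cubic interpolation from its neighbours at offsets -2, -1, +1 and +2: the inverted Vandermonde matrix `xx` is only used to obtain the interpolation weights at offset 0. A standalone sketch of that step, with illustrative values and independent of the class:

import numpy

ind = numpy.array([-2, -1, 1, 2])
xx = numpy.zeros([4, 4])
for id1 in range(4):
    xx[:, id1] = ind[id1] ** numpy.arange(4)      # Vandermonde matrix, one column per neighbour
weights = numpy.linalg.inv(xx)[:, 0]              # cubic-interpolation weights at offset 0
print(numpy.round(weights, 4))                    # ~ [-0.1667  0.6667  0.6667 -0.1667]

# any cubic evaluated at the four offsets is reproduced exactly at offset 0
poly = lambda x: 2.0 - x + 0.5 * x ** 2 + 0.1 * x ** 3
print(numpy.dot(poly(ind), weights), poly(0))     # both ~ 2.0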
1241 1159
1242 1160 class IncohInt(Operation):
1243 1161
1244 1162 __profIndex = 0
1245 1163 __withOverlapping = False
1246 1164
1247 1165 __byTime = False
1248 1166 __initime = None
1249 1167 __lastdatatime = None
1250 1168 __integrationtime = None
1251 1169
1252 1170 __buffer_spc = None
1253 1171 __buffer_cspc = None
1254 1172 __buffer_dc = None
1255 1173
1256 1174 __dataReady = False
1257 1175
1258 1176 __timeInterval = None
1259 1177
1260 1178 n = None
1261 1179
1262 1180 def __init__(self):
1263 1181
1264 1182 Operation.__init__(self)
1265 1183
1266 1184 def setup(self, n=None, timeInterval=None, overlapping=False):
1267 1185 """
1268 1186 Set the parameters of the integration class.
1269 1187
1270 1188 Inputs:
1271 1189
1272 1190 n : Number of incoherent integrations
1273 1191 timeInterval : Integration time in seconds. Ignored if the parameter "n" is given
1274 1192 overlapping : Not used by this operation
1275 1193
1276 1194 """
1277 1195
1278 1196 self.__initime = None
1279 1197 self.__lastdatatime = 0
1280 1198
1281 1199 self.__buffer_spc = 0
1282 1200 self.__buffer_cspc = 0
1283 1201 self.__buffer_dc = 0
1284 1202
1285 1203 self.__profIndex = 0
1286 1204 self.__dataReady = False
1287 1205 self.__byTime = False
1288 1206
1289 1207 if n is None and timeInterval is None:
1290 1208 raise ValueError("n or timeInterval should be specified ...")
1291 1209
1292 1210 if n is not None:
1293 1211 self.n = int(n)
1294 1212 else:
1295 1213
1296 1214 self.__integrationtime = int(timeInterval)
1297 1215 self.n = None
1298 1216 self.__byTime = True
1299 1217
1300 1218 def putData(self, data_spc, data_cspc, data_dc):
1301 1219 """
1302 1220 Add a spectra set to __buffer_spc and increase __profIndex by one
1303 1221
1304 1222 """
1305 1223
1306 1224 self.__buffer_spc += data_spc
1307 1225
1308 1226 if data_cspc is None:
1309 1227 self.__buffer_cspc = None
1310 1228 else:
1311 1229 self.__buffer_cspc += data_cspc
1312 1230
1313 1231 if data_dc is None:
1314 1232 self.__buffer_dc = None
1315 1233 else:
1316 1234 self.__buffer_dc += data_dc
1317 1235
1318 1236 self.__profIndex += 1
1319 1237
1320 1238 return
1321 1239
1322 1240 def pushData(self):
1323 1241 """
1324 1242 Return the sum of the last profiles and the profiles used in the sum.
1325 1243
1326 1244 Affected:
1327 1245
1328 1246 self.__profileIndex
1329 1247
1330 1248 """
1331 1249
1332 1250 data_spc = self.__buffer_spc
1333 1251 data_cspc = self.__buffer_cspc
1334 1252 data_dc = self.__buffer_dc
1335 1253 n = self.__profIndex
1336 1254
1337 1255 self.__buffer_spc = 0
1338 1256 self.__buffer_cspc = 0
1339 1257 self.__buffer_dc = 0
1340 1258 self.__profIndex = 0
1341 1259
1342 1260 return data_spc, data_cspc, data_dc, n
1343 1261
1344 1262 def byProfiles(self, *args):
1345 1263
1346 1264 self.__dataReady = False
1347 1265 avgdata_spc = None
1348 1266 avgdata_cspc = None
1349 1267 avgdata_dc = None
1350 1268
1351 1269 self.putData(*args)
1352 1270
1353 1271 if self.__profIndex == self.n:
1354 1272
1355 1273 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
1356 1274 self.n = n
1357 1275 self.__dataReady = True
1358 1276
1359 1277 return avgdata_spc, avgdata_cspc, avgdata_dc
1360 1278
1361 1279 def byTime(self, datatime, *args):
1362 1280
1363 1281 self.__dataReady = False
1364 1282 avgdata_spc = None
1365 1283 avgdata_cspc = None
1366 1284 avgdata_dc = None
1367 1285
1368 1286 self.putData(*args)
1369 1287
1370 1288 if (datatime - self.__initime) >= self.__integrationtime:
1371 1289 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
1372 1290 self.n = n
1373 1291 self.__dataReady = True
1374 1292
1375 1293 return avgdata_spc, avgdata_cspc, avgdata_dc
1376 1294
1377 1295 def integrate(self, datatime, *args):
1378 1296
1379 1297 if self.__profIndex == 0:
1380 1298 self.__initime = datatime
1381 1299
1382 1300 if self.__byTime:
1383 1301 avgdata_spc, avgdata_cspc, avgdata_dc = self.byTime(
1384 1302 datatime, *args)
1385 1303 else:
1386 1304 avgdata_spc, avgdata_cspc, avgdata_dc = self.byProfiles(*args)
1387 1305
1388 1306 if not self.__dataReady:
1389 1307 return None, None, None, None
1390 1308
1391 1309 return self.__initime, avgdata_spc, avgdata_cspc, avgdata_dc
1392 1310
1393 1311 def run(self, dataOut, n=None, timeInterval=None, overlapping=False):
1394 1312 if n == 1:
1395 1313 return dataOut
1396 1314
1397 1315 dataOut.flagNoData = True
1398 1316
1399 1317 if not self.isConfig:
1400 1318 self.setup(n, timeInterval, overlapping)
1401 1319 self.isConfig = True
1402 1320
1403 1321 avgdatatime, avgdata_spc, avgdata_cspc, avgdata_dc = self.integrate(dataOut.utctime,
1404 1322 dataOut.data_spc,
1405 1323 dataOut.data_cspc,
1406 1324 dataOut.data_dc)
1407 1325
1408 1326 if self.__dataReady:
1409 1327
1410 1328 dataOut.data_spc = avgdata_spc
1411 1329 dataOut.data_cspc = avgdata_cspc
1412 1330 dataOut.data_dc = avgdata_dc
1413 1331 dataOut.nIncohInt *= self.n
1414 1332 dataOut.utctime = avgdatatime
1415 1333 dataOut.flagNoData = False
1416 1334
1417 1335 return dataOut
1418 1336
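A minimal usage sketch for IncohInt, assuming `dataOut` is the Spectra object delivered by the upstream processing unit (parameter values are illustrative). When the integration completes, `flagNoData` is cleared and `nIncohInt` is scaled by the number of spectra actually summed:

op = IncohInt()
dataOut = op.run(dataOut, n=8)                  # sum 8 consecutive spectra incoherently
# dataOut = op.run(dataOut, timeInterval=60)    # or accumulate until ~60 s have elapsed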
1419 1337 class dopplerFlip(Operation):
1420 1338
1421 1339 def run(self, dataOut):
1422 1340 # array 1: (num_chan, num_profiles, num_heights)
1423 1341 self.dataOut = dataOut
1424 1342 # JULIA oblique beam, channel index 2
1425 1343 # array 2: (num_profiles, num_heights)
1426 1344 jspectra = self.dataOut.data_spc[2]
1427 1345 jspectra_tmp = numpy.zeros(jspectra.shape)
1428 1346 num_profiles = jspectra.shape[0]
1429 1347 freq_dc = int(num_profiles / 2)
1430 1348 # Flip using a for loop
1431 1349 for j in range(num_profiles):
1432 1350 jspectra_tmp[num_profiles-j-1]= jspectra[j]
1433 1351 # Keep the DC profile and the adjacent lower profile unflipped
1434 1352 jspectra_tmp[freq_dc-1]= jspectra[freq_dc-1]
1435 1353 jspectra_tmp[freq_dc]= jspectra[freq_dc]
1436 1354 # the modified channel is written back into the channel array
1437 1355 self.dataOut.data_spc[2] = jspectra_tmp
1438 1356
1439 1357 return self.dataOut
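For reference, the profile-by-profile flip above is equivalent to a reversed slice that then restores the two profiles around DC; a standalone sketch with illustrative sizes, not a replacement for the operation:

import numpy

num_profiles, num_heights = 16, 8
jspectra = numpy.random.rand(num_profiles, num_heights)
freq_dc = int(num_profiles / 2)

flipped = jspectra[::-1].copy()               # same result as the for loop above
flipped[freq_dc - 1] = jspectra[freq_dc - 1]  # keep DC and the adjacent lower profile unflipped
flipped[freq_dc] = jspectra[freq_dc]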
@@ -1,1624 +1,1622
1 1 import sys
2 2 import numpy,math
3 3 from scipy import interpolate
4 4 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
5 5 from schainpy.model.data.jrodata import Voltage,hildebrand_sekhon
6 6 from schainpy.utils import log
7 7 from time import time
8 8
9 9
10 10
11 11 class VoltageProc(ProcessingUnit):
12 12
13 13 def __init__(self):
14 14
15 15 ProcessingUnit.__init__(self)
16 16
17 17 self.dataOut = Voltage()
18 18 self.flip = 1
19 19 self.setupReq = False
20 20
21 21 def run(self):
22 22
23 23 if self.dataIn.type == 'AMISR':
24 24 self.__updateObjFromAmisrInput()
25 25
26 26 if self.dataIn.type == 'Voltage':
27 27 self.dataOut.copy(self.dataIn)
28 28
29 29 def __updateObjFromAmisrInput(self):
30 30
31 31 self.dataOut.timeZone = self.dataIn.timeZone
32 32 self.dataOut.dstFlag = self.dataIn.dstFlag
33 33 self.dataOut.errorCount = self.dataIn.errorCount
34 34 self.dataOut.useLocalTime = self.dataIn.useLocalTime
35 35
36 36 self.dataOut.flagNoData = self.dataIn.flagNoData
37 37 self.dataOut.data = self.dataIn.data
38 38 self.dataOut.utctime = self.dataIn.utctime
39 39 self.dataOut.channelList = self.dataIn.channelList
40 40 #self.dataOut.timeInterval = self.dataIn.timeInterval
41 41 self.dataOut.heightList = self.dataIn.heightList
42 42 self.dataOut.nProfiles = self.dataIn.nProfiles
43 43
44 44 self.dataOut.nCohInt = self.dataIn.nCohInt
45 45 self.dataOut.ippSeconds = self.dataIn.ippSeconds
46 46 self.dataOut.frequency = self.dataIn.frequency
47 47
48 48 self.dataOut.azimuth = self.dataIn.azimuth
49 49 self.dataOut.zenith = self.dataIn.zenith
50 50
51 51 self.dataOut.beam.codeList = self.dataIn.beam.codeList
52 52 self.dataOut.beam.azimuthList = self.dataIn.beam.azimuthList
53 53 self.dataOut.beam.zenithList = self.dataIn.beam.zenithList
54 54
55 55
56 56 class selectChannels(Operation):
57 57
58 58 def run(self, dataOut, channelList):
59 59
60 60 channelIndexList = []
61 61 self.dataOut = dataOut
62 62 for channel in channelList:
63 63 if channel not in self.dataOut.channelList:
64 64 raise ValueError("Channel %d is not in %s" %(channel, str(self.dataOut.channelList)))
65 65
66 66 index = self.dataOut.channelList.index(channel)
67 67 channelIndexList.append(index)
68 68 self.selectChannelsByIndex(channelIndexList)
69 69 return self.dataOut
70 70
71 71 def selectChannelsByIndex(self, channelIndexList):
72 72 """
73 73 Selects a block of data by channel according to channelIndexList
74 74
75 75 Input:
76 76 channelIndexList : plain list of channel indexes to keep, e.g. [2,3,7]
77 77
78 78 Affected:
79 79 self.dataOut.data
80 80 self.dataOut.channelIndexList
81 81 self.dataOut.nChannels
82 82 self.dataOut.m_ProcessingHeader.totalSpectra
83 83 self.dataOut.systemHeaderObj.numChannels
84 84 self.dataOut.m_ProcessingHeader.blockSize
85 85
86 86 Return:
87 87 None
88 88 """
89 89
90 90 for channelIndex in channelIndexList:
91 91 if channelIndex not in self.dataOut.channelIndexList:
92 92 raise ValueError("The value %d in channelIndexList is not valid" %channelIndex)
93 93
94 94 if self.dataOut.type == 'Voltage':
95 95 if self.dataOut.flagDataAsBlock:
96 96 """
97 97 If the data was read as a block, dimension = [nChannels, nProfiles, nHeis]
98 98 """
99 99 data = self.dataOut.data[channelIndexList,:,:]
100 100 else:
101 101 data = self.dataOut.data[channelIndexList,:]
102 102
103 103 self.dataOut.data = data
104 104 # self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
105 105 self.dataOut.channelList = range(len(channelIndexList))
106 106
107 107 elif self.dataOut.type == 'Spectra':
108 108 data_spc = self.dataOut.data_spc[channelIndexList, :]
109 109 data_dc = self.dataOut.data_dc[channelIndexList, :]
110 110
111 111 self.dataOut.data_spc = data_spc
112 112 self.dataOut.data_dc = data_dc
113 113
114 114 # self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
115 115 self.dataOut.channelList = channelIndexList
116 116 self.__selectPairsByChannel(channelIndexList)
117 117
118 118 return 1
119 119
120 120 def __selectPairsByChannel(self, channelList=None):
121 121
122 122 if channelList == None:
123 123 return
124 124
125 125 pairsIndexListSelected = []
126 126 for pairIndex in self.dataOut.pairsIndexList:
127 127 # First pair
128 128 if self.dataOut.pairsList[pairIndex][0] not in channelList:
129 129 continue
130 130 # Second pair
131 131 if self.dataOut.pairsList[pairIndex][1] not in channelList:
132 132 continue
133 133
134 134 pairsIndexListSelected.append(pairIndex)
135 135
136 136 if not pairsIndexListSelected:
137 137 self.dataOut.data_cspc = None
138 138 self.dataOut.pairsList = []
139 139 return
140 140
141 141 self.dataOut.data_cspc = self.dataOut.data_cspc[pairsIndexListSelected]
142 142 self.dataOut.pairsList = [self.dataOut.pairsList[i]
143 143 for i in pairsIndexListSelected]
144 144
145 145 return
146 146
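A minimal usage sketch for selectChannels, assuming `dataOut` comes from the processing chain (the channel numbers are illustrative). For Spectra data the operation also drops cross-spectra pairs that no longer have both of their channels selected:

op = selectChannels()
dataOut = op.run(dataOut, channelList=[0, 2])   # keep only channels 0 and 2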
147 147 class selectHeights(Operation):
148 148
149 149 def run(self, dataOut, minHei=None, maxHei=None, minIndex=None, maxIndex=None):
150 150 """
151 151 Selects a block of data for the group of heights in the range
152 152 minHei <= height <= maxHei
153 153
154 154 Input:
155 155 minHei : minimum height value to consider
156 156 maxHei : maximum height value to consider
157 157
158 158 Affected:
159 159 Several attributes are changed indirectly through the method selectHeightsByIndex
160 160
161 161 Return:
162 162 1 if the method ran successfully, otherwise 0
163 163 """
164 164
165 165 self.dataOut = dataOut
166 166
167 167 if minHei and maxHei:
168 168
169 169 if (minHei < self.dataOut.heightList[0]):
170 170 minHei = self.dataOut.heightList[0]
171 171
172 172 if (maxHei > self.dataOut.heightList[-1]):
173 173 maxHei = self.dataOut.heightList[-1]
174 174
175 175 minIndex = 0
176 176 maxIndex = 0
177 177 heights = self.dataOut.heightList
178 178
179 179 inda = numpy.where(heights >= minHei)
180 180 indb = numpy.where(heights <= maxHei)
181 181
182 182 try:
183 183 minIndex = inda[0][0]
184 184 except:
185 185 minIndex = 0
186 186
187 187 try:
188 188 maxIndex = indb[0][-1]
189 189 except:
190 190 maxIndex = len(heights)
191 191
192 192 self.selectHeightsByIndex(minIndex, maxIndex)
193 193
194 194 return self.dataOut
195 195
196 196 def selectHeightsByIndex(self, minIndex, maxIndex):
197 197 """
198 198 Selects a block of data for the group of height indexes in the range
199 199 minIndex <= index <= maxIndex
200 200
201 201 Input:
202 202 minIndex : minimum height index to consider
203 203 maxIndex : maximum height index to consider
204 204
205 205 Affected:
206 206 self.dataOut.data
207 207 self.dataOut.heightList
208 208
209 209 Return:
210 210 1 if the method ran successfully, otherwise 0
211 211 """
212 212
213 213 if self.dataOut.type == 'Voltage':
214 214 if (minIndex < 0) or (minIndex > maxIndex):
215 215 raise ValueError("Height index range (%d,%d) is not valid" % (minIndex, maxIndex))
216 216
217 217 if (maxIndex >= self.dataOut.nHeights):
218 218 maxIndex = self.dataOut.nHeights
219 219
220 220 #voltage
221 221 if self.dataOut.flagDataAsBlock:
222 222 """
223 223 If the data was read as a block, dimension = [nChannels, nProfiles, nHeis]
224 224 """
225 225 data = self.dataOut.data[:,:, minIndex:maxIndex]
226 226 else:
227 227 data = self.dataOut.data[:, minIndex:maxIndex]
228 228
229 229 # firstHeight = self.dataOut.heightList[minIndex]
230 230
231 231 self.dataOut.data = data
232 232 self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex]
233 233
234 234 if self.dataOut.nHeights <= 1:
235 235 raise ValueError("selectHeights: Too few heights. Current number of heights is %d" %(self.dataOut.nHeights))
236 236 elif self.dataOut.type == 'Spectra':
237 237 if (minIndex < 0) or (minIndex > maxIndex):
238 238 raise ValueError("Error selecting heights: Index range (%d,%d) is not valid" % (
239 239 minIndex, maxIndex))
240 240
241 241 if (maxIndex >= self.dataOut.nHeights):
242 242 maxIndex = self.dataOut.nHeights - 1
243 243
244 244 # Spectra
245 245 data_spc = self.dataOut.data_spc[:, :, minIndex:maxIndex + 1]
246 246
247 247 data_cspc = None
248 248 if self.dataOut.data_cspc is not None:
249 249 data_cspc = self.dataOut.data_cspc[:, :, minIndex:maxIndex + 1]
250 250
251 251 data_dc = None
252 252 if self.dataOut.data_dc is not None:
253 253 data_dc = self.dataOut.data_dc[:, minIndex:maxIndex + 1]
254 254
255 255 self.dataOut.data_spc = data_spc
256 256 self.dataOut.data_cspc = data_cspc
257 257 self.dataOut.data_dc = data_dc
258 258
259 259 self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex + 1]
260 260
261 261 return 1
262 262
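A minimal usage sketch for selectHeights, assuming `dataOut` comes from the processing chain; heights can be given either as values in the units of `heightList` (typically km) or directly as indexes (the values here are illustrative):

op = selectHeights()
dataOut = op.run(dataOut, minHei=80.0, maxHei=180.0)     # by height value
# dataOut = op.run(dataOut, minIndex=10, maxIndex=200)   # or by height index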
263 263
264 264 class filterByHeights(Operation):
265 265
266 266 def run(self, dataOut, window):
267 267
268 268 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
269 269
270 270 if window == None:
271 271 window = (dataOut.radarControllerHeaderObj.txA/dataOut.radarControllerHeaderObj.nBaud) / deltaHeight
272 272
273 273 newdelta = deltaHeight * window
274 274 r = dataOut.nHeights % window
275 275 newheights = (dataOut.nHeights-r)/window
276 276
277 277 if newheights <= 1:
278 278 raise ValueError("filterByHeights: Too few heights. Current number of heights is %d and window is %d" %(dataOut.nHeights, window))
279 279
280 280 if dataOut.flagDataAsBlock:
281 281 """
282 282 If the data was read as a block, dimension = [nChannels, nProfiles, nHeis]
283 283 """
284 284 buffer = dataOut.data[:, :, 0:int(dataOut.nHeights-r)]
285 285 buffer = buffer.reshape(dataOut.nChannels, dataOut.nProfiles, int(dataOut.nHeights/window), window)
286 286 buffer = numpy.sum(buffer,3)
287 287
288 288 else:
289 289 buffer = dataOut.data[:,0:int(dataOut.nHeights-r)]
290 290 buffer = buffer.reshape(dataOut.nChannels,int(dataOut.nHeights/window),int(window))
291 291 buffer = numpy.sum(buffer,2)
292 292
293 293 dataOut.data = buffer
294 294 dataOut.heightList = dataOut.heightList[0] + numpy.arange( newheights )*newdelta
295 295 dataOut.windowOfFilter = window
296 296
297 297 return dataOut
298 298
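filterByHeights sums groups of `window` consecutive height samples, so the height spacing grows from `deltaHeight` to `deltaHeight * window` and any leftover samples are dropped. A standalone numeric sketch of the same reshaping (array sizes are illustrative):

import numpy

nChannels, nHeights, window = 2, 1002, 4
data = numpy.random.rand(nChannels, nHeights)

r = nHeights % window                              # 2 leftover samples are discarded
grouped = data[:, :nHeights - r].reshape(nChannels, (nHeights - r) // window, window)
filtered = grouped.sum(axis=2)                     # shape (2, 250), one value per new height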
299 299
300 300 class setH0(Operation):
301 301
302 302 def run(self, dataOut, h0, deltaHeight = None):
303 303
304 304 if not deltaHeight:
305 305 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
306 306
307 307 nHeights = dataOut.nHeights
308 308
309 309 newHeiRange = h0 + numpy.arange(nHeights)*deltaHeight
310 310
311 311 dataOut.heightList = newHeiRange
312 312
313 313 return dataOut
314 314
315 315
316 316 class deFlip(Operation):
317 317
318 318 def run(self, dataOut, channelList = []):
319 319 self.flip = getattr(self, 'flip', 1) # keep the flip state between calls; initialize it on the first call
320 320 data = dataOut.data.copy()
321 321
322 322 if dataOut.flagDataAsBlock:
323 323 flip = self.flip
324 324 profileList = list(range(dataOut.nProfiles))
325 325
326 326 if not channelList:
327 327 for thisProfile in profileList:
328 328 data[:,thisProfile,:] = data[:,thisProfile,:]*flip
329 329 flip *= -1.0
330 330 else:
331 331 for thisChannel in channelList:
332 332 if thisChannel not in dataOut.channelList:
333 333 continue
334 334
335 335 for thisProfile in profileList:
336 336 data[thisChannel,thisProfile,:] = data[thisChannel,thisProfile,:]*flip
337 337 flip *= -1.0
338 338
339 339 self.flip = flip
340 340
341 341 else:
342 342 if not channelList:
343 343 data[:,:] = data[:,:]*self.flip
344 344 else:
345 345 for thisChannel in channelList:
346 346 if thisChannel not in dataOut.channelList:
347 347 continue
348 348
349 349 data[thisChannel,:] = data[thisChannel,:]*self.flip
350 350
351 351 self.flip *= -1.
352 352
353 353 dataOut.data = data
354 354
355 355 return dataOut
356 356
357 357
358 358 class setAttribute(Operation):
359 359 '''
360 360 Set an arbitrary attribute(s) to dataOut
361 361 '''
362 362
363 363 def __init__(self):
364 364
365 365 Operation.__init__(self)
366 366 self._ready = False
367 367
368 368 def run(self, dataOut, **kwargs):
369 369
370 370 for key, value in kwargs.items():
371 371 setattr(dataOut, key, value)
372 372
373 373 return dataOut
374 374
375 375
376 376 @MPDecorator
377 377 class printAttribute(Operation):
378 378 '''
379 379 Print an arbitrary attribute of dataOut
380 380 '''
381 381
382 382 def __init__(self):
383 383
384 384 Operation.__init__(self)
385 385
386 386 def run(self, dataOut, attributes):
387 387
388 388 if isinstance(attributes, str):
389 389 attributes = [attributes]
390 390 for attr in attributes:
391 391 if hasattr(dataOut, attr):
392 392 log.log(getattr(dataOut, attr), attr)
393 393
394 394
395 395 class interpolateHeights(Operation):
396 396
397 397 def run(self, dataOut, topLim, botLim):
398 398 # heights 69 to 72 for JULIA
399 399 # heights 82 to 84 for meteors
400 400 if len(numpy.shape(dataOut.data))==2:
401 401 sampInterp = (dataOut.data[:,botLim-1] + dataOut.data[:,topLim+1])/2
402 402 sampInterp = numpy.transpose(numpy.tile(sampInterp,(topLim-botLim + 1,1)))
403 403 #dataOut.data[:,botLim:limSup+1] = sampInterp
404 404 dataOut.data[:,botLim:topLim+1] = sampInterp
405 405 else:
406 406 nHeights = dataOut.data.shape[2]
407 407 x = numpy.hstack((numpy.arange(botLim),numpy.arange(topLim+1,nHeights)))
408 408 y = dataOut.data[:,:,list(range(botLim))+list(range(topLim+1,nHeights))]
409 409 f = interpolate.interp1d(x, y, axis = 2)
410 410 xnew = numpy.arange(botLim,topLim+1)
411 411 ynew = f(xnew)
412 412 dataOut.data[:,:,botLim:topLim+1] = ynew
413 413
414 414 return dataOut
415 415
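interpolateHeights fills a contaminated height interval: for 2-D data it uses the mean of the two samples just outside the interval, and for block data it interpolates linearly along the height axis. A standalone sketch of the block branch (array sizes and limits are illustrative):

import numpy
from scipy import interpolate

nChannels, nProfiles, nHeights = 2, 8, 100
data = numpy.random.rand(nChannels, nProfiles, nHeights)
botLim, topLim = 69, 72                          # height indexes to replace

x = numpy.hstack((numpy.arange(botLim), numpy.arange(topLim + 1, nHeights)))
f = interpolate.interp1d(x, data[:, :, x], axis=2)
data[:, :, botLim:topLim + 1] = f(numpy.arange(botLim, topLim + 1))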
416 416
417 417 class CohInt(Operation):
418 418
419 419 isConfig = False
420 420 __profIndex = 0
421 421 __byTime = False
422 422 __initime = None
423 423 __lastdatatime = None
424 424 __integrationtime = None
425 425 __buffer = None
426 426 __bufferStride = []
427 427 __dataReady = False
428 428 __profIndexStride = 0
429 429 __dataToPutStride = False
430 430 n = None
431 431
432 432 def __init__(self, **kwargs):
433 433
434 434 Operation.__init__(self, **kwargs)
435 435
436 436 def setup(self, n=None, timeInterval=None, stride=None, overlapping=False, byblock=False):
437 437 """
438 438 Set the parameters of the integration class.
439 439
440 440 Inputs:
441 441
442 442 n : Number of coherent integrations
443 443 timeInterval : Time of integration. If the parameter "n" is selected this one does not work
444 444 overlapping :
445 445 """
446 446
447 447 self.__initime = None
448 448 self.__lastdatatime = 0
449 449 self.__buffer = None
450 450 self.__dataReady = False
451 451 self.byblock = byblock
452 452 self.stride = stride
453 453
454 454 if n == None and timeInterval == None:
455 455 raise ValueError("n or timeInterval should be specified ...")
456 456
457 457 if n != None:
458 458 self.n = n
459 459 self.__byTime = False
460 460 else:
461 461 self.__integrationtime = timeInterval #* 60. #if (type(timeInterval)!=integer) -> change this line
462 462 self.n = 9999
463 463 self.__byTime = True
464 464
465 465 if overlapping:
466 466 self.__withOverlapping = True
467 467 self.__buffer = None
468 468 else:
469 469 self.__withOverlapping = False
470 470 self.__buffer = 0
471 471
472 472 self.__profIndex = 0
473 473
474 474 def putData(self, data):
475 475
476 476 """
477 477 Add a profile to __buffer and increase __profIndex by one
478 478
479 479 """
480 480
481 481 if not self.__withOverlapping:
482 482 self.__buffer += data.copy()
483 483 self.__profIndex += 1
484 484 return
485 485
486 486 #Overlapping data
487 487 nChannels, nHeis = data.shape
488 488 data = numpy.reshape(data, (1, nChannels, nHeis))
489 489
490 490 #If the buffer is empty then it takes the data value
491 491 if self.__buffer is None:
492 492 self.__buffer = data
493 493 self.__profIndex += 1
494 494 return
495 495
496 496 #If the buffer length is lower than n then stack the data value
497 497 if self.__profIndex < self.n:
498 498 self.__buffer = numpy.vstack((self.__buffer, data))
499 499 self.__profIndex += 1
500 500 return
501 501
502 502 #If the buffer length is equal to n then replacing the last buffer value with the data value
503 503 self.__buffer = numpy.roll(self.__buffer, -1, axis=0)
504 504 self.__buffer[self.n-1] = data
505 505 self.__profIndex = self.n
506 506 return
507 507
508 508
509 509 def pushData(self):
510 510 """
511 511 Return the sum of the last profiles and the profiles used in the sum.
512 512
513 513 Affected:
514 514
515 515 self.__profileIndex
516 516
517 517 """
518 518
519 519 if not self.__withOverlapping:
520 520 data = self.__buffer
521 521 n = self.__profIndex
522 522
523 523 self.__buffer = 0
524 524 self.__profIndex = 0
525 525
526 526 return data, n
527 527
528 528 #Integration with Overlapping
529 529 data = numpy.sum(self.__buffer, axis=0)
530 530 # print data
531 531 # raise
532 532 n = self.__profIndex
533 533
534 534 return data, n
535 535
536 536 def byProfiles(self, data):
537 537
538 538 self.__dataReady = False
539 539 avgdata = None
540 540 # n = None
541 541 # print data
542 542 # raise
543 543 self.putData(data)
544 544
545 545 if self.__profIndex == self.n:
546 546 avgdata, n = self.pushData()
547 547 self.__dataReady = True
548 548
549 549 return avgdata
550 550
551 551 def byTime(self, data, datatime):
552 552
553 553 self.__dataReady = False
554 554 avgdata = None
555 555 n = None
556 556
557 557 self.putData(data)
558 558
559 559 if (datatime - self.__initime) >= self.__integrationtime:
560 560 avgdata, n = self.pushData()
561 561 self.n = n
562 562 self.__dataReady = True
563 563
564 564 return avgdata
565 565
566 566 def integrateByStride(self, data, datatime):
567 567 # print data
568 568 if self.__profIndex == 0:
569 569 self.__buffer = [[data.copy(), datatime]]
570 570 else:
571 571 self.__buffer.append([data.copy(),datatime])
572 572 self.__profIndex += 1
573 573 self.__dataReady = False
574 574
575 575 if self.__profIndex == self.n * self.stride :
576 576 self.__dataToPutStride = True
577 577 self.__profIndexStride = 0
578 578 self.__profIndex = 0
579 579 self.__bufferStride = []
580 580 for i in range(self.stride):
581 581 current = self.__buffer[i::self.stride]
582 582 data = numpy.sum([t[0] for t in current], axis=0)
583 583 avgdatatime = numpy.average([t[1] for t in current])
584 584 # print data
585 585 self.__bufferStride.append((data, avgdatatime))
586 586
587 587 if self.__dataToPutStride:
588 588 self.__dataReady = True
589 589 self.__profIndexStride += 1
590 590 if self.__profIndexStride == self.stride:
591 591 self.__dataToPutStride = False
592 592 # print self.__bufferStride[self.__profIndexStride - 1]
593 593 # raise
594 594 return self.__bufferStride[self.__profIndexStride - 1]
595 595
596 596
597 597 return None, None
598 598
599 599 def integrate(self, data, datatime=None):
600 600
601 601 if self.__initime == None:
602 602 self.__initime = datatime
603 603
604 604 if self.__byTime:
605 605 avgdata = self.byTime(data, datatime)
606 606 else:
607 607 avgdata = self.byProfiles(data)
608 608
609 609
610 610 self.__lastdatatime = datatime
611 611
612 612 if avgdata is None:
613 613 return None, None
614 614
615 615 avgdatatime = self.__initime
616 616
617 617 deltatime = datatime - self.__lastdatatime
618 618
619 619 if not self.__withOverlapping:
620 620 self.__initime = datatime
621 621 else:
622 622 self.__initime += deltatime
623 623
624 624 return avgdata, avgdatatime
625 625
626 626 def integrateByBlock(self, dataOut):
627 627
628 628 times = int(dataOut.data.shape[1]/self.n)
629 629 avgdata = numpy.zeros((dataOut.nChannels, times, dataOut.nHeights), dtype=numpy.complex)
630 630
631 631 id_min = 0
632 632 id_max = self.n
633 633
634 634 for i in range(times):
635 635 junk = dataOut.data[:,id_min:id_max,:]
636 636 avgdata[:,i,:] = junk.sum(axis=1)
637 637 id_min += self.n
638 638 id_max += self.n
639 639
640 640 timeInterval = dataOut.ippSeconds*self.n
641 641 avgdatatime = (times - 1) * timeInterval + dataOut.utctime
642 642 self.__dataReady = True
643 643 return avgdata, avgdatatime
644 644
645 645 def run(self, dataOut, n=None, timeInterval=None, stride=None, overlapping=False, byblock=False, **kwargs):
646 646
647 647 if not self.isConfig:
648 648 self.setup(n=n, stride=stride, timeInterval=timeInterval, overlapping=overlapping, byblock=byblock, **kwargs)
649 649 self.isConfig = True
650 650
651 651 if dataOut.flagDataAsBlock:
652 652 """
653 653 If the data was read as a block, dimension = [nChannels, nProfiles, nHeis]
654 654 """
655 655 avgdata, avgdatatime = self.integrateByBlock(dataOut)
656 656 dataOut.nProfiles /= self.n
657 657 else:
658 658 if stride is None:
659 659 avgdata, avgdatatime = self.integrate(dataOut.data, dataOut.utctime)
660 660 else:
661 661 avgdata, avgdatatime = self.integrateByStride(dataOut.data, dataOut.utctime)
662 662
663 663
664 664 # dataOut.timeInterval *= n
665 665 dataOut.flagNoData = True
666 666
667 667 if self.__dataReady:
668 668 dataOut.data = avgdata
669 669 if not dataOut.flagCohInt:
670 670 dataOut.nCohInt *= self.n
671 671 dataOut.flagCohInt = True
672 672 dataOut.utctime = avgdatatime
673 673 # print avgdata, avgdatatime
674 674 # raise
675 675 # dataOut.timeInterval = dataOut.ippSeconds * dataOut.nCohInt
676 676 dataOut.flagNoData = False
677 677 return dataOut
678 678
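A minimal usage sketch for CohInt, assuming `dataOut` is a Voltage object from the reader (the values are illustrative). Integration can be set by number of profiles, by elapsed time, or in strided groups:

op = CohInt()
dataOut = op.run(dataOut, n=4)                    # sum 4 consecutive profiles coherently
# dataOut = op.run(dataOut, timeInterval=0.5)     # or accumulate for ~0.5 s
# dataOut = op.run(dataOut, n=4, stride=2)        # or integrate 2 interleaved groups of 4 profiles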
679 679 class Decoder(Operation):
680 680
681 681 isConfig = False
682 682 __profIndex = 0
683 683
684 684 code = None
685 685
686 686 nCode = None
687 687 nBaud = None
688 688
689 689 def __init__(self, **kwargs):
690 690
691 691 Operation.__init__(self, **kwargs)
692 692
693 693 self.times = None
694 694 self.osamp = None
695 695 # self.__setValues = False
696 696 self.isConfig = False
697 697 self.setupReq = False
698 698 def setup(self, code, osamp, dataOut):
699 699
700 700 self.__profIndex = 0
701 701
702 702 self.code = code
703 703
704 704 self.nCode = len(code)
705 705 self.nBaud = len(code[0])
706 706 if (osamp != None) and (osamp >1):
707 707 self.osamp = osamp
708 708 self.code = numpy.repeat(code, repeats=self.osamp, axis=1)
709 709 self.nBaud = self.nBaud*self.osamp
710 710
711 711 self.__nChannels = dataOut.nChannels
712 712 self.__nProfiles = dataOut.nProfiles
713 713 self.__nHeis = dataOut.nHeights
714 714
715 715 if self.__nHeis < self.nBaud:
716 716 raise ValueError('Number of heights (%d) should be greater than number of bauds (%d)' %(self.__nHeis, self.nBaud))
717 717
718 718 #Frequency
719 719 __codeBuffer = numpy.zeros((self.nCode, self.__nHeis), dtype=numpy.complex)
720 720
721 721 __codeBuffer[:,0:self.nBaud] = self.code
722 722
723 723 self.fft_code = numpy.conj(numpy.fft.fft(__codeBuffer, axis=1))
724 724
725 725 if dataOut.flagDataAsBlock:
726 726
727 727 self.ndatadec = self.__nHeis #- self.nBaud + 1
728 728
729 729 self.datadecTime = numpy.zeros((self.__nChannels, self.__nProfiles, self.ndatadec), dtype=numpy.complex)
730 730
731 731 else:
732 732
733 733 #Time
734 734 self.ndatadec = self.__nHeis #- self.nBaud + 1
735 735
736 736 self.datadecTime = numpy.zeros((self.__nChannels, self.ndatadec), dtype=numpy.complex)
737 737
738 738 def __convolutionInFreq(self, data):
739 739
740 740 fft_code = self.fft_code[self.__profIndex].reshape(1,-1)
741 741
742 742 fft_data = numpy.fft.fft(data, axis=1)
743 743
744 744 conv = fft_data*fft_code
745 745
746 746 data = numpy.fft.ifft(conv,axis=1)
747 747
748 748 return data
749 749
750 750 def __convolutionInFreqOpt(self, data):
751 751
752 752 raise NotImplementedError
753 753
754 754 def __convolutionInTime(self, data):
755 755
756 756 code = self.code[self.__profIndex]
757 757 for i in range(self.__nChannels):
758 758 self.datadecTime[i,:] = numpy.correlate(data[i,:], code, mode='full')[self.nBaud-1:]
759 759
760 760 return self.datadecTime
761 761
762 762 def __convolutionByBlockInTime(self, data):
763 763
764 764 repetitions = int(self.__nProfiles / self.nCode)
765 765 junk = numpy.lib.stride_tricks.as_strided(self.code, (repetitions, self.code.size), (0, self.code.itemsize))
766 766 junk = junk.flatten()
767 767 code_block = numpy.reshape(junk, (self.nCode*repetitions, self.nBaud))
768 768 profilesList = range(self.__nProfiles)
769 769
770 770 for i in range(self.__nChannels):
771 771 for j in profilesList:
772 772 self.datadecTime[i,j,:] = numpy.correlate(data[i,j,:], code_block[j,:], mode='full')[self.nBaud-1:]
773 773 return self.datadecTime
774 774
775 775 def __convolutionByBlockInFreq(self, data):
776 776
777 777 raise NotImplementedError("Decoder by frequency for blocks is not implemented")
778 778
779 779
780 780 fft_code = self.fft_code[self.__profIndex].reshape(1,-1)
781 781
782 782 fft_data = numpy.fft.fft(data, axis=2)
783 783
784 784 conv = fft_data*fft_code
785 785
786 786 data = numpy.fft.ifft(conv,axis=2)
787 787
788 788 return data
789 789
790 790
791 791 def run(self, dataOut, code=None, nCode=None, nBaud=None, mode = 0, osamp=None, times=None):
792 792
793 793 if dataOut.flagDecodeData:
794 794 print("This data is already decoded, recoding again ...")
795 795
796 796 if not self.isConfig:
797 797
798 798 if code is None:
799 799 if dataOut.code is None:
800 800 raise ValueError("Code could not be read from %s instance. Enter a value in Code parameter" %dataOut.type)
801 801
802 802 code = dataOut.code
803 803 else:
804 804 code = numpy.array(code).reshape(nCode,nBaud)
805 805 self.setup(code, osamp, dataOut)
806 806
807 807 self.isConfig = True
808 808
809 809 if mode == 3:
810 810 sys.stderr.write("Decoder Warning: mode=%d is not valid, using mode=0\n" %mode)
811 811
812 812 if times != None:
813 813 sys.stderr.write("Decoder Warning: Argument 'times' in not used anymore\n")
814 814
815 815 if self.code is None:
816 816 print("Fail decoding: Code is not defined.")
817 817 return
818 818
819 819 self.__nProfiles = dataOut.nProfiles
820 820 datadec = None
821 821
822 822 if mode == 3:
823 823 mode = 0
824 824
825 825 if dataOut.flagDataAsBlock:
826 826 """
827 827 Decoding when data has been read as a block,
828 828 """
829 829
830 830 if mode == 0:
831 831 datadec = self.__convolutionByBlockInTime(dataOut.data)
832 832 if mode == 1:
833 833 datadec = self.__convolutionByBlockInFreq(dataOut.data)
834 834 else:
835 835 """
836 836 Decoding when data have been read profile by profile
837 837 """
838 838 if mode == 0:
839 839 datadec = self.__convolutionInTime(dataOut.data)
840 840
841 841 if mode == 1:
842 842 datadec = self.__convolutionInFreq(dataOut.data)
843 843
844 844 if mode == 2:
845 845 datadec = self.__convolutionInFreqOpt(dataOut.data)
846 846
847 847 if datadec is None:
848 848 raise ValueError("Codification mode selected is not valid: mode=%d. Try selecting 0 or 1" %mode)
849 849
850 850 dataOut.code = self.code
851 851 dataOut.nCode = self.nCode
852 852 dataOut.nBaud = self.nBaud
853 853
854 854 dataOut.data = datadec
855 855
856 856 dataOut.heightList = dataOut.heightList[0:datadec.shape[-1]]
857 857
858 858 dataOut.flagDecodeData = True # assume the data is now decoded
859 859
860 860 if self.__profIndex == self.nCode-1:
861 861 self.__profIndex = 0
862 862 return dataOut
863 863
864 864 self.__profIndex += 1
865 865
866 866 return dataOut
867 867 # dataOut.flagDeflipData = True #asumo q la data no esta sin flip
868 868
869 869
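A minimal usage sketch for Decoder, assuming `dataOut` carries coded voltages; the code matrix here is purely illustrative (any nCode x nBaud sequence used by the experiment can be passed instead). With code=None the operation falls back to dataOut.code when available:

code = [[1, 1, 1, -1],        # illustrative 2 x 4 code, one row per transmitted sequence
        [1, 1, -1, 1]]
op = Decoder()
dataOut = op.run(dataOut, code=code, nCode=2, nBaud=4, mode=0)   # mode 0: time-domain correlation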
870 870 class ProfileConcat(Operation):
871 871
872 872 isConfig = False
873 873 buffer = None
874 874
875 875 def __init__(self, **kwargs):
876 876
877 877 Operation.__init__(self, **kwargs)
878 878 self.profileIndex = 0
879 879
880 880 def reset(self):
881 881 self.buffer = numpy.zeros_like(self.buffer)
882 882 self.start_index = 0
883 883 self.times = 1
884 884
885 885 def setup(self, data, m, n=1):
886 886 self.buffer = numpy.zeros((data.shape[0],data.shape[1]*m),dtype=type(data[0,0]))
887 887 self.nHeights = data.shape[1]#.nHeights
888 888 self.start_index = 0
889 889 self.times = 1
890 890
891 891 def concat(self, data):
892 892
893 893 self.buffer[:,self.start_index:self.nHeights*self.times] = data.copy()
894 894 self.start_index = self.start_index + self.nHeights
895 895
896 896 def run(self, dataOut, m):
897 897 dataOut.flagNoData = True
898 898
899 899 if not self.isConfig:
900 900 self.setup(dataOut.data, m, 1)
901 901 self.isConfig = True
902 902
903 903 if dataOut.flagDataAsBlock:
904 904 raise ValueError("ProfileConcat can only be used when voltage have been read profile by profile, getBlock = False")
905 905
906 906 else:
907 907 self.concat(dataOut.data)
908 908 self.times += 1
909 909 if self.times > m:
910 910 dataOut.data = self.buffer
911 911 self.reset()
912 912 dataOut.flagNoData = False
913 913 # more header and dataOut properties must be updated, for example the heights
914 914 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
915 915 xf = dataOut.heightList[0] + dataOut.nHeights * deltaHeight * m
916 916 dataOut.heightList = numpy.arange(dataOut.heightList[0], xf, deltaHeight)
917 917 dataOut.ippSeconds *= m
918 918 return dataOut
919 919
920 920 class ProfileSelector(Operation):
921 921
922 922 profileIndex = None
923 923 # Total number of profiles
924 924 nProfiles = None
925 925
926 926 def __init__(self, **kwargs):
927 927
928 928 Operation.__init__(self, **kwargs)
929 929 self.profileIndex = 0
930 930
931 931 def incProfileIndex(self):
932 932
933 933 self.profileIndex += 1
934 934
935 935 if self.profileIndex >= self.nProfiles:
936 936 self.profileIndex = 0
937 937
938 938 def isThisProfileInRange(self, profileIndex, minIndex, maxIndex):
939 939
940 940 if profileIndex < minIndex:
941 941 return False
942 942
943 943 if profileIndex > maxIndex:
944 944 return False
945 945
946 946 return True
947 947
948 948 def isThisProfileInList(self, profileIndex, profileList):
949 949
950 950 if profileIndex not in profileList:
951 951 return False
952 952
953 953 return True
954 954
955 955 def run(self, dataOut, profileList=None, profileRangeList=None, beam=None, byblock=False, rangeList = None, nProfiles=None):
956 956
957 957 """
958 958 ProfileSelector:
959 959
960 960 Inputs:
961 961 profileList : Index of profiles selected. Example: profileList = (0,1,2,7,8)
962 962
963 963 profileRangeList : Minimum and maximum profile indexes. Example: profileRangeList = (4, 30)
964 964
965 965 rangeList : List of profile ranges. Example: rangeList = ((4, 30), (32, 64), (128, 256))
966 966
967 967 """
968 968
969 969 if rangeList is not None:
970 970 if type(rangeList[0]) not in (tuple, list):
971 971 rangeList = [rangeList]
972 972
973 973 dataOut.flagNoData = True
974 974
975 975 if dataOut.flagDataAsBlock:
976 976 """
977 977 data dimension = [nChannels, nProfiles, nHeis]
978 978 """
979 979 if profileList != None:
980 980 dataOut.data = dataOut.data[:,profileList,:]
981 981
982 982 if profileRangeList != None:
983 983 minIndex = profileRangeList[0]
984 984 maxIndex = profileRangeList[1]
985 985 profileList = list(range(minIndex, maxIndex+1))
986 986
987 987 dataOut.data = dataOut.data[:,minIndex:maxIndex+1,:]
988 988
989 989 if rangeList != None:
990 990
991 991 profileList = []
992 992
993 993 for thisRange in rangeList:
994 994 minIndex = thisRange[0]
995 995 maxIndex = thisRange[1]
996 996
997 997 profileList.extend(list(range(minIndex, maxIndex+1)))
998 998
999 999 dataOut.data = dataOut.data[:,profileList,:]
1000 1000
1001 1001 dataOut.nProfiles = len(profileList)
1002 1002 dataOut.profileIndex = dataOut.nProfiles - 1
1003 1003 dataOut.flagNoData = False
1004 1004
1005 1005 return dataOut
1006 1006
1007 1007 """
1008 1008 data dimension = [nChannels, nHeis]
1009 1009 """
1010 1010
1011 1011 if profileList != None:
1012 1012
1013 1013 if self.isThisProfileInList(dataOut.profileIndex, profileList):
1014 1014
1015 1015 self.nProfiles = len(profileList)
1016 1016 dataOut.nProfiles = self.nProfiles
1017 1017 dataOut.profileIndex = self.profileIndex
1018 1018 dataOut.flagNoData = False
1019 1019
1020 1020 self.incProfileIndex()
1021 1021 return dataOut
1022 1022
1023 1023 if profileRangeList != None:
1024 1024
1025 1025 minIndex = profileRangeList[0]
1026 1026 maxIndex = profileRangeList[1]
1027 1027
1028 1028 if self.isThisProfileInRange(dataOut.profileIndex, minIndex, maxIndex):
1029 1029
1030 1030 self.nProfiles = maxIndex - minIndex + 1
1031 1031 dataOut.nProfiles = self.nProfiles
1032 1032 dataOut.profileIndex = self.profileIndex
1033 1033 dataOut.flagNoData = False
1034 1034
1035 1035 self.incProfileIndex()
1036 1036 return dataOut
1037 1037
1038 1038 if rangeList != None:
1039 1039
1040 1040 nProfiles = 0
1041 1041
1042 1042 for thisRange in rangeList:
1043 1043 minIndex = thisRange[0]
1044 1044 maxIndex = thisRange[1]
1045 1045
1046 1046 nProfiles += maxIndex - minIndex + 1
1047 1047
1048 1048 for thisRange in rangeList:
1049 1049
1050 1050 minIndex = thisRange[0]
1051 1051 maxIndex = thisRange[1]
1052 1052
1053 1053 if self.isThisProfileInRange(dataOut.profileIndex, minIndex, maxIndex):
1054 1054
1055 1055 self.nProfiles = nProfiles
1056 1056 dataOut.nProfiles = self.nProfiles
1057 1057 dataOut.profileIndex = self.profileIndex
1058 1058 dataOut.flagNoData = False
1059 1059
1060 1060 self.incProfileIndex()
1061 1061
1062 1062 break
1063 1063
1064 1064 return dataOut
1065 1065
1066 1066
1067 1067 if beam != None: #beam is only for AMISR data
1068 1068 if self.isThisProfileInList(dataOut.profileIndex, dataOut.beamRangeDict[beam]):
1069 1069 dataOut.flagNoData = False
1070 1070 dataOut.profileIndex = self.profileIndex
1071 1071
1072 1072 self.incProfileIndex()
1073 1073
1074 1074 return dataOut
1075 1075
1076 1076 raise ValueError("ProfileSelector needs profileList, profileRangeList or rangeList parameter")
1077 1077
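A minimal usage sketch for ProfileSelector, assuming `dataOut` comes from the reader (the index values are illustrative):

op = ProfileSelector()
dataOut = op.run(dataOut, rangeList=((0, 127), (256, 383)))   # keep two ranges of profiles
# dataOut = op.run(dataOut, profileList=(0, 1, 2, 7, 8))      # or an explicit list of indexes
# dataOut = op.run(dataOut, profileRangeList=(4, 30))         # or a single min/max pair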
1078 1078
1079 1079 class Reshaper(Operation):
1080 1080
1081 1081 def __init__(self, **kwargs):
1082 1082
1083 1083 Operation.__init__(self, **kwargs)
1084 1084
1085 1085 self.__buffer = None
1086 1086 self.__nitems = 0
1087 1087
1088 1088 def __appendProfile(self, dataOut, nTxs):
1089 1089
1090 1090 if self.__buffer is None:
1091 1091 shape = (dataOut.nChannels, int(dataOut.nHeights/nTxs) )
1092 1092 self.__buffer = numpy.empty(shape, dtype = dataOut.data.dtype)
1093 1093
1094 1094 ini = dataOut.nHeights * self.__nitems
1095 1095 end = ini + dataOut.nHeights
1096 1096
1097 1097 self.__buffer[:, ini:end] = dataOut.data
1098 1098
1099 1099 self.__nitems += 1
1100 1100
1101 1101 return int(self.__nitems*nTxs)
1102 1102
1103 1103 def __getBuffer(self):
1104 1104
1105 1105 if self.__nitems == int(1./self.__nTxs):
1106 1106
1107 1107 self.__nitems = 0
1108 1108
1109 1109 return self.__buffer.copy()
1110 1110
1111 1111 return None
1112 1112
1113 1113 def __checkInputs(self, dataOut, shape, nTxs):
1114 1114
1115 1115 if shape is None and nTxs is None:
1116 1116 raise ValueError("Reshaper: shape of factor should be defined")
1117 1117
1118 1118 if nTxs:
1119 1119 if nTxs < 0:
1120 1120 raise ValueError("nTxs should be greater than 0")
1121 1121
1122 1122 if nTxs < 1 and dataOut.nProfiles % (1./nTxs) != 0:
1123 1123 raise ValueError("nProfiles= %d is not divisibled by (1./nTxs) = %f" %(dataOut.nProfiles, (1./nTxs)))
1124 1124
1125 1125 shape = [dataOut.nChannels, dataOut.nProfiles*nTxs, dataOut.nHeights/nTxs]
1126 1126
1127 1127 return shape, nTxs
1128 1128
1129 1129 if len(shape) != 2 and len(shape) != 3:
1130 1130 raise ValueError("shape dimension should be equal to 2 or 3. shape = (nProfiles, nHeis) or (nChannels, nProfiles, nHeis). Actually shape = (%d, %d, %d)" %(dataOut.nChannels, dataOut.nProfiles, dataOut.nHeights))
1131 1131
1132 1132 if len(shape) == 2:
1133 1133 shape_tuple = [dataOut.nChannels]
1134 1134 shape_tuple.extend(shape)
1135 1135 else:
1136 1136 shape_tuple = list(shape)
1137 1137
1138 1138 nTxs = 1.0*shape_tuple[1]/dataOut.nProfiles
1139 1139
1140 1140 return shape_tuple, nTxs
1141 1141
1142 1142 def run(self, dataOut, shape=None, nTxs=None):
1143 1143
1144 1144 shape_tuple, self.__nTxs = self.__checkInputs(dataOut, shape, nTxs)
1145 1145
1146 1146 dataOut.flagNoData = True
1147 1147 profileIndex = None
1148 1148
1149 1149 if dataOut.flagDataAsBlock:
1150 1150
1151 1151 dataOut.data = numpy.reshape(dataOut.data, shape_tuple)
1152 1152 dataOut.flagNoData = False
1153 1153
1154 1154 profileIndex = int(dataOut.nProfiles*self.__nTxs) - 1
1155 1155
1156 1156 else:
1157 1157
1158 1158 if self.__nTxs < 1:
1159 1159
1160 1160 self.__appendProfile(dataOut, self.__nTxs)
1161 1161 new_data = self.__getBuffer()
1162 1162
1163 1163 if new_data is not None:
1164 1164 dataOut.data = new_data
1165 1165 dataOut.flagNoData = False
1166 1166
1167 1167 profileIndex = dataOut.profileIndex*nTxs
1168 1168
1169 1169 else:
1170 1170 raise ValueError("nTxs should be greater than 0 and lower than 1, or use VoltageReader(..., getblock=True)")
1171 1171
1172 1172 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1173 1173
1174 1174 dataOut.heightList = numpy.arange(dataOut.nHeights/self.__nTxs) * deltaHeight + dataOut.heightList[0]
1175 1175
1176 1176 dataOut.nProfiles = int(dataOut.nProfiles*self.__nTxs)
1177 1177
1178 1178 dataOut.profileIndex = profileIndex
1179 1179
1180 1180 dataOut.ippSeconds /= self.__nTxs
1181 1181
1182 1182 return dataOut
1183 1183
1184 1184 class SplitProfiles(Operation):
1185 1185
1186 1186 def __init__(self, **kwargs):
1187 1187
1188 1188 Operation.__init__(self, **kwargs)
1189 1189
1190 1190 def run(self, dataOut, n):
1191 1191
1192 1192 dataOut.flagNoData = True
1193 1193 profileIndex = None
1194 1194
1195 1195 if dataOut.flagDataAsBlock:
1196 1196
1197 1197 #nchannels, nprofiles, nsamples
1198 1198 shape = dataOut.data.shape
1199 1199
1200 1200 if shape[2] % n != 0:
1201 1201 raise ValueError("Could not split the data, n=%d has to be multiple of %d" %(n, shape[2]))
1202 1202
1203 1203 new_shape = shape[0], shape[1]*n, int(shape[2]/n)
1204 1204
1205 1205 dataOut.data = numpy.reshape(dataOut.data, new_shape)
1206 1206 dataOut.flagNoData = False
1207 1207
1208 1208 profileIndex = int(dataOut.nProfiles/n) - 1
1209 1209
1210 1210 else:
1211 1211
1212 1212 raise ValueError("Could not split the data when is read Profile by Profile. Use VoltageReader(..., getblock=True)")
1213 1213
1214 1214 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1215 1215
1216 1216 dataOut.heightList = numpy.arange(dataOut.nHeights/n) * deltaHeight + dataOut.heightList[0]
1217 1217
1218 1218 dataOut.nProfiles = int(dataOut.nProfiles*n)
1219 1219
1220 1220 dataOut.profileIndex = profileIndex
1221 1221
1222 1222 dataOut.ippSeconds /= n
1223 1223
1224 1224 return dataOut
1225 1225
1226 1226 class CombineProfiles(Operation):
1227 1227 def __init__(self, **kwargs):
1228 1228
1229 1229 Operation.__init__(self, **kwargs)
1230 1230
1231 1231 self.__remData = None
1232 1232 self.__profileIndex = 0
1233 1233
1234 1234 def run(self, dataOut, n):
1235 1235
1236 1236 dataOut.flagNoData = True
1237 1237 profileIndex = None
1238 1238
1239 1239 if dataOut.flagDataAsBlock:
1240 1240
1241 1241 #nchannels, nprofiles, nsamples
1242 1242 shape = dataOut.data.shape
1243 1243 new_shape = shape[0], int(shape[1]/n), shape[2]*n
1244 1244
1245 1245 if shape[1] % n != 0:
1246 1246 raise ValueError("Could not split the data, n=%d has to be multiple of %d" %(n, shape[1]))
1247 1247
1248 1248 dataOut.data = numpy.reshape(dataOut.data, new_shape)
1249 1249 dataOut.flagNoData = False
1250 1250
1251 1251 profileIndex = int(dataOut.nProfiles*n) - 1
1252 1252
1253 1253 else:
1254 1254
1255 1255 #nchannels, nsamples
1256 1256 if self.__remData is None:
1257 1257 newData = dataOut.data
1258 1258 else:
1259 1259 newData = numpy.concatenate((self.__remData, dataOut.data), axis=1)
1260 1260
1261 1261 self.__profileIndex += 1
1262 1262
1263 1263 if self.__profileIndex < n:
1264 1264 self.__remData = newData
1265 1265 #continue
1266 1266 return
1267 1267
1268 1268 self.__profileIndex = 0
1269 1269 self.__remData = None
1270 1270
1271 1271 dataOut.data = newData
1272 1272 dataOut.flagNoData = False
1273 1273
1274 1274 profileIndex = dataOut.profileIndex/n
1275 1275
1276 1276
1277 1277 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1278 1278
1279 1279 dataOut.heightList = numpy.arange(dataOut.nHeights*n) * deltaHeight + dataOut.heightList[0]
1280 1280
1281 1281 dataOut.nProfiles = int(dataOut.nProfiles/n)
1282 1282
1283 1283 dataOut.profileIndex = profileIndex
1284 1284
1285 1285 dataOut.ippSeconds *= n
1286 1286
1287 1287 return dataOut
1288 1288
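When the data is read as a block, SplitProfiles and CombineProfiles are inverse reshapes of the (nChannels, nProfiles, nHeights) array; a standalone sketch of the relationship (sizes are illustrative). Note that SplitProfiles also divides ippSeconds by n while CombineProfiles multiplies it:

import numpy

nChannels, nProfiles, nHeights, n = 2, 100, 1000, 2
block = numpy.random.rand(nChannels, nProfiles, nHeights)

split = block.reshape(nChannels, nProfiles * n, nHeights // n)   # what SplitProfiles(n=2) does
merged = split.reshape(nChannels, nProfiles, nHeights)           # what CombineProfiles(n=2) does
assert numpy.array_equal(block, merged)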
1289 1289 class PulsePairVoltage(Operation):
1290 1290 '''
1291 1291 Function PulsePair(Signal Power, Velocity)
1292 1292 The real component of Lag[0] provides the intensity information
1293 1293 The phase of Lag[1] provides the velocity information
1294 1294
1295 1295 Configuration Parameters:
1296 1296 nPRF = Number of PRFs (profiles) to integrate
1297 1297 theta = Azimuth angle boundaries in degrees
1298 1298
1299 1299 Input:
1300 1300 self.dataOut
1301 1301 lag[N]
1302 1302 Affected:
1303 1303 self.dataOut.spc
1304 1304 '''
1305 1305 isConfig = False
1306 1306 __profIndex = 0
1307 1307 __initime = None
1308 1308 __lastdatatime = None
1309 1309 __buffer = None
1310 1310 noise = None
1311 1311 __dataReady = False
1312 1312 n = None
1313 1313 __nch = 0
1314 1314 __nHeis = 0
1315 1315 removeDC = False
1316 1316 ipp = None
1317 1317 lambda_ = 0
1318 1318
1319 1319 def __init__(self,**kwargs):
1320 1320 Operation.__init__(self,**kwargs)
1321 1321
1322 1322 def setup(self, dataOut, n = None, removeDC=False):
1323 1323 '''
1324 1324 n = number of input PRFs (profiles) per estimate
1325 1325 '''
1326 1326 self.__initime = None
1327 1327 self.__lastdatatime = 0
1328 1328 self.__dataReady = False
1329 1329 self.__buffer = 0
1330 1330 self.__profIndex = 0
1331 1331 self.noise = None
1332 1332 self.__nch = dataOut.nChannels
1333 1333 self.__nHeis = dataOut.nHeights
1334 1334 self.removeDC = removeDC
1335 1335 self.lambda_ = 3.0e8/(9345.0e6)
1336 1336 self.ippSec = dataOut.ippSeconds
1337 1337 self.nCohInt = dataOut.nCohInt
1338 print("IPPseconds",dataOut.ippSeconds)
1339
1340 print("ELVALOR DE n es:", n)
1338
1341 1339 if n == None:
1342 1340 raise ValueError("n should be specified.")
1343 1341
1344 1342 if n != None:
1345 1343 if n<2:
1346 1344 raise ValueError("n should be greater than 2")
1347 1345
1348 1346 self.n = n
1349 1347 self.__nProf = n
1350 1348
1351 1349 self.__buffer = numpy.zeros((dataOut.nChannels,
1352 1350 n,
1353 1351 dataOut.nHeights),
1354 1352 dtype='complex')
1355 1353
1356 1354 def putData(self,data):
1357 1355 '''
1358 1356 Add a profile to the __buffer and increase __profIndex by one
1359 1357 '''
1360 1358 self.__buffer[:,self.__profIndex,:]= data
1361 1359 self.__profIndex += 1
1362 1360 return
1363 1361
1364 1362 def pushData(self,dataOut):
1365 1363 '''
1366 1364 Return the PULSEPAIR and the profiles used in the operation
1367 1365 Affected : self.__profileIndex
1368 1366 '''
1369 1367 #----------------- Remove DC-----------------------------------
1370 1368 if self.removeDC==True:
1371 1369 mean = numpy.mean(self.__buffer,1)
1372 1370 tmp = mean.reshape(self.__nch,1,self.__nHeis)
1373 1371 dc= numpy.tile(tmp,[1,self.__nProf,1])
1374 1372 self.__buffer = self.__buffer - dc
1376 1374 #------------------ Power computation --------------------------
1376 1374 pair0 = self.__buffer*numpy.conj(self.__buffer)
1377 1375 pair0 = pair0.real
1378 1376 lag_0 = numpy.sum(pair0,1)
1380 1378 #------------------ Noise estimate per channel -----------------
1380 1378 self.noise = numpy.zeros(self.__nch)
1381 1379 for i in range(self.__nch):
1382 1380 daux = numpy.sort(pair0[i,:,:],axis= None)
1383 1381 self.noise[i]=hildebrand_sekhon( daux ,self.nCohInt)
1384 1382
1385 1383 self.noise = self.noise.reshape(self.__nch,1)
1386 1384 self.noise = numpy.tile(self.noise,[1,self.__nHeis])
1387 1385 noise_buffer = self.noise.reshape(self.__nch,1,self.__nHeis)
1388 1386 noise_buffer = numpy.tile(noise_buffer,[1,self.__nProf,1])
1389 1387 #------------------ Received power = P, signal power = S, noise = N ----
1390 1388 #------------------ P = S + N, P = lag_0/N -----------------------------
1391 1389 #-------------------- Power ---------------------------------------------
1392 1390 data_power = lag_0/(self.n*self.nCohInt)
1393 1391 #------------------ Signal ----------------------------------------------
1394 1392 data_intensity = pair0 - noise_buffer
1395 1393 data_intensity = numpy.sum(data_intensity,axis=1)*(self.n*self.nCohInt)#*self.nCohInt)
1396 1394 #data_intensity = (lag_0-self.noise*self.n)*(self.n*self.nCohInt)
1397 1395 for i in range(self.__nch):
1398 1396 for j in range(self.__nHeis):
1399 1397 if data_intensity[i][j] < 0:
1400 1398 data_intensity[i][j] = numpy.min(numpy.absolute(data_intensity[i][j]))
1401 1399
1402 1400 #----------------- Doppler frequency and velocity computation --------
1403 1401 pair1 = self.__buffer[:,:-1,:]*numpy.conjugate(self.__buffer[:,1:,:])
1404 1402 lag_1 = numpy.sum(pair1,1)
1405 1403 data_freq = (-1/(2.0*math.pi*self.ippSec*self.nCohInt))*numpy.angle(lag_1)
1406 1404 data_velocity = (self.lambda_/2.0)*data_freq
1407 1405
1408 1406 #---------------- Estimated mean signal power ------------------------
1409 1407 lag_0 = lag_0/self.n
1410 1408 S = lag_0-self.noise
1411 1409
1412 1410 #---------------- Mean Doppler frequency -----------------------------
1413 1411 lag_1 = lag_1/(self.n-1)
1414 1412 R1 = numpy.abs(lag_1)
1415 1413
1416 1414 #---------------- SNR computation -------------------------------------
1417 1415 data_snrPP = S/self.noise
1418 1416 for i in range(self.__nch):
1419 1417 for j in range(self.__nHeis):
1420 1418 if data_snrPP[i][j] < 1.e-20:
1421 1419 data_snrPP[i][j] = 1.e-20
1422 1420
1423 1421 #----------------- Spectral width computation -------------------------
1424 1422 L = S/R1
1425 1423 L = numpy.where(L<0,1,L)
1426 1424 L = numpy.log(L)
1427 1425 tmp = numpy.sqrt(numpy.absolute(L))
1428 1426 data_specwidth = (self.lambda_/(2*math.sqrt(2)*math.pi*self.ippSec*self.nCohInt))*tmp*numpy.sign(L)
1429 1427 n = self.__profIndex
1430 1428
1431 1429 self.__buffer = numpy.zeros((self.__nch, self.__nProf,self.__nHeis), dtype='complex')
1432 1430 self.__profIndex = 0
1433 1431 return data_power,data_intensity,data_velocity,data_snrPP,data_specwidth,n
1434 1432
1435 1433
1436 1434 def pulsePairbyProfiles(self,dataOut):
1437 1435
1438 1436 self.__dataReady = False
1439 1437 data_power = None
1440 1438 data_intensity = None
1441 1439 data_velocity = None
1442 1440 data_specwidth = None
1443 1441 data_snrPP = None
1444 1442 self.putData(data=dataOut.data)
1445 1443 if self.__profIndex == self.n:
1446 1444 data_power,data_intensity, data_velocity,data_snrPP,data_specwidth, n = self.pushData(dataOut=dataOut)
1447 1445 self.__dataReady = True
1448 1446
1449 1447 return data_power, data_intensity, data_velocity, data_snrPP, data_specwidth
1450 1448
1451 1449
1452 1450 def pulsePairOp(self, dataOut, datatime= None):
1453 1451
1454 1452 if self.__initime == None:
1455 1453 self.__initime = datatime
1456 1454 data_power, data_intensity, data_velocity, data_snrPP, data_specwidth = self.pulsePairbyProfiles(dataOut)
1457 1455 self.__lastdatatime = datatime
1458 1456
1459 1457 if data_power is None:
1460 1458 return None, None, None,None,None,None
1461 1459
1462 1460 avgdatatime = self.__initime
1463 1461 deltatime = datatime - self.__lastdatatime
1464 1462 self.__initime = datatime
1465 1463
1466 1464 return data_power, data_intensity, data_velocity, data_snrPP, data_specwidth, avgdatatime
1467 1465
1468 1466 def run(self, dataOut,n = None,removeDC= False, overlapping= False,**kwargs):
1469 1467
1470 1468 if not self.isConfig:
1471 1469 self.setup(dataOut = dataOut, n = n , removeDC=removeDC , **kwargs)
1472 1470 self.isConfig = True
1473 1471 data_power, data_intensity, data_velocity,data_snrPP,data_specwidth, avgdatatime = self.pulsePairOp(dataOut, dataOut.utctime)
1474 1472 dataOut.flagNoData = True
1475 1473
1476 1474 if self.__dataReady:
1477 1475 dataOut.nCohInt *= self.n
1478 1476 dataOut.dataPP_POW = data_intensity # S
1479 1477 dataOut.dataPP_POWER = data_power # P
1480 1478 dataOut.dataPP_DOP = data_velocity
1481 1479 dataOut.dataPP_SNR = data_snrPP
1482 1480 dataOut.dataPP_WIDTH = data_specwidth
1483 1481 dataOut.PRFbyAngle = self.n # number of PRFs per rotated angle, equivalent to one time interval
1484 1482 dataOut.utctime = avgdatatime
1485 1483 dataOut.flagNoData = False
1486 1484 return dataOut
1487 1485
1488 1486
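The pulse-pair estimates above reduce to two lags of the voltage autocorrelation: lag 0 gives the power, and the phase of lag 1 gives the Doppler frequency, f = -angle(R1) / (2*pi*IPP*nCohInt). A standalone sketch on synthetic voltages with a known 100 Hz Doppler shift (all values are illustrative and nCohInt is taken as 1):

import math
import numpy

n, nHeights = 32, 50
ipp = 1e-3                                    # inter-pulse period in seconds
lambda_ = 3.0e8 / 9345.0e6                    # same wavelength used by the operation above

t = numpy.arange(n) * ipp
volt = numpy.exp(2j * math.pi * 100.0 * t)[:, None] * numpy.ones((n, nHeights))

lag0 = numpy.sum((volt * numpy.conj(volt)).real, axis=0) / n      # mean power per height
lag1 = numpy.sum(volt[:-1] * numpy.conj(volt[1:]), axis=0) / (n - 1)
freq = -numpy.angle(lag1) / (2.0 * math.pi * ipp)                 # ~ 100 Hz at every height
vel = (lambda_ / 2.0) * freq                                      # radial velocity in m/s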
1489 1487
1490 1488 # import collections
1491 1489 # from scipy.stats import mode
1492 1490 #
1493 1491 # class Synchronize(Operation):
1494 1492 #
1495 1493 # isConfig = False
1496 1494 # __profIndex = 0
1497 1495 #
1498 1496 # def __init__(self, **kwargs):
1499 1497 #
1500 1498 # Operation.__init__(self, **kwargs)
1501 1499 # # self.isConfig = False
1502 1500 # self.__powBuffer = None
1503 1501 # self.__startIndex = 0
1504 1502 # self.__pulseFound = False
1505 1503 #
1506 1504 # def __findTxPulse(self, dataOut, channel=0, pulse_with = None):
1507 1505 #
1508 1506 # #Read data
1509 1507 #
1510 1508 # powerdB = dataOut.getPower(channel = channel)
1511 1509 # noisedB = dataOut.getNoise(channel = channel)[0]
1512 1510 #
1513 1511 # self.__powBuffer.extend(powerdB.flatten())
1514 1512 #
1515 1513 # dataArray = numpy.array(self.__powBuffer)
1516 1514 #
1517 1515 # filteredPower = numpy.correlate(dataArray, dataArray[0:self.__nSamples], "same")
1518 1516 #
1519 1517 # maxValue = numpy.nanmax(filteredPower)
1520 1518 #
1521 1519 # if maxValue < noisedB + 10:
1522 1520 # #No se encuentra ningun pulso de transmision
1523 1521 # return None
1524 1522 #
1525 1523 # maxValuesIndex = numpy.where(filteredPower > maxValue - 0.1*abs(maxValue))[0]
1526 1524 #
1527 1525 # if len(maxValuesIndex) < 2:
1528 1526 # #Solo se encontro un solo pulso de transmision de un baudio, esperando por el siguiente TX
1529 1527 # return None
1530 1528 #
1531 1529 # phasedMaxValuesIndex = maxValuesIndex - self.__nSamples
1532 1530 #
1533 1531 # #Seleccionar solo valores con un espaciamiento de nSamples
1534 1532 # pulseIndex = numpy.intersect1d(maxValuesIndex, phasedMaxValuesIndex)
1535 1533 #
1536 1534 # if len(pulseIndex) < 2:
1537 1535 # #Solo se encontro un pulso de transmision con ancho mayor a 1
1538 1536 # return None
1539 1537 #
1540 1538 # spacing = pulseIndex[1:] - pulseIndex[:-1]
1541 1539 #
1542 1540 # #remover senales que se distancien menos de 10 unidades o muestras
1543 1541 # #(No deberian existir IPP menor a 10 unidades)
1544 1542 #
1545 1543 # realIndex = numpy.where(spacing > 10 )[0]
1546 1544 #
1547 1545 # if len(realIndex) < 2:
1548 1546 # #Solo se encontro un pulso de transmision con ancho mayor a 1
1549 1547 # return None
1550 1548 #
1551 1549 # #Eliminar pulsos anchos (deja solo la diferencia entre IPPs)
1552 1550 # realPulseIndex = pulseIndex[realIndex]
1553 1551 #
1554 1552 # period = mode(realPulseIndex[1:] - realPulseIndex[:-1])[0][0]
1555 1553 #
1556 1554 # print "IPP = %d samples" %period
1557 1555 #
1558 1556 # self.__newNSamples = dataOut.nHeights #int(period)
1559 1557 # self.__startIndex = int(realPulseIndex[0])
1560 1558 #
1561 1559 # return 1
1562 1560 #
1563 1561 #
1564 1562 # def setup(self, nSamples, nChannels, buffer_size = 4):
1565 1563 #
1566 1564 # self.__powBuffer = collections.deque(numpy.zeros( buffer_size*nSamples,dtype=numpy.float),
1567 1565 # maxlen = buffer_size*nSamples)
1568 1566 #
1569 1567 # bufferList = []
1570 1568 #
1571 1569 # for i in range(nChannels):
1572 1570 # bufferByChannel = collections.deque(numpy.zeros( buffer_size*nSamples, dtype=numpy.complex) + numpy.NAN,
1573 1571 # maxlen = buffer_size*nSamples)
1574 1572 #
1575 1573 # bufferList.append(bufferByChannel)
1576 1574 #
1577 1575 # self.__nSamples = nSamples
1578 1576 # self.__nChannels = nChannels
1579 1577 # self.__bufferList = bufferList
1580 1578 #
1581 1579 # def run(self, dataOut, channel = 0):
1582 1580 #
1583 1581 # if not self.isConfig:
1584 1582 # nSamples = dataOut.nHeights
1585 1583 # nChannels = dataOut.nChannels
1586 1584 # self.setup(nSamples, nChannels)
1587 1585 # self.isConfig = True
1588 1586 #
1589 1587 # #Append new data to internal buffer
1590 1588 # for thisChannel in range(self.__nChannels):
1591 1589 # bufferByChannel = self.__bufferList[thisChannel]
1592 1590 # bufferByChannel.extend(dataOut.data[thisChannel])
1593 1591 #
1594 1592 # if self.__pulseFound:
1595 1593 # self.__startIndex -= self.__nSamples
1596 1594 #
1597 1595 # #Finding Tx Pulse
1598 1596 # if not self.__pulseFound:
1599 1597 # indexFound = self.__findTxPulse(dataOut, channel)
1600 1598 #
1601 1599 # if indexFound == None:
1602 1600 # dataOut.flagNoData = True
1603 1601 # return
1604 1602 #
1605 1603 # self.__arrayBuffer = numpy.zeros((self.__nChannels, self.__newNSamples), dtype = numpy.complex)
1606 1604 # self.__pulseFound = True
1607 1605 # self.__startIndex = indexFound
1608 1606 #
1609 1607 # #If pulse was found ...
1610 1608 # for thisChannel in range(self.__nChannels):
1611 1609 # bufferByChannel = self.__bufferList[thisChannel]
1612 1610 # #print self.__startIndex
1613 1611 # x = numpy.array(bufferByChannel)
1614 1612 # self.__arrayBuffer[thisChannel] = x[self.__startIndex:self.__startIndex+self.__newNSamples]
1615 1613 #
1616 1614 # deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1617 1615 # dataOut.heightList = numpy.arange(self.__newNSamples)*deltaHeight
1618 1616 # # dataOut.ippSeconds = (self.__newNSamples / deltaHeight)/1e6
1619 1617 #
1620 1618 # dataOut.data = self.__arrayBuffer
1621 1619 #
1622 1620 # self.__startIndex += self.__newNSamples
1623 1621 #
1624 1622 # return