Add metadata attribute to data types
jespinoza -
r1338:317db0cd9a09

The requested changes are too big and content was truncated.

@@ -1,90 +1,87 @@
1 1 import numpy
2 2 import copy
3 3
4 4 class Beam:
5 5 def __init__(self):
6 6 self.codeList = []
7 7 self.azimuthList = []
8 8 self.zenithList = []
9 9
10 10
11 11 class AMISR:
12 12 def __init__(self):
13 13 self.flagNoData = True
14 14 self.data = None
15 15 self.utctime = None
16 16 self.type = "AMISR"
17 17
18 18 # properties for compatibility with Voltage objects
19 19 self.timeZone = 0#timezone like jroheader, difference in minutes between UTC and localtime
20 20 self.dstFlag = 0#self.dataIn.dstFlag
21 21 self.errorCount = 0#self.dataIn.errorCount
22 22 self.useLocalTime = True#self.dataIn.useLocalTime
23 23
24 24 self.radarControllerHeaderObj = None#self.dataIn.radarControllerHeaderObj.copy()
25 25 self.systemHeaderObj = None#self.dataIn.systemHeaderObj.copy()
26 26 self.channelList = [0]#self.dataIn.channelList this only applies to the AMISR case
27 27 self.dtype = numpy.dtype([('real','<f4'),('imag','<f4')])
28 28
29 29 self.flagDiscontinuousBlock = None#self.dataIn.flagDiscontinuousBlock
30 30 #self.utctime = #self.firstdatatime
31 31 self.flagDecodeData = None#self.dataIn.flagDecodeData #assume the data is decoded
32 32 self.flagDeflipData = None#self.dataIn.flagDeflipData #assume the data is not flipped
33 33
34 34 self.nCohInt = 1#self.dataIn.nCohInt
35 35 self.nIncohInt = 1
36 36 self.ippSeconds = None#self.dataIn.ippSeconds, according to the filename/Setup/Tufile
37 37 self.windowOfFilter = None#self.dataIn.windowOfFilter
38 38
39 39 self.timeInterval = None#self.dataIn.timeInterval*self.dataOut.nFFTPoints*self.dataOut.nIncohInt
40 40 self.frequency = None#self.dataIn.frequency
41 41 self.realtime = 0#self.dataIn.realtime
42 42
43 43 # update these when reading the data
44 44 self.heightList = None#self.dataIn.heightList
45 45 self.nProfiles = None#Number of samples or nFFTPoints
46 46 self.nRecords = None
47 47 self.nBeams = None
48 48 self.nBaud = None#self.dataIn.nBaud
49 49 self.nCode = None#self.dataIn.nCode
50 50 self.code = None#self.dataIn.code
51 51
52 52 # considerations for the Beams
53 53 self.beamCodeDict = None
54 54 self.beamRangeDict = None
55 55 self.beamcode = None
56 56 self.azimuth = None
57 57 self.zenith = None
58 58 self.gain = None
59 59
60 60 self.npulseByFrame = None
61 61
62 62 self.profileIndex = None
63 63
64 64 self.beam = Beam()
65 65
66 66 def copy(self, inputObj=None):
67 67
68 68 if inputObj is None:
69 69 return copy.deepcopy(self)
70 70
71 71 for key in list(inputObj.__dict__.keys()):
72 72 self.__dict__[key] = inputObj.__dict__[key]
73 73
74 def getNHeights(self):
74 @property
75 def nHeights(self):
75 76
76 77 return len(self.heightList)
77 78
78 79
79 80 def isEmpty(self):
80 81
81 82 return self.flagNoData
82 83
83 def getTimeInterval(self):
84 @property
85 def timeInterval(self):
84 86
85 timeInterval = self.ippSeconds * self.nCohInt
86
87 return timeInterval
88
89 timeInterval = property(getTimeInterval, "I'm the 'timeInterval' property")
90 nHeights = property(getNHeights, "I'm the 'nHeights' property.")
\ No newline at end of file
87 return self.ippSeconds * self.nCohInt
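
The hunk above replaces getter methods such as getNHeights() and getTimeInterval(), together with their property(...) registrations, with @property definitions, so callers read plain attributes instead of calling getters. A minimal standalone sketch of the same pattern (a toy class, not part of schainpy; the numeric values are illustrative):

    # Toy illustration of the getter-to-@property refactor applied above.
    class ToyData(object):
        def __init__(self):
            self.heightList = [90.0, 94.5, 99.0]   # example range gates (km)
            self.ippSeconds = 0.0004               # example inter-pulse period (s)
            self.nCohInt = 2

        @property
        def nHeights(self):
            return len(self.heightList)

        @property
        def timeInterval(self):
            return self.ippSeconds * self.nCohInt

    d = ToyData()
    print(d.nHeights)      # 3      (attribute access, formerly d.getNHeights())
    print(d.timeInterval)  # 0.0008 (formerly d.getTimeInterval())
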
@@ -1,1400 +1,1193 @@
1 '''
1 # Copyright (c) 2012-2020 Jicamarca Radio Observatory
2 # All rights reserved.
3 #
4 # Distributed under the terms of the BSD 3-clause license.
5 """Definition of diferent Data objects for different types of data
2 6
3 $Author: murco $
4 $Id: JROData.py 173 2012-11-20 15:06:21Z murco $
5 '''
7 Here you will find the different data objects for the different types
8 of data, these data objects must be used as dataIn or dataOut objects in
9 processing units and operations. Currently the supported data objects are:
10 Voltage, Spectra, SpectraHeis, Fits, Correlation and Parameters
11 """
6 12
7 13 import copy
8 14 import numpy
9 15 import datetime
10 16 import json
11 17
12 18 import schainpy.admin
13 19 from schainpy.utils import log
14 20 from .jroheaderIO import SystemHeader, RadarControllerHeader
15 21 from schainpy.model.data import _noise
16 22
17 23
18 24 def getNumpyDtype(dataTypeCode):
19 25
20 26 if dataTypeCode == 0:
21 27 numpyDtype = numpy.dtype([('real', '<i1'), ('imag', '<i1')])
22 28 elif dataTypeCode == 1:
23 29 numpyDtype = numpy.dtype([('real', '<i2'), ('imag', '<i2')])
24 30 elif dataTypeCode == 2:
25 31 numpyDtype = numpy.dtype([('real', '<i4'), ('imag', '<i4')])
26 32 elif dataTypeCode == 3:
27 33 numpyDtype = numpy.dtype([('real', '<i8'), ('imag', '<i8')])
28 34 elif dataTypeCode == 4:
29 35 numpyDtype = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
30 36 elif dataTypeCode == 5:
31 37 numpyDtype = numpy.dtype([('real', '<f8'), ('imag', '<f8')])
32 38 else:
33 39 raise ValueError('dataTypeCode was not defined')
34 40
35 41 return numpyDtype
36 42
37 43
38 44 def getDataTypeCode(numpyDtype):
39 45
40 46 if numpyDtype == numpy.dtype([('real', '<i1'), ('imag', '<i1')]):
41 47 datatype = 0
42 48 elif numpyDtype == numpy.dtype([('real', '<i2'), ('imag', '<i2')]):
43 49 datatype = 1
44 50 elif numpyDtype == numpy.dtype([('real', '<i4'), ('imag', '<i4')]):
45 51 datatype = 2
46 52 elif numpyDtype == numpy.dtype([('real', '<i8'), ('imag', '<i8')]):
47 53 datatype = 3
48 54 elif numpyDtype == numpy.dtype([('real', '<f4'), ('imag', '<f4')]):
49 55 datatype = 4
50 56 elif numpyDtype == numpy.dtype([('real', '<f8'), ('imag', '<f8')]):
51 57 datatype = 5
52 58 else:
53 59 datatype = None
54 60
55 61 return datatype
56 62
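
getNumpyDtype and getDataTypeCode above map the integer data-type code stored in the headers to a little-endian complex-sample record dtype and back. A short round-trip check, assuming both helpers are imported from this module:

    # Round trip between the header data-type code and the numpy record dtype.
    import numpy

    dt = getNumpyDtype(4)   # code 4 -> ('real', '<f4'), ('imag', '<f4')
    assert dt == numpy.dtype([('real', '<f4'), ('imag', '<f4')])
    assert getDataTypeCode(dt) == 4

    samples = numpy.zeros(8, dtype=dt)   # one complex voltage sample per record
    samples['real'][0], samples['imag'][0] = 1.0, -1.0
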
57 63
58 64 def hildebrand_sekhon(data, navg):
59 65 """
60 66 This method is for the objective determination of the noise level in Doppler spectra. This
61 67 implementation technique is based on the fact that the standard deviation of the spectral
62 68 densities is equal to the mean spectral density for white Gaussian noise
63 69
64 70 Inputs:
65 71 data : heights
66 72 navg : number of averages
67 73
68 74 Return:
69 75 mean : noise level
70 76 """
71 77
72 78 sortdata = numpy.sort(data, axis=None)
73 79 '''
74 80 lenOfData = len(sortdata)
75 81 nums_min = lenOfData*0.2
76 82
77 83 if nums_min <= 5:
78 84
79 85 nums_min = 5
80 86
81 87 sump = 0.
82 88 sumq = 0.
83 89
84 90 j = 0
85 91 cont = 1
86 92
87 93 while((cont == 1)and(j < lenOfData)):
88 94
89 95 sump += sortdata[j]
90 96 sumq += sortdata[j]**2
91 97
92 98 if j > nums_min:
93 99 rtest = float(j)/(j-1) + 1.0/navg
94 100 if ((sumq*j) > (rtest*sump**2)):
95 101 j = j - 1
96 102 sump = sump - sortdata[j]
97 103 sumq = sumq - sortdata[j]**2
98 104 cont = 0
99 105
100 106 j += 1
101 107
102 108 lnoise = sump / j
103 109 '''
104 110 return _noise.hildebrand_sekhon(sortdata, navg)
105 111
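
The docstring above describes the Hildebrand-Sekhon criterion, and the commented-out block preserves the original pure-Python loop that the external _noise.hildebrand_sekhon call now replaces. The following is only a rough, self-contained sketch of that criterion (not the library implementation): the sorted spectral densities are accumulated while their spread stays consistent with white noise, and the mean of the retained values is returned as the noise level.

    # Rough sketch of the Hildebrand-Sekhon threshold test, for illustration only.
    import numpy

    def hildebrand_sekhon_sketch(data, navg):
        sortdata = numpy.sort(data, axis=None)
        nums_min = max(int(len(sortdata) * 0.2), 5)
        sump = sumq = 0.0
        n = 0
        for value in sortdata:
            sump += value
            sumq += value ** 2
            n += 1
            if n > nums_min:
                rtest = float(n) / (n - 1) + 1.0 / navg
                if sumq * n > rtest * sump ** 2:   # spread too large: signal reached
                    sump -= value
                    sumq -= value ** 2
                    n -= 1
                    break
        return sump / n

    noise_level = hildebrand_sekhon_sketch(numpy.random.rayleigh(1.0, 1000), navg=10)
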
106 112
107 113 class Beam:
108 114
109 115 def __init__(self):
110 116 self.codeList = []
111 117 self.azimuthList = []
112 118 self.zenithList = []
113 119
114 120
115 121 class GenericData(object):
116 122
117 123 flagNoData = True
118 124
119 125 def copy(self, inputObj=None):
120 126
121 127 if inputObj == None:
122 128 return copy.deepcopy(self)
123 129
124 130 for key in list(inputObj.__dict__.keys()):
125 131
126 132 attribute = inputObj.__dict__[key]
127 133
128 134 # If this attribute is a tuple or list
129 135 if type(inputObj.__dict__[key]) in (tuple, list):
130 136 self.__dict__[key] = attribute[:]
131 137 continue
132 138
133 139 # If this attribute is another object or instance
134 140 if hasattr(attribute, '__dict__'):
135 141 self.__dict__[key] = attribute.copy()
136 142 continue
137 143
138 144 self.__dict__[key] = inputObj.__dict__[key]
139 145
140 146 def deepcopy(self):
141 147
142 148 return copy.deepcopy(self)
143 149
144 150 def isEmpty(self):
145 151
146 152 return self.flagNoData
147 153
148 154 def isReady(self):
149 155
150 156 return not self.flagNoData
151 157
152 158
153 159 class JROData(GenericData):
154 160
155 # m_BasicHeader = BasicHeader()
156 # m_ProcessingHeader = ProcessingHeader()
157
158 161 systemHeaderObj = SystemHeader()
159 162 radarControllerHeaderObj = RadarControllerHeader()
160 # data = None
161 163 type = None
162 164 datatype = None # dtype but in string
163 # dtype = None
164 # nChannels = None
165 # nHeights = None
166 165 nProfiles = None
167 166 heightList = None
168 167 channelList = None
169 168 flagDiscontinuousBlock = False
170 169 useLocalTime = False
171 170 utctime = None
172 171 timeZone = None
173 172 dstFlag = None
174 173 errorCount = None
175 174 blocksize = None
176 # nCode = None
177 # nBaud = None
178 # code = None
179 175 flagDecodeData = False # assume the data is not decoded
180 176 flagDeflipData = False # assume the data has not been deflipped
181 177 flagShiftFFT = False
182 # ippSeconds = None
183 # timeInterval = None
184 178 nCohInt = None
185 # noise = None
186 179 windowOfFilter = 1
187 # Speed of ligth
188 180 C = 3e8
189 181 frequency = 49.92e6
190 182 realtime = False
191 183 beacon_heiIndexList = None
192 184 last_block = None
193 185 blocknow = None
194 186 azimuth = None
195 187 zenith = None
196 188 beam = Beam()
197 189 profileIndex = None
198 190 error = None
199 191 data = None
200 192 nmodes = None
193 metadata_list = ['heightList', 'timeZone', 'type']
201 194
202 195 def __str__(self):
203 196
204 return '{} - {}'.format(self.type, self.getDatatime())
197 return '{} - {}'.format(self.type, self.datatime)
205 198
206 199 def getNoise(self):
207 200
208 201 raise NotImplementedError
209 202
210 def getNChannels(self):
203 @property
204 def nChannels(self):
211 205
212 206 return len(self.channelList)
213 207
214 def getChannelIndexList(self):
208 @property
209 def channelIndexList(self):
215 210
216 211 return list(range(self.nChannels))
217 212
218 def getNHeights(self):
213 @property
214 def nHeights(self):
219 215
220 216 return len(self.heightList)
221 217
222 def getHeiRange(self, extrapoints=0):
223
224 heis = self.heightList
225 # deltah = self.heightList[1] - self.heightList[0]
226 #
227 # heis.append(self.heightList[-1])
228
229 return heis
230
231 218 def getDeltaH(self):
232 219
233 delta = self.heightList[1] - self.heightList[0]
220 return self.heightList[1] - self.heightList[0]
234 221
235 return delta
236
237 def getltctime(self):
222 @property
223 def ltctime(self):
238 224
239 225 if self.useLocalTime:
240 226 return self.utctime - self.timeZone * 60
241 227
242 228 return self.utctime
243 229
244 def getDatatime(self):
230 @property
231 def datatime(self):
245 232
246 233 datatimeValue = datetime.datetime.utcfromtimestamp(self.ltctime)
247 234 return datatimeValue
248 235
249 236 def getTimeRange(self):
250 237
251 238 datatime = []
252 239
253 240 datatime.append(self.ltctime)
254 241 datatime.append(self.ltctime + self.timeInterval + 1)
255 242
256 243 datatime = numpy.array(datatime)
257 244
258 245 return datatime
259 246
260 247 def getFmaxTimeResponse(self):
261 248
262 249 period = (10**-6) * self.getDeltaH() / (0.15)
263 250
264 251 PRF = 1. / (period * self.nCohInt)
265 252
266 253 fmax = PRF
267 254
268 255 return fmax
269 256
270 257 def getFmax(self):
271 258 PRF = 1. / (self.ippSeconds * self.nCohInt)
272 259
273 260 fmax = PRF
274 261 return fmax
275 262
276 263 def getVmax(self):
277 264
278 265 _lambda = self.C / self.frequency
279 266
280 267 vmax = self.getFmax() * _lambda / 2
281 268
282 269 return vmax
283 270
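
getFmax and getVmax above derive the maximum unambiguous frequency and radial velocity from the inter-pulse period, the coherent integrations and the radar frequency. A back-of-the-envelope check following those expressions (C and frequency are the class defaults above; ippSeconds and nCohInt are assumed values for illustration):

    # Numbers follow getFmax()/getVmax(); ippSeconds and nCohInt are assumptions.
    C = 3e8
    frequency = 49.92e6
    ippSeconds = 0.0004          # assumed inter-pulse period (s)
    nCohInt = 1                  # assumed coherent integrations

    fmax = 1.0 / (ippSeconds * nCohInt)   # 2500.0 Hz
    _lambda = C / frequency               # ~6.01 m
    vmax = fmax * _lambda / 2             # ~7512 m/s
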
284 def get_ippSeconds(self):
271 @property
272 def ippSeconds(self):
285 273 '''
286 274 '''
287 275 return self.radarControllerHeaderObj.ippSeconds
288
289 def set_ippSeconds(self, ippSeconds):
276
277 @ippSeconds.setter
278 def ippSeconds(self, ippSeconds):
290 279 '''
291 280 '''
292
293 281 self.radarControllerHeaderObj.ippSeconds = ippSeconds
294
295 return
296
297 def get_dtype(self):
282
283 @property
284 def code(self):
298 285 '''
299 286 '''
300 return getNumpyDtype(self.datatype)
287 return self.radarControllerHeaderObj.code
301 288
302 def set_dtype(self, numpyDtype):
289 @code.setter
290 def code(self, code):
303 291 '''
304 292 '''
293 self.radarControllerHeaderObj.code = code
305 294
306 self.datatype = getDataTypeCode(numpyDtype)
307
308 def get_code(self):
295 @property
296 def ncode(self):
309 297 '''
310 298 '''
311 return self.radarControllerHeaderObj.code
299 return self.radarControllerHeaderObj.nCode
312 300
313 def set_code(self, code):
301 @ncode.setter
302 def ncode(self, ncode):
314 303 '''
315 304 '''
316 self.radarControllerHeaderObj.code = code
317
318 return
305 self.radarControllerHeaderObj.nCode = ncode
319 306
320 def get_ncode(self):
307 @property
308 def nbaud(self):
321 309 '''
322 310 '''
323 return self.radarControllerHeaderObj.nCode
311 return self.radarControllerHeaderObj.nBaud
324 312
325 def set_ncode(self, nCode):
313 @nbaud.setter
314 def nbaud(self, nbaud):
326 315 '''
327 316 '''
328 self.radarControllerHeaderObj.nCode = nCode
329
330 return
317 self.radarControllerHeaderObj.nBaud = nbaud
331 318
332 def get_nbaud(self):
319 @property
320 def ipp(self):
333 321 '''
334 322 '''
335 return self.radarControllerHeaderObj.nBaud
323 return self.radarControllerHeaderObj.ipp
336 324
337 def set_nbaud(self, nBaud):
325 @ipp.setter
326 def ipp(self, ipp):
338 327 '''
339 328 '''
340 self.radarControllerHeaderObj.nBaud = nBaud
329 self.radarControllerHeaderObj.ipp = ipp
341 330
342 return
331 @property
332 def metadata(self):
333 '''
334 '''
343 335
344 nChannels = property(getNChannels, "I'm the 'nChannel' property.")
345 channelIndexList = property(
346 getChannelIndexList, "I'm the 'channelIndexList' property.")
347 nHeights = property(getNHeights, "I'm the 'nHeights' property.")
348 #noise = property(getNoise, "I'm the 'nHeights' property.")
349 datatime = property(getDatatime, "I'm the 'datatime' property")
350 ltctime = property(getltctime, "I'm the 'ltctime' property")
351 ippSeconds = property(get_ippSeconds, set_ippSeconds)
352 dtype = property(get_dtype, set_dtype)
353 # timeInterval = property(getTimeInterval, "I'm the 'timeInterval' property")
354 code = property(get_code, set_code)
355 nCode = property(get_ncode, set_ncode)
356 nBaud = property(get_nbaud, set_nbaud)
336 return {attr: getattr(self, attr) for attr in self.metadata_list}
357 337
358 338
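
The new metadata_list class attribute and the metadata property above are the heart of this changeset: each data type declares which attribute names it exposes, and metadata collects them into a plain dictionary. A hedged sketch of what this yields for a Voltage object, whose extended metadata_list appears further down in this diff (the attribute values are illustrative, and the code/ipp entries assume the radar controller header exposes those fields):

    # Illustrative use of the new metadata property; values are made up.
    v = Voltage()
    v.heightList = [90.0, 94.5, 99.0]
    v.nProfiles = 128
    v.nCohInt = 2

    meta = v.metadata
    # -> {'type': 'Voltage', 'heightList': [90.0, 94.5, 99.0], 'timeZone': 0,
    #     'nProfiles': 128, 'channelList': None, 'nCohInt': 2,
    #     'code': ..., 'ncode': ..., 'nbaud': ..., 'ippSeconds': ..., 'ipp': ...}
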
359 339 class Voltage(JROData):
360 340
361 # data es un numpy array de 2 dmensiones (canales, alturas)
362 data = None
363 341 dataPP_POW = None
364 342 dataPP_DOP = None
365 343 dataPP_WIDTH = None
366 344 dataPP_SNR = None
367 345
368 346 def __init__(self):
369 347 '''
370 348 Constructor
371 349 '''
372 350
373 351 self.useLocalTime = True
374 352 self.radarControllerHeaderObj = RadarControllerHeader()
375 353 self.systemHeaderObj = SystemHeader()
376 354 self.type = "Voltage"
377 355 self.data = None
378 # self.dtype = None
379 # self.nChannels = 0
380 # self.nHeights = 0
381 356 self.nProfiles = None
382 357 self.heightList = None
383 358 self.channelList = None
384 # self.channelIndexList = None
385 359 self.flagNoData = True
386 360 self.flagDiscontinuousBlock = False
387 361 self.utctime = None
388 362 self.timeZone = 0
389 363 self.dstFlag = None
390 364 self.errorCount = None
391 365 self.nCohInt = None
392 366 self.blocksize = None
393 367 self.flagCohInt = False
394 368 self.flagDecodeData = False # assume the data is not decoded
395 369 self.flagDeflipData = False # assume the data has not been deflipped
396 370 self.flagShiftFFT = False
397 371 self.flagDataAsBlock = False # assume the data is read profile by profile
398 372 self.profileIndex = 0
373 self.metadata_list = ['type', 'heightList', 'timeZone', 'nProfiles', 'channelList', 'nCohInt',
374 'code', 'ncode', 'nbaud', 'ippSeconds', 'ipp']
399 375
400 376 def getNoisebyHildebrand(self, channel=None):
401 377 """
402 378 Determine the noise level using the Hildebrand-Sekhon method
403 379
404 380 Return:
405 381 noiselevel
406 382 """
407 383
408 384 if channel != None:
409 385 data = self.data[channel]
410 386 nChannels = 1
411 387 else:
412 388 data = self.data
413 389 nChannels = self.nChannels
414 390
415 391 noise = numpy.zeros(nChannels)
416 392 power = data * numpy.conjugate(data)
417 393
418 394 for thisChannel in range(nChannels):
419 395 if nChannels == 1:
420 396 daux = power[:].real
421 397 else:
422 398 daux = power[thisChannel, :].real
423 399 noise[thisChannel] = hildebrand_sekhon(daux, self.nCohInt)
424 400
425 401 return noise
426 402
427 403 def getNoise(self, type=1, channel=None):
428 404
429 405 if type == 1:
430 406 noise = self.getNoisebyHildebrand(channel)
431 407
432 408 return noise
433 409
434 410 def getPower(self, channel=None):
435 411
436 412 if channel != None:
437 413 data = self.data[channel]
438 414 else:
439 415 data = self.data
440 416
441 417 power = data * numpy.conjugate(data)
442 418 powerdB = 10 * numpy.log10(power.real)
443 419 powerdB = numpy.squeeze(powerdB)
444 420
445 421 return powerdB
446 422
447 def getTimeInterval(self):
448
449 timeInterval = self.ippSeconds * self.nCohInt
423 @property
424 def timeInterval(self):
450 425
451 return timeInterval
426 return self.ippSeconds * self.nCohInt
452 427
453 428 noise = property(getNoise, "I'm the 'noise' property.")
454 timeInterval = property(getTimeInterval, "I'm the 'timeInterval' property")
455 429
456 430
457 431 class Spectra(JROData):
458 432
459 # data spc es un numpy array de 2 dmensiones (canales, perfiles, alturas)
460 data_spc = None
461 # data cspc es un numpy array de 2 dmensiones (canales, pares, alturas)
462 data_cspc = None
463 # data dc es un numpy array de 2 dmensiones (canales, alturas)
464 data_dc = None
465 # data power
466 data_pwr = None
467 nFFTPoints = None
468 # nPairs = None
469 pairsList = None
470 nIncohInt = None
471 wavelength = None # Necesario para cacular el rango de velocidad desde la frecuencia
472 nCohInt = None # se requiere para determinar el valor de timeInterval
473 ippFactor = None
474 profileIndex = 0
475 plotting = "spectra"
476
477 433 def __init__(self):
478 434 '''
479 435 Constructor
480 436 '''
481 437
482 438 self.useLocalTime = True
483 439 self.radarControllerHeaderObj = RadarControllerHeader()
484 440 self.systemHeaderObj = SystemHeader()
485 441 self.type = "Spectra"
486 442 self.timeZone = 0
487 # self.data = None
488 # self.dtype = None
489 # self.nChannels = 0
490 # self.nHeights = 0
491 443 self.nProfiles = None
492 444 self.heightList = None
493 445 self.channelList = None
494 # self.channelIndexList = None
495 446 self.pairsList = None
496 447 self.flagNoData = True
497 448 self.flagDiscontinuousBlock = False
498 449 self.utctime = None
499 450 self.nCohInt = None
500 451 self.nIncohInt = None
501 452 self.blocksize = None
502 453 self.nFFTPoints = None
503 454 self.wavelength = None
504 455 self.flagDecodeData = False # assume the data is not decoded
505 456 self.flagDeflipData = False # assume the data has not been deflipped
506 457 self.flagShiftFFT = False
507 458 self.ippFactor = 1
508 #self.noise = None
509 459 self.beacon_heiIndexList = []
510 460 self.noise_estimation = None
461 self.metadata_list = ['type', 'heightList', 'timeZone', 'pairsList', 'channelList', 'nCohInt',
462 'code', 'ncode', 'nbaud', 'ippSeconds', 'ipp','nIncohInt', 'nFFTPoints', 'nProfiles']
511 463
512 464 def getNoisebyHildebrand(self, xmin_index=None, xmax_index=None, ymin_index=None, ymax_index=None):
513 465 """
514 466 Determine the noise level using the Hildebrand-Sekhon method
515 467
516 468 Return:
517 469 noiselevel
518 470 """
519 471
520 472 noise = numpy.zeros(self.nChannels)
521 473
522 474 for channel in range(self.nChannels):
523 475 daux = self.data_spc[channel,
524 476 xmin_index:xmax_index, ymin_index:ymax_index]
525 477 noise[channel] = hildebrand_sekhon(daux, self.nIncohInt)
526 478
527 479 return noise
528 480
529 481 def getNoise(self, xmin_index=None, xmax_index=None, ymin_index=None, ymax_index=None):
530 482
531 483 if self.noise_estimation is not None:
532 484 # this was estimated by getNoise Operation defined in jroproc_spectra.py
533 485 return self.noise_estimation
534 486 else:
535 487 noise = self.getNoisebyHildebrand(
536 488 xmin_index, xmax_index, ymin_index, ymax_index)
537 489 return noise
538 490
539 491 def getFreqRangeTimeResponse(self, extrapoints=0):
540 492
541 493 deltafreq = self.getFmaxTimeResponse() / (self.nFFTPoints * self.ippFactor)
542 494 freqrange = deltafreq * (numpy.arange(self.nFFTPoints + extrapoints) - self.nFFTPoints / 2.) - deltafreq / 2
543 495
544 496 return freqrange
545 497
546 498 def getAcfRange(self, extrapoints=0):
547 499
548 500 deltafreq = 10. / (self.getFmax() / (self.nFFTPoints * self.ippFactor))
549 501 freqrange = deltafreq * (numpy.arange(self.nFFTPoints + extrapoints) -self.nFFTPoints / 2.) - deltafreq / 2
550 502
551 503 return freqrange
552 504
553 505 def getFreqRange(self, extrapoints=0):
554 506
555 507 deltafreq = self.getFmax() / (self.nFFTPoints * self.ippFactor)
556 508 freqrange = deltafreq * (numpy.arange(self.nFFTPoints + extrapoints) -self.nFFTPoints / 2.) - deltafreq / 2
557 509
558 510 return freqrange
559 511
560 512 def getVelRange(self, extrapoints=0):
561 513
562 514 deltav = self.getVmax() / (self.nFFTPoints * self.ippFactor)
563 515 velrange = deltav * (numpy.arange(self.nFFTPoints + extrapoints) - self.nFFTPoints / 2.)
564 516
565 517 if self.nmodes:
566 518 return velrange/self.nmodes
567 519 else:
568 520 return velrange
569 521
570 def getNPairs(self):
522 @property
523 def nPairs(self):
571 524
572 525 return len(self.pairsList)
573 526
574 def getPairsIndexList(self):
527 @property
528 def pairsIndexList(self):
575 529
576 530 return list(range(self.nPairs))
577 531
578 def getNormFactor(self):
532 @property
533 def normFactor(self):
579 534
580 535 pwcode = 1
581 536
582 537 if self.flagDecodeData:
583 538 pwcode = numpy.sum(self.code[0]**2)
584 539 #normFactor = min(self.nFFTPoints,self.nProfiles)*self.nIncohInt*self.nCohInt*pwcode*self.windowOfFilter
585 540 normFactor = self.nProfiles * self.nIncohInt * self.nCohInt * pwcode * self.windowOfFilter
586 541
587 542 return normFactor
588 543
589 def getFlagCspc(self):
544 @property
545 def flag_cspc(self):
590 546
591 547 if self.data_cspc is None:
592 548 return True
593 549
594 550 return False
595 551
596 def getFlagDc(self):
552 @property
553 def flag_dc(self):
597 554
598 555 if self.data_dc is None:
599 556 return True
600 557
601 558 return False
602 559
603 def getTimeInterval(self):
560 @property
561 def timeInterval(self):
604 562
605 563 timeInterval = self.ippSeconds * self.nCohInt * self.nIncohInt * self.nProfiles * self.ippFactor
606 564 if self.nmodes:
607 565 return self.nmodes*timeInterval
608 566 else:
609 567 return timeInterval
610 568
611 569 def getPower(self):
612 570
613 571 factor = self.normFactor
614 572 z = self.data_spc / factor
615 573 z = numpy.where(numpy.isfinite(z), z, numpy.NAN)
616 574 avg = numpy.average(z, axis=1)
617 575
618 576 return 10 * numpy.log10(avg)
619 577
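
getPower above normalizes the accumulated spectra by normFactor, averages over the Doppler bins and converts the result to dB. A toy numeric check of that chain with a synthetic flat spectrum and an assumed normalization factor:

    # Toy check of the getPower() chain: normalize, average over Doppler, to dB.
    import numpy

    data_spc = numpy.full((1, 64, 10), 6.4e5)   # (channels, nFFTPoints, heights)
    normFactor = 128 * 10 * 1 * 1 * 1           # nProfiles*nIncohInt*nCohInt*pwcode*window (assumed)
    z = data_spc / normFactor                   # 500.0 everywhere
    avg = numpy.average(z, axis=1)              # average over the Doppler axis
    power_db = 10 * numpy.log10(avg)            # ~26.99 dB per channel and height
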
620 578 def getCoherence(self, pairsList=None, phase=False):
621 579
622 580 z = []
623 581 if pairsList is None:
624 582 pairsIndexList = self.pairsIndexList
625 583 else:
626 584 pairsIndexList = []
627 585 for pair in pairsList:
628 586 if pair not in self.pairsList:
629 587 raise ValueError("Pair %s is not in dataOut.pairsList" % (
630 588 pair))
631 589 pairsIndexList.append(self.pairsList.index(pair))
632 590 for i in range(len(pairsIndexList)):
633 591 pair = self.pairsList[pairsIndexList[i]]
634 592 ccf = numpy.average(self.data_cspc[pairsIndexList[i], :, :], axis=0)
635 593 powa = numpy.average(self.data_spc[pair[0], :, :], axis=0)
636 594 powb = numpy.average(self.data_spc[pair[1], :, :], axis=0)
637 595 avgcoherenceComplex = ccf / numpy.sqrt(powa * powb)
638 596 if phase:
639 597 data = numpy.arctan2(avgcoherenceComplex.imag,
640 598 avgcoherenceComplex.real) * 180 / numpy.pi
641 599 else:
642 600 data = numpy.abs(avgcoherenceComplex)
643 601
644 602 z.append(data)
645 603
646 604 return numpy.array(z)
647 605
648 606 def setValue(self, value):
649 607
650 608 print("This property should not be initialized")
651 609
652 610 return
653
654 nPairs = property(getNPairs, setValue, "I'm the 'nPairs' property.")
655 pairsIndexList = property(
656 getPairsIndexList, setValue, "I'm the 'pairsIndexList' property.")
657 normFactor = property(getNormFactor, setValue,
658 "I'm the 'getNormFactor' property.")
659 flag_cspc = property(getFlagCspc, setValue)
660 flag_dc = property(getFlagDc, setValue)
611
661 612 noise = property(getNoise, setValue, "I'm the 'noise' property.")
662 timeInterval = property(getTimeInterval, setValue,
663 "I'm the 'timeInterval' property")
664 613
665 614
666 615 class SpectraHeis(Spectra):
667 616
668 data_spc = None
669 data_cspc = None
670 data_dc = None
671 nFFTPoints = None
672 # nPairs = None
673 pairsList = None
674 nCohInt = None
675 nIncohInt = None
676
677 617 def __init__(self):
678 618
679 619 self.radarControllerHeaderObj = RadarControllerHeader()
680
681 620 self.systemHeaderObj = SystemHeader()
682
683 621 self.type = "SpectraHeis"
684
685 # self.dtype = None
686
687 # self.nChannels = 0
688
689 # self.nHeights = 0
690
691 622 self.nProfiles = None
692
693 623 self.heightList = None
694
695 624 self.channelList = None
696
697 # self.channelIndexList = None
698
699 625 self.flagNoData = True
700
701 626 self.flagDiscontinuousBlock = False
702
703 # self.nPairs = 0
704
705 627 self.utctime = None
706
707 628 self.blocksize = None
708
709 629 self.profileIndex = 0
710
711 630 self.nCohInt = 1
712
713 631 self.nIncohInt = 1
714 632
715 def getNormFactor(self):
633 @property
634 def normFactor(self):
716 635 pwcode = 1
717 636 if self.flagDecodeData:
718 637 pwcode = numpy.sum(self.code[0]**2)
719 638
720 639 normFactor = self.nIncohInt * self.nCohInt * pwcode
721 640
722 641 return normFactor
723 642
724 def getTimeInterval(self):
725
726 timeInterval = self.ippSeconds * self.nCohInt * self.nIncohInt
727
728 return timeInterval
643 @property
644 def timeInterval(self):
729 645
730 normFactor = property(getNormFactor, "I'm the 'getNormFactor' property.")
731 timeInterval = property(getTimeInterval, "I'm the 'timeInterval' property")
646 return self.ippSeconds * self.nCohInt * self.nIncohInt
732 647
733 648
734 649 class Fits(JROData):
735 650
736 heightList = None
737 channelList = None
738 flagNoData = True
739 flagDiscontinuousBlock = False
740 useLocalTime = False
741 utctime = None
742 # ippSeconds = None
743 # timeInterval = None
744 nCohInt = None
745 nIncohInt = None
746 noise = None
747 windowOfFilter = 1
748 # Speed of ligth
749 C = 3e8
750 frequency = 49.92e6
751 realtime = False
752
753 651 def __init__(self):
754 652
755 653 self.type = "Fits"
756
757 654 self.nProfiles = None
758
759 655 self.heightList = None
760
761 656 self.channelList = None
762
763 # self.channelIndexList = None
764
765 657 self.flagNoData = True
766
767 658 self.utctime = None
768
769 659 self.nCohInt = 1
770
771 660 self.nIncohInt = 1
772
773 661 self.useLocalTime = True
774
775 662 self.profileIndex = 0
776
777 # self.utctime = None
778 663 self.timeZone = 0
779 # self.ltctime = None
780 # self.timeInterval = None
781 # self.header = None
782 # self.data_header = None
783 # self.data = None
784 # self.datatime = None
785 # self.flagNoData = False
786 # self.expName = ''
787 # self.nChannels = None
788 # self.nSamples = None
789 # self.dataBlocksPerFile = None
790 # self.comments = ''
791 #
792
793 def getltctime(self):
794
795 if self.useLocalTime:
796 return self.utctime - self.timeZone * 60
797
798 return self.utctime
799
800 def getDatatime(self):
801
802 datatime = datetime.datetime.utcfromtimestamp(self.ltctime)
803 return datatime
804 664
805 665 def getTimeRange(self):
806 666
807 667 datatime = []
808 668
809 669 datatime.append(self.ltctime)
810 670 datatime.append(self.ltctime + self.timeInterval)
811 671
812 672 datatime = numpy.array(datatime)
813 673
814 674 return datatime
815 675
816 def getHeiRange(self):
817
818 heis = self.heightList
819
820 return heis
821
822 def getNHeights(self):
823
824 return len(self.heightList)
825
826 def getNChannels(self):
827
828 return len(self.channelList)
829
830 676 def getChannelIndexList(self):
831 677
832 678 return list(range(self.nChannels))
833 679
834 680 def getNoise(self, type=1):
835 681
836 #noise = numpy.zeros(self.nChannels)
837 682
838 683 if type == 1:
839 684 noise = self.getNoisebyHildebrand()
840 685
841 686 if type == 2:
842 687 noise = self.getNoisebySort()
843 688
844 689 if type == 3:
845 690 noise = self.getNoisebyWindow()
846 691
847 692 return noise
848 693
849 def getTimeInterval(self):
694 @property
695 def timeInterval(self):
850 696
851 697 timeInterval = self.ippSeconds * self.nCohInt * self.nIncohInt
852 698
853 699 return timeInterval
854 700
855 def get_ippSeconds(self):
701 @property
702 def ippSeconds(self):
856 703 '''
857 704 '''
858 705 return self.ipp_sec
859 706
860
861 datatime = property(getDatatime, "I'm the 'datatime' property")
862 nHeights = property(getNHeights, "I'm the 'nHeights' property.")
863 nChannels = property(getNChannels, "I'm the 'nChannel' property.")
864 channelIndexList = property(
865 getChannelIndexList, "I'm the 'channelIndexList' property.")
866 707 noise = property(getNoise, "I'm the 'noise' property.")
867
868 ltctime = property(getltctime, "I'm the 'ltctime' property")
869 timeInterval = property(getTimeInterval, "I'm the 'timeInterval' property")
870 ippSeconds = property(get_ippSeconds, '')
708
871 709
872 710 class Correlation(JROData):
873 711
874 noise = None
875 SNR = None
876 #--------------------------------------------------
877 mode = None
878 split = False
879 data_cf = None
880 lags = None
881 lagRange = None
882 pairsList = None
883 normFactor = None
884 #--------------------------------------------------
885 # calculateVelocity = None
886 nLags = None
887 nPairs = None
888 nAvg = None
889
890 712 def __init__(self):
891 713 '''
892 714 Constructor
893 715 '''
894 716 self.radarControllerHeaderObj = RadarControllerHeader()
895
896 717 self.systemHeaderObj = SystemHeader()
897
898 718 self.type = "Correlation"
899
900 719 self.data = None
901
902 720 self.dtype = None
903
904 721 self.nProfiles = None
905
906 722 self.heightList = None
907
908 723 self.channelList = None
909
910 724 self.flagNoData = True
911
912 725 self.flagDiscontinuousBlock = False
913
914 726 self.utctime = None
915
916 727 self.timeZone = 0
917
918 728 self.dstFlag = None
919
920 729 self.errorCount = None
921
922 730 self.blocksize = None
923
924 731 self.flagDecodeData = False # assume the data is not decoded
925
926 732 self.flagDeflipData = False # assume the data has not been deflipped
927
928 733 self.pairsList = None
929
930 734 self.nPoints = None
931 735
932 736 def getPairsList(self):
933 737
934 738 return self.pairsList
935 739
936 740 def getNoise(self, mode=2):
937 741
938 742 indR = numpy.where(self.lagR == 0)[0][0]
939 743 indT = numpy.where(self.lagT == 0)[0][0]
940 744
941 745 jspectra0 = self.data_corr[:, :, indR, :]
942 746 jspectra = copy.copy(jspectra0)
943 747
944 748 num_chan = jspectra.shape[0]
945 749 num_hei = jspectra.shape[2]
946 750
947 751 freq_dc = jspectra.shape[1] / 2
948 752 ind_vel = numpy.array([-2, -1, 1, 2]) + freq_dc
949 753
950 754 if ind_vel[0] < 0:
951 755 ind_vel[list(range(0, 1))] = ind_vel[list(
952 756 range(0, 1))] + self.num_prof
953 757
954 758 if mode == 1:
955 759 jspectra[:, freq_dc, :] = (
956 760 jspectra[:, ind_vel[1], :] + jspectra[:, ind_vel[2], :]) / 2 # CORRECTION
957 761
958 762 if mode == 2:
959 763
960 764 vel = numpy.array([-2, -1, 1, 2])
961 765 xx = numpy.zeros([4, 4])
962 766
963 767 for fil in range(4):
964 768 xx[fil, :] = vel[fil]**numpy.asarray(list(range(4)))
965 769
966 770 xx_inv = numpy.linalg.inv(xx)
967 771 xx_aux = xx_inv[0, :]
968 772
969 773 for ich in range(num_chan):
970 774 yy = jspectra[ich, ind_vel, :]
971 775 jspectra[ich, freq_dc, :] = numpy.dot(xx_aux, yy)
972 776
973 777 junkid = jspectra[ich, freq_dc, :] <= 0
974 778 cjunkid = sum(junkid)
975 779
976 780 if cjunkid.any():
977 781 jspectra[ich, freq_dc, junkid.nonzero()] = (
978 782 jspectra[ich, ind_vel[1], junkid] + jspectra[ich, ind_vel[2], junkid]) / 2
979 783
980 784 noise = jspectra0[:, freq_dc, :] - jspectra[:, freq_dc, :]
981 785
982 786 return noise
983 787
984 def getTimeInterval(self):
788 @property
789 def timeInterval(self):
985 790
986 timeInterval = self.ippSeconds * self.nCohInt * self.nProfiles
987
988 return timeInterval
791 return self.ippSeconds * self.nCohInt * self.nProfiles
989 792
990 793 def splitFunctions(self):
991 794
992 795 pairsList = self.pairsList
993 796 ccf_pairs = []
994 797 acf_pairs = []
995 798 ccf_ind = []
996 799 acf_ind = []
997 800 for l in range(len(pairsList)):
998 801 chan0 = pairsList[l][0]
999 802 chan1 = pairsList[l][1]
1000 803
1001 804 # Getting the autocorrelation pairs
1002 805 if chan0 == chan1:
1003 806 acf_pairs.append(chan0)
1004 807 acf_ind.append(l)
1005 808 else:
1006 809 ccf_pairs.append(pairsList[l])
1007 810 ccf_ind.append(l)
1008 811
1009 812 data_acf = self.data_cf[acf_ind]
1010 813 data_ccf = self.data_cf[ccf_ind]
1011 814
1012 815 return acf_ind, ccf_ind, acf_pairs, ccf_pairs, data_acf, data_ccf
1013 816
1014 def getNormFactor(self):
817 @property
818 def normFactor(self):
1015 819 acf_ind, ccf_ind, acf_pairs, ccf_pairs, data_acf, data_ccf = self.splitFunctions()
1016 820 acf_pairs = numpy.array(acf_pairs)
1017 821 normFactor = numpy.zeros((self.nPairs, self.nHeights))
1018 822
1019 823 for p in range(self.nPairs):
1020 824 pair = self.pairsList[p]
1021 825
1022 826 ch0 = pair[0]
1023 827 ch1 = pair[1]
1024 828
1025 829 ch0_max = numpy.max(data_acf[acf_pairs == ch0, :, :], axis=1)
1026 830 ch1_max = numpy.max(data_acf[acf_pairs == ch1, :, :], axis=1)
1027 831 normFactor[p, :] = numpy.sqrt(ch0_max * ch1_max)
1028 832
1029 833 return normFactor
1030 834
1031 timeInterval = property(getTimeInterval, "I'm the 'timeInterval' property")
1032 normFactor = property(getNormFactor, "I'm the 'normFactor property'")
1033
1034 835
1035 836 class Parameters(Spectra):
1036 837
1037 experimentInfo = None # Information about the experiment
1038 # Information from previous data
1039 inputUnit = None # Type of data to be processed
1040 operation = None # Type of operation to parametrize
1041 # normFactor = None #Normalization Factor
1042 838 groupList = None # List of Pairs, Groups, etc
1043 # Parameters
1044 839 data_param = None # Parameters obtained
1045 840 data_pre = None # Data Pre Parametrization
1046 841 data_SNR = None # Signal to Noise Ratio
1047 # heightRange = None #Heights
1048 842 abscissaList = None # Abscissa, can be velocities, lags or time
1049 # noise = None #Noise Potency
1050 843 utctimeInit = None # Initial UTC time
1051 844 paramInterval = None # Time interval to calculate Parameters in seconds
1052 845 useLocalTime = True
1053 846 # Fitting
1054 847 data_error = None # Error of the estimation
1055 848 constants = None
1056 849 library = None
1057 850 # Output signal
1058 851 outputInterval = None # Time interval to calculate output signal in seconds
1059 852 data_output = None # Out signal
1060 853 nAvg = None
1061 854 noise_estimation = None
1062 855 GauSPC = None # Fit gaussian SPC
1063 856
1064 857 def __init__(self):
1065 858 '''
1066 859 Constructor
1067 860 '''
1068 861 self.radarControllerHeaderObj = RadarControllerHeader()
1069 862 self.systemHeaderObj = SystemHeader()
1070 863 self.type = "Parameters"
1071 864 self.timeZone = 0
1072 865
1073 866 def getTimeRange1(self, interval):
1074 867
1075 868 datatime = []
1076 869
1077 870 if self.useLocalTime:
1078 871 time1 = self.utctimeInit - self.timeZone * 60
1079 872 else:
1080 873 time1 = self.utctimeInit
1081 874
1082 875 datatime.append(time1)
1083 876 datatime.append(time1 + interval)
1084 877 datatime = numpy.array(datatime)
1085 878
1086 879 return datatime
1087 880
1088 def getTimeInterval(self):
881 @property
882 def timeInterval(self):
1089 883
1090 884 if hasattr(self, 'timeInterval1'):
1091 885 return self.timeInterval1
1092 886 else:
1093 887 return self.paramInterval
1094 888
1095 889 def setValue(self, value):
1096 890
1097 891 print("This property should not be initialized")
1098 892
1099 893 return
1100 894
1101 895 def getNoise(self):
1102 896
1103 897 return self.spc_noise
1104 898
1105 timeInterval = property(getTimeInterval)
1106 899 noise = property(getNoise, setValue, "I'm the 'Noise' property.")
1107 900
1108 901
1109 902 class PlotterData(object):
1110 903 '''
1111 904 Object to hold data to be plotted
1112 905 '''
1113 906
1114 907 MAXNUMX = 200
1115 908 MAXNUMY = 200
1116 909
1117 910 def __init__(self, code, throttle_value, exp_code, localtime=True, buffering=True, snr=False):
1118 911
1119 912 self.key = code
1120 913 self.throttle = throttle_value
1121 914 self.exp_code = exp_code
1122 915 self.buffering = buffering
1123 916 self.ready = False
1124 917 self.flagNoData = False
1125 918 self.localtime = localtime
1126 919 self.data = {}
1127 920 self.meta = {}
1128 921 self.__heights = []
1129 922
1130 923 if 'snr' in code:
1131 924 self.plottypes = ['snr']
1132 925 elif code == 'spc':
1133 926 self.plottypes = ['spc', 'noise', 'rti']
1134 927 elif code == 'cspc':
1135 928 self.plottypes = ['cspc', 'spc', 'noise', 'rti']
1136 929 elif code == 'rti':
1137 930 self.plottypes = ['noise', 'rti']
1138 931 else:
1139 932 self.plottypes = [code]
1140 933
1141 934 if 'snr' not in self.plottypes and snr:
1142 935 self.plottypes.append('snr')
1143 936
1144 937 for plot in self.plottypes:
1145 938 self.data[plot] = {}
1146 939
1147 940 def __str__(self):
1148 941 dum = ['{}{}'.format(key, self.shape(key)) for key in self.data]
1149 942 return 'Data[{}][{}]'.format(';'.join(dum), len(self.times))
1150 943
1151 944 def __len__(self):
1152 945 return len(self.data[self.key])
1153 946
1154 947 def __getitem__(self, key):
1155 948
1156 949 if key not in self.data:
1157 950 raise KeyError(log.error('Missing key: {}'.format(key)))
1158 951 if 'spc' in key or not self.buffering:
1159 952 ret = self.data[key][self.tm]
1160 953 elif 'scope' in key:
1161 954 ret = numpy.array(self.data[key][float(self.tm)])
1162 955 else:
1163 956 ret = numpy.array([self.data[key][x] for x in self.times])
1164 957 if ret.ndim > 1:
1165 958 ret = numpy.swapaxes(ret, 0, 1)
1166 959 return ret
1167 960
1168 961 def __contains__(self, key):
1169 962 return key in self.data
1170 963
1171 964 def setup(self):
1172 965 '''
1173 966 Configure object
1174 967 '''
1175 968 self.type = ''
1176 969 self.ready = False
1177 970 del self.data
1178 971 self.data = {}
1179 972 self.__heights = []
1180 973 self.__all_heights = set()
1181 974 for plot in self.plottypes:
1182 975 if 'snr' in plot:
1183 976 plot = 'snr'
1184 977 elif 'spc_moments' == plot:
1185 978 plot = 'moments'
1186 979 self.data[plot] = {}
1187 980
1188 981 if 'spc' in self.data or 'rti' in self.data or 'cspc' in self.data or 'moments' in self.data:
1189 982 self.data['noise'] = {}
1190 983 self.data['rti'] = {}
1191 984 if 'noise' not in self.plottypes:
1192 985 self.plottypes.append('noise')
1193 986 if 'rti' not in self.plottypes:
1194 987 self.plottypes.append('rti')
1195 988
1196 989 def shape(self, key):
1197 990 '''
1198 991 Get the shape of the one-element data for the given key
1199 992 '''
1200 993
1201 994 if len(self.data[key]):
1202 995 if 'spc' in key or not self.buffering:
1203 996 return self.data[key].shape
1204 997 return self.data[key][self.times[0]].shape
1205 998 return (0,)
1206 999
1207 1000 def update(self, dataOut, tm):
1208 1001 '''
1209 1002 Update data object with new dataOut
1210 1003 '''
1211 1004
1212 1005 self.profileIndex = dataOut.profileIndex
1213 1006 self.tm = tm
1214 1007 self.type = dataOut.type
1215 1008 self.parameters = getattr(dataOut, 'parameters', [])
1216 1009
1217 1010 if hasattr(dataOut, 'meta'):
1218 1011 self.meta.update(dataOut.meta)
1219 1012
1220 1013 if hasattr(dataOut, 'pairsList'):
1221 1014 self.pairs = dataOut.pairsList
1222 1015
1223 self.interval = dataOut.getTimeInterval()
1016 self.interval = dataOut.timeInterval
1224 1017 if True in ['spc' in ptype for ptype in self.plottypes]:
1225 1018 self.xrange = (dataOut.getFreqRange(1)/1000.,
1226 1019 dataOut.getAcfRange(1), dataOut.getVelRange(1))
1227 1020 self.__heights.append(dataOut.heightList)
1228 1021 self.__all_heights.update(dataOut.heightList)
1229 1022
1230 1023 for plot in self.plottypes:
1231 1024 if plot in ('spc', 'spc_moments', 'spc_cut'):
1232 1025 z = dataOut.data_spc/dataOut.normFactor
1233 1026 buffer = 10*numpy.log10(z)
1234 1027 if plot == 'cspc':
1235 1028 buffer = (dataOut.data_spc, dataOut.data_cspc)
1236 1029 if plot == 'noise':
1237 1030 buffer = 10*numpy.log10(dataOut.getNoise()/dataOut.normFactor)
1238 1031 if plot in ('rti', 'spcprofile'):
1239 1032 buffer = dataOut.getPower()
1240 1033 if plot == 'snr_db':
1241 1034 buffer = dataOut.data_SNR
1242 1035 if plot == 'snr':
1243 1036 buffer = 10*numpy.log10(dataOut.data_SNR)
1244 1037 if plot == 'dop':
1245 1038 buffer = dataOut.data_DOP
1246 1039 if plot == 'pow':
1247 1040 buffer = 10*numpy.log10(dataOut.data_POW)
1248 1041 if plot == 'width':
1249 1042 buffer = dataOut.data_WIDTH
1250 1043 if plot == 'coh':
1251 1044 buffer = dataOut.getCoherence()
1252 1045 if plot == 'phase':
1253 1046 buffer = dataOut.getCoherence(phase=True)
1254 1047 if plot == 'output':
1255 1048 buffer = dataOut.data_output
1256 1049 if plot == 'param':
1257 1050 buffer = dataOut.data_param
1258 1051 if plot == 'scope':
1259 1052 buffer = dataOut.data
1260 1053 self.flagDataAsBlock = dataOut.flagDataAsBlock
1261 1054 self.nProfiles = dataOut.nProfiles
1262 1055 if plot == 'pp_power':
1263 1056 buffer = dataOut.dataPP_POWER
1264 1057 self.flagDataAsBlock = dataOut.flagDataAsBlock
1265 1058 self.nProfiles = dataOut.nProfiles
1266 1059 if plot == 'pp_signal':
1267 1060 buffer = dataOut.dataPP_POW
1268 1061 self.flagDataAsBlock = dataOut.flagDataAsBlock
1269 1062 self.nProfiles = dataOut.nProfiles
1270 1063 if plot == 'pp_velocity':
1271 1064 buffer = dataOut.dataPP_DOP
1272 1065 self.flagDataAsBlock = dataOut.flagDataAsBlock
1273 1066 self.nProfiles = dataOut.nProfiles
1274 1067 if plot == 'pp_specwidth':
1275 1068 buffer = dataOut.dataPP_WIDTH
1276 1069 self.flagDataAsBlock = dataOut.flagDataAsBlock
1277 1070 self.nProfiles = dataOut.nProfiles
1278 1071
1279 1072 if plot == 'spc':
1280 1073 self.data['spc'][tm] = buffer
1281 1074 elif plot == 'cspc':
1282 1075 self.data['cspc'][tm] = buffer
1283 1076 elif plot == 'spc_moments':
1284 1077 self.data['spc'][tm] = buffer
1285 1078 self.data['moments'][tm] = dataOut.moments
1286 1079 else:
1287 1080 if self.buffering:
1288 1081 self.data[plot][tm] = buffer
1289 1082 else:
1290 1083 self.data[plot][tm] = buffer
1291 1084
1292 1085 if dataOut.channelList is None:
1293 1086 self.channels = range(buffer.shape[0])
1294 1087 else:
1295 1088 self.channels = dataOut.channelList
1296 1089
1297 1090 if buffer is None:
1298 1091 self.flagNoData = True
1299 1092 raise schainpy.admin.SchainWarning('Attribute data_{} is empty'.format(self.key))
1300 1093
1301 1094 def normalize_heights(self):
1302 1095 '''
1303 1096 Ensure the data have the same dimensions across different heightList values
1304 1097 '''
1305 1098
1306 1099 H = numpy.array(list(self.__all_heights))
1307 1100 H.sort()
1308 1101 for key in self.data:
1309 1102 shape = self.shape(key)[:-1] + H.shape
1310 1103 for tm, obj in list(self.data[key].items()):
1311 1104 h = self.__heights[self.times.tolist().index(tm)]
1312 1105 if H.size == h.size:
1313 1106 continue
1314 1107 index = numpy.where(numpy.in1d(H, h))[0]
1315 1108 dummy = numpy.zeros(shape) + numpy.nan
1316 1109 if len(shape) == 2:
1317 1110 dummy[:, index] = obj
1318 1111 else:
1319 1112 dummy[index] = obj
1320 1113 self.data[key][tm] = dummy
1321 1114
1322 1115 self.__heights = [H for tm in self.times]
1323 1116
1324 1117 def jsonify(self, tm, plot_name, plot_type, decimate=False):
1325 1118 '''
1326 1119 Convert data to json
1327 1120 '''
1328 1121
1329 1122 dy = int(self.heights.size/self.MAXNUMY) + 1
1330 1123 if self.key in ('spc', 'cspc'):
1331 1124 dx = int(self.data[self.key][tm].shape[1]/self.MAXNUMX) + 1
1332 1125 data = self.roundFloats(
1333 1126 self.data[self.key][tm][::, ::dx, ::dy].tolist())
1334 1127 else:
1335 1128 if self.key == 'noise':
1336 1129 data = [[x] for x in self.roundFloats(self.data[self.key][tm].tolist())]
1337 1130 else:
1338 1131 data = self.roundFloats(self.data[self.key][tm][::, ::dy].tolist())
1339 1132
1340 1133 meta = {}
1341 1134 ret = {
1342 1135 'plot': plot_name,
1343 1136 'code': self.exp_code,
1344 1137 'time': float(tm),
1345 1138 'data': data,
1346 1139 }
1347 1140 meta['type'] = plot_type
1348 1141 meta['interval'] = float(self.interval)
1349 1142 meta['localtime'] = self.localtime
1350 1143 meta['yrange'] = self.roundFloats(self.heights[::dy].tolist())
1351 1144 if 'spc' in self.data or 'cspc' in self.data:
1352 1145 meta['xrange'] = self.roundFloats(self.xrange[2][::dx].tolist())
1353 1146 else:
1354 1147 meta['xrange'] = []
1355 1148
1356 1149 meta.update(self.meta)
1357 1150 ret['metadata'] = meta
1358 1151 return json.dumps(ret)
1359 1152
1360 1153 @property
1361 1154 def times(self):
1362 1155 '''
1363 1156 Return the list of times of the current data
1364 1157 '''
1365 1158
1366 1159 ret = numpy.array([t for t in self.data[self.key]])
1367 1160 if self:
1368 1161 ret.sort()
1369 1162 return ret
1370 1163
1371 1164 @property
1372 1165 def min_time(self):
1373 1166 '''
1374 1167 Return the minimum time value
1375 1168 '''
1376 1169
1377 1170 return self.times[0]
1378 1171
1379 1172 @property
1380 1173 def max_time(self):
1381 1174 '''
1382 1175 Return the maximum time value
1383 1176 '''
1384 1177
1385 1178 return self.times[-1]
1386 1179
1387 1180 @property
1388 1181 def heights(self):
1389 1182 '''
1390 1183 Return the list of heights of the current data
1391 1184 '''
1392 1185
1393 1186 return numpy.array(self.__heights[-1])
1394 1187
1395 1188 @staticmethod
1396 1189 def roundFloats(obj):
1397 1190 if isinstance(obj, list):
1398 1191 return list(map(PlotterData.roundFloats, obj))
1399 1192 elif isinstance(obj, float):
1400 1193 return round(obj, 2)
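
PlotterData.jsonify above decimates the buffered array, rounds every float to two decimals with roundFloats and wraps the result together with the plot metadata. A hedged sketch of the shape of the resulting JSON payload (all field values here are illustrative):

    # Illustrative shape of the payload produced by PlotterData.jsonify().
    payload = {
        'plot': 'rti',                              # plot_name argument
        'code': 'exp_code',
        'time': 1594684800.0,
        'data': [[12.34, 15.1], [11.02, 14.87]],    # decimated + roundFloats(...)
        'metadata': {
            'type': 'pcolor',                       # plot_type argument
            'interval': 1.0,
            'localtime': True,
            'yrange': [90.0, 94.5, 99.0],
            'xrange': [],
        },
    }
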
@@ -1,1577 +1,1575 @@
1 1 """
2 2 Created on Jul 2, 2014
3 3
4 4 @author: roj-idl71
5 5 """
6 6 import os
7 7 import sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import inspect
13 13 import time
14 14 import datetime
15 15 import zmq
16 16
17 17 from schainpy.model.proc.jroproc_base import Operation, MPDecorator
18 18 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
19 19 from schainpy.model.data.jroheaderIO import get_dtype_index, get_numpy_dtype, get_procflag_dtype, get_dtype_width
20 20 from schainpy.utils import log
21 21 import schainpy.admin
22 22
23 23 LOCALTIME = True
24 24 DT_DIRECTIVES = {
25 25 '%Y': 4,
26 26 '%y': 2,
27 27 '%m': 2,
28 28 '%d': 2,
29 29 '%j': 3,
30 30 '%H': 2,
31 31 '%M': 2,
32 32 '%S': 2,
33 33 '%f': 6
34 34 }
35 35
36 36
37 37 def isNumber(cad):
38 38 """
39 39 Checks whether the characters of a string can be converted to a number.
40 40
41 41 Exceptions:
42 42 If a given string cannot be converted to a number
43 43 Input:
44 44 str, string which is analyzed to determine whether it can be converted to a number
45 45
46 46 Return:
47 47 True : the string is numeric
48 48 False : the string is not numeric
49 49 """
50 50 try:
51 51 float(cad)
52 52 return True
53 53 except:
54 54 return False
55 55
56 56
57 57 def isFileInEpoch(filename, startUTSeconds, endUTSeconds):
58 58 """
59 59 This function determines whether a data file falls within the specified date range.
60 60
61 61 Inputs:
62 62 filename : full name of the data file in Jicamarca format (.r)
63 63
64 64 startUTSeconds : start date of the selected range, given in
65 65 seconds counted from 01/01/1970.
66 66 endUTSeconds : end date of the selected range, given in
67 67 seconds counted from 01/01/1970.
68 68
69 69 Return:
70 70 Boolean : Returns True if the data file contains data within the specified
71 71 date range, otherwise returns False.
72 72
73 73 Exceptions:
74 74 If the file does not exist or cannot be opened
75 75 If the header cannot be read.
76 76
77 77 """
78 78 basicHeaderObj = BasicHeader(LOCALTIME)
79 79
80 80 try:
81 81 fp = open(filename, 'rb')
82 82 except IOError:
83 83 print("The file %s can't be opened" % (filename))
84 84 return 0
85 85
86 86 sts = basicHeaderObj.read(fp)
87 87 fp.close()
88 88
89 89 if not(sts):
90 90 print("Skipping the file %s because it does not have a valid header" % (filename))
91 91 return 0
92 92
93 93 if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
94 94 return 0
95 95
96 96 return 1
97 97
98 98
99 99 def isTimeInRange(thisTime, startTime, endTime):
100 100 if endTime >= startTime:
101 101 if (thisTime < startTime) or (thisTime > endTime):
102 102 return 0
103 103 return 1
104 104 else:
105 105 if (thisTime < startTime) and (thisTime > endTime):
106 106 return 0
107 107 return 1
108 108
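
isTimeInRange above treats endTime < startTime as a window that wraps past midnight. A few quick checks with assumed times, following the logic of the function as written:

    # Quick checks of the wrap-past-midnight behaviour of isTimeInRange().
    import datetime

    isTimeInRange(datetime.time(10, 0), datetime.time(8, 0), datetime.time(18, 0))   # 1, inside a same-day window
    isTimeInRange(datetime.time(1, 30), datetime.time(22, 0), datetime.time(6, 0))   # 1, inside a window crossing midnight
    isTimeInRange(datetime.time(12, 0), datetime.time(22, 0), datetime.time(6, 0))   # 0, outside the wrapped window
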
109 109
110 110 def isFileInTimeRange(filename, startDate, endDate, startTime, endTime):
111 111 """
112 112 Returns 1 if the data file falls within the specified time range.
113 113
114 114 Inputs:
115 115 filename : full name of the data file in Jicamarca format (.r)
116 116
117 117 startDate : start date of the selected range as datetime.date
118 118
119 119 endDate : end date of the selected range as datetime.date
120 120
121 121 startTime : start time of the selected range as datetime.time
122 122
123 123 endTime : end time of the selected range as datetime.time
124 124
125 125 Return:
126 126 Boolean : Returns True if the data file contains data within the specified
127 127 date range, otherwise returns False.
128 128
129 129 Exceptions:
130 130 If the file does not exist or cannot be opened
131 131 If the header cannot be read.
132 132
133 133 """
134 134
135 135 try:
136 136 fp = open(filename, 'rb')
137 137 except IOError:
138 138 print("The file %s can't be opened" % (filename))
139 139 return None
140 140
141 141 firstBasicHeaderObj = BasicHeader(LOCALTIME)
142 142 systemHeaderObj = SystemHeader()
143 143 radarControllerHeaderObj = RadarControllerHeader()
144 144 processingHeaderObj = ProcessingHeader()
145 145
146 146 lastBasicHeaderObj = BasicHeader(LOCALTIME)
147 147
148 148 sts = firstBasicHeaderObj.read(fp)
149 149
150 150 if not(sts):
151 151 print("[Reading] Skipping the file %s because it does not have a valid header" % (filename))
152 152 return None
153 153
154 154 if not systemHeaderObj.read(fp):
155 155 return None
156 156
157 157 if not radarControllerHeaderObj.read(fp):
158 158 return None
159 159
160 160 if not processingHeaderObj.read(fp):
161 161 return None
162 162
163 163 filesize = os.path.getsize(filename)
164 164
165 165 offset = processingHeaderObj.blockSize + 24 # header size
166 166
167 167 if filesize <= offset:
168 168 print("[Reading] %s: This file has not enough data" % filename)
169 169 return None
170 170
171 171 fp.seek(-offset, 2)
172 172
173 173 sts = lastBasicHeaderObj.read(fp)
174 174
175 175 fp.close()
176 176
177 177 thisDatetime = lastBasicHeaderObj.datatime
178 178 thisTime_last_block = thisDatetime.time()
179 179
180 180 thisDatetime = firstBasicHeaderObj.datatime
181 181 thisDate = thisDatetime.date()
182 182 thisTime_first_block = thisDatetime.time()
183 183
184 184 # General case
185 185 # o>>>>>>>>>>>>>><<<<<<<<<<<<<<o
186 186 #-----------o----------------------------o-----------
187 187 # startTime endTime
188 188
189 189 if endTime >= startTime:
190 190 if (thisTime_last_block < startTime) or (thisTime_first_block > endTime):
191 191 return None
192 192
193 193 return thisDatetime
194 194
195 195 # If endTime < startTime then endTime belongs to the next day
196 196
197 197 #<<<<<<<<<<<o o>>>>>>>>>>>
198 198 #-----------o----------------------------o-----------
199 199 # endTime startTime
200 200
201 201 if (thisDate == startDate) and (thisTime_last_block < startTime):
202 202 return None
203 203
204 204 if (thisDate == endDate) and (thisTime_first_block > endTime):
205 205 return None
206 206
207 207 if (thisTime_last_block < startTime) and (thisTime_first_block > endTime):
208 208 return None
209 209
210 210 return thisDatetime
211 211
212 212
213 213 def isFolderInDateRange(folder, startDate=None, endDate=None):
214 214 """
215 215 Returns 1 if the data folder falls within the specified date range.
216 216
217 217 Inputs:
218 218 folder : full name of the directory.
219 219 Its format should be "/path_root/?YYYYDDD"
220 220
221 221 where:
222 222 YYYY : year (e.g. 2015)
223 223 DDD : day of the year (e.g. 305)
224 224
225 225 startDate : start date of the selected range as datetime.date
226 226
227 227 endDate : end date of the selected range as datetime.date
228 228
229 229 Return:
230 230 Boolean : Returns True if the folder contains data within the specified
231 231 date range, otherwise returns False.
232 232 Exceptions:
233 233 If the directory does not have the proper format
234 234 """
235 235
236 236 basename = os.path.basename(folder)
237 237
238 238 if not isRadarFolder(basename):
239 239 print("The folder %s does not have the right format" % folder)
240 240 return 0
241 241
242 242 if startDate and endDate:
243 243 thisDate = getDateFromRadarFolder(basename)
244 244
245 245 if thisDate < startDate:
246 246 return 0
247 247
248 248 if thisDate > endDate:
249 249 return 0
250 250
251 251 return 1
252 252
253 253
254 254 def isFileInDateRange(filename, startDate=None, endDate=None):
255 255 """
256 256 Returns 1 if the data file falls within the specified date range.
257 257
258 258 Inputs:
259 259 filename : full name of the data file in Jicamarca format (.r)
260 260
261 261 Its format should be "?YYYYDDDsss"
262 262
263 263 where:
264 264 YYYY : year (e.g. 2015)
265 265 DDD : day of the year (e.g. 305)
266 266 sss : set
267 267
268 268 startDate : start date of the selected range as datetime.date
269 269
270 270 endDate : end date of the selected range as datetime.date
271 271
272 272 Return:
273 273 Boolean : Returns True if the data file contains data within the specified
274 274 date range, otherwise returns False.
275 275 Exceptions:
276 276 If the file does not have the proper format
277 277 """
278 278
279 279 basename = os.path.basename(filename)
280 280
281 281 if not isRadarFile(basename):
282 282 print("The filename %s does not have the right format" % filename)
283 283 return 0
284 284
285 285 if startDate and endDate:
286 286 thisDate = getDateFromRadarFile(basename)
287 287
288 288 if thisDate < startDate:
289 289 return 0
290 290
291 291 if thisDate > endDate:
292 292 return 0
293 293
294 294 return 1
295 295
296 296
297 297 def getFileFromSet(path, ext, set):
298 298 validFilelist = []
299 299 fileList = os.listdir(path)
300 300
301 301 # 0 1234 567 89A BCDE
302 302 # H YYYY DDD SSS .ext
303 303
304 304 for thisFile in fileList:
305 305 try:
306 306 year = int(thisFile[1:5])
307 307 doy = int(thisFile[5:8])
308 308 except:
309 309 continue
310 310
311 311 if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
312 312 continue
313 313
314 314 validFilelist.append(thisFile)
315 315
316 316 myfile = fnmatch.filter(
317 317 validFilelist, '*%4.4d%3.3d%3.3d*' % (year, doy, set))
318 318
319 319 if len(myfile) != 0:
320 320 return myfile[0]
321 321 else:
322 322 filename = '*%4.4d%3.3d%3.3d%s' % (year, doy, set, ext.lower())
323 323 print('the filename %s does not exist' % filename)
324 324 print('...going to the last file: ')
325 325
326 326 if validFilelist:
327 327 validFilelist = sorted(validFilelist, key=str.lower)
328 328 return validFilelist[-1]
329 329
330 330 return None
331 331
332 332
333 333 def getlastFileFromPath(path, ext):
334 334 """
335 335 Filters the fileList keeping only the entries that match the "PYYYYDDDSSS.ext" format;
336 336 at the end of the filtering it returns the last file of the remaining list.
337 337
338 338 Input:
339 339 fileList : list containing all the files (without path) of a given folder
340 340 ext : extension of the files contained in a folder
341 341
342 342 Return:
343 343 The last file of a given folder, without the path.
344 344 """
345 345 validFilelist = []
346 346 fileList = os.listdir(path)
347 347
348 348 # 0 1234 567 89A BCDE
349 349 # H YYYY DDD SSS .ext
350 350
351 351 for thisFile in fileList:
352 352
353 353 year = thisFile[1:5]
354 354 if not isNumber(year):
355 355 continue
356 356
357 357 doy = thisFile[5:8]
358 358 if not isNumber(doy):
359 359 continue
360 360
361 361 year = int(year)
362 362 doy = int(doy)
363 363
364 364 if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
365 365 continue
366 366
367 367 validFilelist.append(thisFile)
368 368
369 369 if validFilelist:
370 370 validFilelist = sorted(validFilelist, key=str.lower)
371 371 return validFilelist[-1]
372 372
373 373 return None
374 374
375 375
376 376 def isRadarFolder(folder):
377 377 try:
378 378 year = int(folder[1:5])
379 379 doy = int(folder[5:8])
380 380 except:
381 381 return 0
382 382
383 383 return 1
384 384
385 385
386 386 def isRadarFile(file):
387 387 try:
388 388 year = int(file[1:5])
389 389 doy = int(file[5:8])
390 390 set = int(file[8:11])
391 391 except:
392 392 return 0
393 393
394 394 return 1
395 395
396 396
397 397 def getDateFromRadarFile(file):
398 398 try:
399 399 year = int(file[1:5])
400 400 doy = int(file[5:8])
401 401 set = int(file[8:11])
402 402 except:
403 403 return None
404 404
405 405 thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy - 1)
406 406 return thisDate
407 407
408 408
409 409 def getDateFromRadarFolder(folder):
410 410 try:
411 411 year = int(folder[1:5])
412 412 doy = int(folder[5:8])
413 413 except:
414 414 return None
415 415
416 416 thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy - 1)
417 417 return thisDate
418 418
419 419 def parse_format(s, fmt):
420 420
421 421 for i in range(fmt.count('%')):
422 422 x = fmt.index('%')
423 423 d = DT_DIRECTIVES[fmt[x:x+2]]
424 424 fmt = fmt.replace(fmt[x:x+2], s[x:x+d])
425 425 return fmt
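# Illustrative sketch (not in the original source), assuming DT_DIRECTIVES maps each strptime
# directive to its fixed field width (e.g. '%Y' -> 4, '%j' -> 3): parse_format() substitutes the
# literal characters of 's' into 'fmt', so the result can be parsed with the same format string:
#     parse_format('d2015305', 'd%Y%j')                                              # -> 'd2015305'
#     datetime.datetime.strptime(parse_format('d2015305', 'd%Y%j'), 'd%Y%j').date()  # -> 2015-11-01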
426 426
427 427 class Reader(object):
428 428
429 429 c = 3E8
430 430 isConfig = False
431 431 dtype = None
432 432 pathList = []
433 433 filenameList = []
434 434 datetimeList = []
435 435 filename = None
436 436 ext = None
437 437 flagIsNewFile = 1
438 438 flagDiscontinuousBlock = 0
439 439 flagIsNewBlock = 0
440 440 flagNoMoreFiles = 0
441 441 fp = None
442 442 firstHeaderSize = 0
443 443 basicHeaderSize = 24
444 444 versionFile = 1103
445 445 fileSize = None
446 446 fileSizeByHeader = None
447 447 fileIndex = -1
448 448 profileIndex = None
449 449 blockIndex = 0
450 450 nTotalBlocks = 0
451 451 maxTimeStep = 30
452 452 lastUTTime = None
453 453 datablock = None
454 454 dataOut = None
455 455 getByBlock = False
456 456 path = None
457 457 startDate = None
458 458 endDate = None
459 459 startTime = datetime.time(0, 0, 0)
460 460 endTime = datetime.time(23, 59, 59)
461 461 set = None
462 462 expLabel = ""
463 463 online = False
464 464 delay = 60
465 465 nTries = 3 # number of tries
466 466 nFiles = 3 # number of files for searching
467 467 walk = True
468 468 getblock = False
469 469 nTxs = 1
470 470 realtime = False
471 471 blocksize = 0
472 472 blocktime = None
473 473 warnings = True
474 474 verbose = True
475 475 server = None
476 476 format = None
477 477 oneDDict = None
478 478 twoDDict = None
479 479 independentParam = None
480 480 filefmt = None
481 481 folderfmt = None
482 482 open_file = open
483 483 open_mode = 'rb'
484 484
485 485 def run(self):
486 486
487 487 raise NotImplementedError
488 488
489 489 def getAllowedArgs(self):
490 490 if hasattr(self, '__attrs__'):
491 491 return self.__attrs__
492 492 else:
493 493 return inspect.getargspec(self.run).args
494 494
495 495 def set_kwargs(self, **kwargs):
496 496
497 497 for key, value in kwargs.items():
498 498 setattr(self, key, value)
499 499
500 500 def find_folders(self, path, startDate, endDate, folderfmt, last=False):
501 501
502 502 folders = [x for f in path.split(',')
503 503 for x in os.listdir(f) if os.path.isdir(os.path.join(f, x))]
504 504 folders.sort()
505 505
506 506 if last:
507 507 folders = [folders[-1]]
508 508
509 509 for folder in folders:
510 510 try:
511 511 dt = datetime.datetime.strptime(parse_format(folder, folderfmt), folderfmt).date()
512 512 if dt >= startDate and dt <= endDate:
513 513 yield os.path.join(path, folder)
514 514 else:
515 515 log.log('Skipping folder {}'.format(folder), self.name)
516 516 except Exception as e:
517 517 log.log('Skipping folder {}'.format(folder), self.name)
518 518 continue
519 519 return
520 520
521 521 def find_files(self, folders, ext, filefmt, startDate=None, endDate=None,
522 522 expLabel='', last=False):
523 523
524 524 for path in folders:
525 525 files = glob.glob1(path, '*{}'.format(ext))
526 526 files.sort()
527 527 if last:
528 528 if files:
529 529 fo = files[-1]
530 530 try:
531 531 dt = datetime.datetime.strptime(parse_format(fo, filefmt), filefmt).date()
532 532 yield os.path.join(path, expLabel, fo)
533 533 except Exception as e:
534 534 pass
535 535 return
536 536 else:
537 537 return
538 538
539 539 for fo in files:
540 540 try:
541 541 dt = datetime.datetime.strptime(parse_format(fo, filefmt), filefmt).date()
542 542 if dt >= startDate and dt <= endDate:
543 543 yield os.path.join(path, expLabel, fo)
544 544 else:
545 545 log.log('Skipping file {}'.format(fo), self.name)
546 546 except Exception as e:
547 547 log.log('Skipping file {}'.format(fo), self.name)
548 548 continue
549 549
550 550 def searchFilesOffLine(self, path, startDate, endDate,
551 551 expLabel, ext, walk,
552 552 filefmt, folderfmt):
553 553 """Search files in offline mode for the given arguments
554 554
555 555 Return:
556 556 Generator of files
557 557 """
558 558
559 559 if walk:
560 560 folders = self.find_folders(
561 561 path, startDate, endDate, folderfmt)
562 562 else:
563 563 folders = path.split(',')
564 564
565 565 return self.find_files(
566 566 folders, ext, filefmt, startDate, endDate, expLabel)
567 567
568 568 def searchFilesOnLine(self, path, startDate, endDate,
569 569 expLabel, ext, walk,
570 570 filefmt, folderfmt):
571 571 """Search for the last file of the last folder
572 572
573 573 Arguments:
574 574 path : folder containing the data files
575 575 expLabel : name of the sub-experiment (subfolder)
576 576 ext : extension of the files
577 577 walk : if enabled, search inside the day subdirectories (doypath)
578 578
579 579 Return:
580 580 generator with the full path of last filename
581 581 """
582 582
583 583 if walk:
584 584 folders = self.find_folders(
585 585 path, startDate, endDate, folderfmt, last=True)
586 586 else:
587 587 folders = path.split(',')
588 588
589 589 return self.find_files(
590 590 folders, ext, filefmt, startDate, endDate, expLabel, last=True)
591 591
592 592 def setNextFile(self):
593 593 """Set the next file to be readed open it and parse de file header"""
594 594
595 595 while True:
596 596 if self.fp != None:
597 597 self.fp.close()
598 598
599 599 if self.online:
600 600 newFile = self.setNextFileOnline()
601 601 else:
602 602 newFile = self.setNextFileOffline()
603 603
604 604 if not(newFile):
605 605 if self.online:
606 606 raise schainpy.admin.SchainError('Timeout waiting for new files')
607 607 else:
608 608 if self.fileIndex == -1:
609 609 raise schainpy.admin.SchainWarning('No files found in the given path')
610 610 else:
611 611 raise schainpy.admin.SchainWarning('No more files to read')
612 612
613 613 if self.verifyFile(self.filename):
614 614 break
615 615
616 616 log.log('Opening file: %s' % self.filename, self.name)
617 617
618 618 self.readFirstHeader()
619 619 self.nReadBlocks = 0
620 620
621 621 def setNextFileOnline(self):
622 622 """Check for the next file to be readed in online mode.
623 623
624 624 Set:
625 625 self.filename
626 626 self.fp
627 627 self.filesize
628 628
629 629 Return:
630 630 boolean
631 631
632 632 """
633 633 nextFile = True
634 634 nextDay = False
635 635
636 636 for nFiles in range(self.nFiles+1):
637 637 for nTries in range(self.nTries):
638 638 fullfilename, filename = self.checkForRealPath(nextFile, nextDay)
639 639 if fullfilename is not None:
640 640 break
641 641 log.warning(
642 642 "Waiting %0.2f sec for the next file: \"%s\" , try %02d ..." % (self.delay, filename, nTries + 1),
643 643 self.name)
644 644 time.sleep(self.delay)
645 645 nextFile = False
646 646 continue
647 647
648 648 if fullfilename is not None:
649 649 break
650 650
651 651 self.nTries = 1
652 652 nextFile = True
653 653
654 654 if nFiles == (self.nFiles - 1):
655 655 log.log('Trying with next day...', self.name)
656 656 nextDay = True
657 657 self.nTries = 3
658 658
659 659 if fullfilename:
660 660 self.fileSize = os.path.getsize(fullfilename)
661 661 self.filename = fullfilename
662 662 self.flagIsNewFile = 1
663 663 if self.fp != None:
664 664 self.fp.close()
665 665 self.fp = self.open_file(fullfilename, self.open_mode)
666 666 self.flagNoMoreFiles = 0
667 667 self.fileIndex += 1
668 668 return 1
669 669 else:
670 670 return 0
671 671
672 672 def setNextFileOffline(self):
673 673 """Open the next file to be readed in offline mode"""
674 674
675 675 try:
676 676 filename = next(self.filenameList)
677 677 self.fileIndex +=1
678 678 except StopIteration:
679 679 self.flagNoMoreFiles = 1
680 680 return 0
681 681
682 682 self.filename = filename
683 683 self.fileSize = os.path.getsize(filename)
684 684 self.fp = self.open_file(filename, self.open_mode)
685 685 self.flagIsNewFile = 1
686 686
687 687 return 1
688 688
689 689 @staticmethod
690 690 def isDateTimeInRange(dt, startDate, endDate, startTime, endTime):
691 691 """Check if the given datetime is in range"""
692 692
693 693 if startDate <= dt.date() <= endDate:
694 694 if startTime <= dt.time() <= endTime:
695 695 return True
696 696 return False
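# Quick check (illustrative example, not part of the original source): a timestamp is accepted
# only when both its date and its time fall inside the respective ranges, e.g.
#     Reader.isDateTimeInRange(datetime.datetime(2015, 11, 1, 8, 30), datetime.date(2015, 10, 1),
#                              datetime.date(2015, 12, 31), datetime.time(6, 0), datetime.time(18, 0))  # -> True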
697 697
698 698 def verifyFile(self, filename):
699 699 """Check for a valid file
700 700
701 701 Arguments:
702 702 filename -- full path filename
703 703
704 704 Return:
705 705 boolean
706 706 """
707 707
708 708 return True
709 709
710 710 def checkForRealPath(self, nextFile, nextDay):
711 711 """Check if the next file to be readed exists"""
712 712
713 713 raise NotImplementedError
714 714
715 715 def readFirstHeader(self):
716 716 """Parse the file header"""
717 717
718 718 pass
719 719
720 720 def waitDataBlock(self, pointer_location, blocksize=None):
721 721 """
722 722 """
723 723
724 724 currentPointer = pointer_location
725 725 if blocksize is None:
726 726 neededSize = self.processingHeaderObj.blockSize # + self.basicHeaderSize
727 727 else:
728 728 neededSize = blocksize
729 729
730 730 for nTries in range(self.nTries):
731 731 self.fp.close()
732 732 self.fp = open(self.filename, 'rb')
733 733 self.fp.seek(currentPointer)
734 734
735 735 self.fileSize = os.path.getsize(self.filename)
736 736 currentSize = self.fileSize - currentPointer
737 737
738 738 if (currentSize >= neededSize):
739 739 return 1
740 740
741 741 log.warning(
742 742 "Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries + 1),
743 743 self.name
744 744 )
745 745 time.sleep(self.delay)
746 746
747 747 return 0
748 748
749 749 class JRODataReader(Reader):
750 750
751 751 utc = 0
752 752 nReadBlocks = 0
753 753 foldercounter = 0
754 754 firstHeaderSize = 0
755 755 basicHeaderSize = 24
756 756 __isFirstTimeOnline = 1
757 757 filefmt = "*%Y%j***"
758 758 folderfmt = "*%Y%j"
759 759 __attrs__ = ['path', 'startDate', 'endDate', 'startTime', 'endTime', 'online', 'delay', 'walk']
760 760
761 761 def getDtypeWidth(self):
762 762
763 763 dtype_index = get_dtype_index(self.dtype)
764 764 dtype_width = get_dtype_width(dtype_index)
765 765
766 766 return dtype_width
767 767
768 768 def checkForRealPath(self, nextFile, nextDay):
769 769 """Check if the next file to be readed exists.
770 770
771 771 Example :
772 772 the correct file name is .../.../D2009307/P2009307367.ext
773 773 
774 774 Then the function tries the following combinations
775 775 .../.../y2009307367.ext
776 776 .../.../Y2009307367.ext
777 777 .../.../x2009307/y2009307367.ext
778 778 .../.../x2009307/Y2009307367.ext
779 779 .../.../X2009307/y2009307367.ext
780 780 .../.../X2009307/Y2009307367.ext
781 781 where, in this case, the last letter combination is identical to the file being searched for
782 782
783 783 Return:
784 784 str -- fullpath of the file
785 785 """
786 786
787 787
788 788 if nextFile:
789 789 self.set += 1
790 790 foldercounter = 0 if nextDay else self.foldercounter # keep foldercounter defined even when it is not a new day
791 791 if nextDay:
792 792 self.set = 0
793 793 self.doy += 1
794 794 prefixDirList = [None, 'd', 'D']
795 795 if self.ext.lower() == ".r": # voltage
796 796 prefixFileList = ['d', 'D']
797 797 elif self.ext.lower() == ".pdata": # spectra
798 798 prefixFileList = ['p', 'P']
799 799
800 800 # scan through the possible prefix combinations
801 801 for prefixDir in prefixDirList:
802 802 thispath = self.path
803 803 if prefixDir != None:
804 804 # build the directory name xYYYYDDD (x=d or x=D)
805 805 if foldercounter == 0:
806 806 thispath = os.path.join(self.path, "%s%04d%03d" %
807 807 (prefixDir, self.year, self.doy))
808 808 else:
809 809 thispath = os.path.join(self.path, "%s%04d%03d_%02d" % (
810 810 prefixDir, self.year, self.doy, foldercounter))
811 811 for prefixFile in prefixFileList: # scan both possible case variants of the file prefix
812 812 # build the file name xYYYYDDDSSS.ext
813 813 filename = "%s%04d%03d%03d%s" % (prefixFile, self.year, self.doy, self.set, self.ext)
814 814 fullfilename = os.path.join(
815 815 thispath, filename)
816 816
817 817 if os.path.exists(fullfilename):
818 818 return fullfilename, filename
819 819
820 820 return None, filename
821 821
822 822 def __waitNewBlock(self):
823 823 """
824 824 Returns 1 if a new data block was found, 0 otherwise.
825 825 
826 826 If the reading mode is offline it always returns 0.
827 827 """
828 828 if not self.online:
829 829 return 0
830 830
831 831 if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
832 832 return 0
833 833
834 834 currentPointer = self.fp.tell()
835 835
836 836 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
837 837
838 838 for nTries in range(self.nTries):
839 839
840 840 self.fp.close()
841 841 self.fp = open(self.filename, 'rb')
842 842 self.fp.seek(currentPointer)
843 843
844 844 self.fileSize = os.path.getsize(self.filename)
845 845 currentSize = self.fileSize - currentPointer
846 846
847 847 if (currentSize >= neededSize):
848 848 self.basicHeaderObj.read(self.fp)
849 849 return 1
850 850
851 851 if self.fileSize == self.fileSizeByHeader:
852 852 # self.flagEoF = True
853 853 return 0
854 854
855 855 print("[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries + 1))
856 856 time.sleep(self.delay)
857 857
858 858 return 0
859 859
860 860 def __setNewBlock(self):
861 861
862 862 if self.fp == None:
863 863 return 0
864 864
865 865 if self.flagIsNewFile:
866 866 self.lastUTTime = self.basicHeaderObj.utc
867 867 return 1
868 868
869 869 if self.realtime:
870 870 self.flagDiscontinuousBlock = 1
871 871 if not(self.setNextFile()):
872 872 return 0
873 873 else:
874 874 return 1
875 875
876 876 currentSize = self.fileSize - self.fp.tell()
877 877 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
878 878
879 879 if (currentSize >= neededSize):
880 880 self.basicHeaderObj.read(self.fp)
881 881 self.lastUTTime = self.basicHeaderObj.utc
882 882 return 1
883 883
884 884 if self.__waitNewBlock():
885 885 self.lastUTTime = self.basicHeaderObj.utc
886 886 return 1
887 887
888 888 if not(self.setNextFile()):
889 889 return 0
890 890
891 891 deltaTime = self.basicHeaderObj.utc - self.lastUTTime
892 892 self.lastUTTime = self.basicHeaderObj.utc
893 893
894 894 self.flagDiscontinuousBlock = 0
895 895
896 896 if deltaTime > self.maxTimeStep:
897 897 self.flagDiscontinuousBlock = 1
898 898
899 899 return 1
900 900
901 901 def readNextBlock(self):
902 902
903 903 while True:
904 904 if not(self.__setNewBlock()):
905 905 continue
906 906
907 907 if not(self.readBlock()):
908 908 return 0
909 909
910 910 self.getBasicHeader()
911 911
912 912 if not self.isDateTimeInRange(self.dataOut.datatime, self.startDate, self.endDate, self.startTime, self.endTime):
913 913 print("[Reading] Block No. %d/%d -> %s [Skipping]" % (self.nReadBlocks,
914 914 self.processingHeaderObj.dataBlocksPerFile,
915 915 self.dataOut.datatime.ctime()))
916 916 continue
917 917
918 918 break
919 919
920 920 if self.verbose:
921 921 print("[Reading] Block No. %d/%d -> %s" % (self.nReadBlocks,
922 922 self.processingHeaderObj.dataBlocksPerFile,
923 923 self.dataOut.datatime.ctime()))
924 924 return 1
925 925
926 926 def readFirstHeader(self):
927 927
928 928 self.basicHeaderObj.read(self.fp)
929 929 self.systemHeaderObj.read(self.fp)
930 930 self.radarControllerHeaderObj.read(self.fp)
931 931 self.processingHeaderObj.read(self.fp)
932 932 self.firstHeaderSize = self.basicHeaderObj.size
933 933
934 934 datatype = int(numpy.log2((self.processingHeaderObj.processFlags &
935 935 PROCFLAG.DATATYPE_MASK)) - numpy.log2(PROCFLAG.DATATYPE_CHAR))
936 936 if datatype == 0:
937 937 datatype_str = numpy.dtype([('real', '<i1'), ('imag', '<i1')])
938 938 elif datatype == 1:
939 939 datatype_str = numpy.dtype([('real', '<i2'), ('imag', '<i2')])
940 940 elif datatype == 2:
941 941 datatype_str = numpy.dtype([('real', '<i4'), ('imag', '<i4')])
942 942 elif datatype == 3:
943 943 datatype_str = numpy.dtype([('real', '<i8'), ('imag', '<i8')])
944 944 elif datatype == 4:
945 945 datatype_str = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
946 946 elif datatype == 5:
947 947 datatype_str = numpy.dtype([('real', '<f8'), ('imag', '<f8')])
948 948 else:
949 949 raise ValueError('Data type was not defined')
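# Note added for clarity: each DATATYPE_* flag is expected to be PROCFLAG.DATATYPE_CHAR shifted
# left by its datatype index, so log2(flags & DATATYPE_MASK) - log2(DATATYPE_CHAR) recovers that
# index; e.g. a masked value equal to DATATYPE_CHAR << 4 gives datatype == 4 (complex float32).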
950 950
951 951 self.dtype = datatype_str
952 952 #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
953 953 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + \
954 954 self.firstHeaderSize + self.basicHeaderSize * \
955 955 (self.processingHeaderObj.dataBlocksPerFile - 1)
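# Layout note (added): the expected file size is the full first header plus dataBlocksPerFile
# block payloads, with an additional basic header in front of every block after the first one,
# which is exactly what the expression above adds up.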
956 956 # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
957 957 # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
958 958 self.getBlockDimension()
959 959
960 960 def verifyFile(self, filename):
961 961
962 962 flag = True
963 963
964 964 try:
965 965 fp = open(filename, 'rb')
966 966 except IOError:
967 967 log.error("File {} can't be opened".format(filename), self.name)
968 968 return False
969 969
970 970 if self.online and self.waitDataBlock(0):
971 971 pass
972 972
973 973 basicHeaderObj = BasicHeader(LOCALTIME)
974 974 systemHeaderObj = SystemHeader()
975 975 radarControllerHeaderObj = RadarControllerHeader()
976 976 processingHeaderObj = ProcessingHeader()
977 977
978 978 if not(basicHeaderObj.read(fp)):
979 979 flag = False
980 980 if not(systemHeaderObj.read(fp)):
981 981 flag = False
982 982 if not(radarControllerHeaderObj.read(fp)):
983 983 flag = False
984 984 if not(processingHeaderObj.read(fp)):
985 985 flag = False
986 986 if not self.online:
987 987 dt1 = basicHeaderObj.datatime
988 988 pos = self.fileSize-processingHeaderObj.blockSize-24
989 989 if pos<0:
990 990 flag = False
991 991 log.error('Invalid size for file: {}'.format(self.filename), self.name)
992 992 else:
993 993 fp.seek(pos)
994 994 if not(basicHeaderObj.read(fp)):
995 995 flag = False
996 996 dt2 = basicHeaderObj.datatime
997 997 if not self.isDateTimeInRange(dt1, self.startDate, self.endDate, self.startTime, self.endTime) and not \
998 998 self.isDateTimeInRange(dt2, self.startDate, self.endDate, self.startTime, self.endTime):
999 999 flag = False
1000 1000
1001 1001 fp.close()
1002 1002 return flag
1003 1003
1004 1004 def findDatafiles(self, path, startDate=None, endDate=None, expLabel='', ext='.r', walk=True, include_path=False):
1005 1005
1006 1006 path_empty = True
1007 1007
1008 1008 dateList = []
1009 1009 pathList = []
1010 1010
1011 1011 multi_path = path.split(',')
1012 1012
1013 1013 if not walk:
1014 1014
1015 1015 for single_path in multi_path:
1016 1016
1017 1017 if not os.path.isdir(single_path):
1018 1018 continue
1019 1019
1020 1020 fileList = glob.glob1(single_path, "*" + ext)
1021 1021
1022 1022 if not fileList:
1023 1023 continue
1024 1024
1025 1025 path_empty = False
1026 1026
1027 1027 fileList.sort()
1028 1028
1029 1029 for thisFile in fileList:
1030 1030
1031 1031 if not os.path.isfile(os.path.join(single_path, thisFile)):
1032 1032 continue
1033 1033
1034 1034 if not isRadarFile(thisFile):
1035 1035 continue
1036 1036
1037 1037 if not isFileInDateRange(thisFile, startDate, endDate):
1038 1038 continue
1039 1039
1040 1040 thisDate = getDateFromRadarFile(thisFile)
1041 1041
1042 1042 if thisDate in dateList or single_path in pathList:
1043 1043 continue
1044 1044
1045 1045 dateList.append(thisDate)
1046 1046 pathList.append(single_path)
1047 1047
1048 1048 else:
1049 1049 for single_path in multi_path:
1050 1050
1051 1051 if not os.path.isdir(single_path):
1052 1052 continue
1053 1053
1054 1054 dirList = []
1055 1055
1056 1056 for thisPath in os.listdir(single_path):
1057 1057
1058 1058 if not os.path.isdir(os.path.join(single_path, thisPath)):
1059 1059 continue
1060 1060
1061 1061 if not isRadarFolder(thisPath):
1062 1062 continue
1063 1063
1064 1064 if not isFolderInDateRange(thisPath, startDate, endDate):
1065 1065 continue
1066 1066
1067 1067 dirList.append(thisPath)
1068 1068
1069 1069 if not dirList:
1070 1070 continue
1071 1071
1072 1072 dirList.sort()
1073 1073
1074 1074 for thisDir in dirList:
1075 1075
1076 1076 datapath = os.path.join(single_path, thisDir, expLabel)
1077 1077 fileList = glob.glob1(datapath, "*" + ext)
1078 1078
1079 1079 if not fileList:
1080 1080 continue
1081 1081
1082 1082 path_empty = False
1083 1083
1084 1084 thisDate = getDateFromRadarFolder(thisDir)
1085 1085
1086 1086 pathList.append(datapath)
1087 1087 dateList.append(thisDate)
1088 1088
1089 1089 dateList.sort()
1090 1090
1091 1091 if walk:
1092 1092 pattern_path = os.path.join(multi_path[0], "[dYYYYDDD]", expLabel)
1093 1093 else:
1094 1094 pattern_path = multi_path[0]
1095 1095
1096 1096 if path_empty:
1097 1097 raise schainpy.admin.SchainError("[Reading] No *%s files in %s for %s to %s" % (ext, pattern_path, startDate, endDate))
1098 1098 else:
1099 1099 if not dateList:
1100 1100 raise schainpy.admin.SchainError("[Reading] Date range selected invalid [%s - %s]: No *%s files in %s)" % (startDate, endDate, ext, path))
1101 1101
1102 1102 if include_path:
1103 1103 return dateList, pathList
1104 1104
1105 1105 return dateList
1106 1106
1107 1107 def setup(self, **kwargs):
1108 1108
1109 1109 self.set_kwargs(**kwargs)
1110 1110 if not self.ext.startswith('.'):
1111 1111 self.ext = '.{}'.format(self.ext)
1112 1112
1113 1113 if self.server is not None:
1114 1114 if 'tcp://' in self.server:
1115 1115 address = self.server
1116 1116 else:
1117 1117 address = 'ipc:///tmp/%s' % self.server
1118 1118 self.server = address
1119 1119 self.context = zmq.Context()
1120 1120 self.receiver = self.context.socket(zmq.PULL)
1121 1121 self.receiver.connect(self.server)
1122 1122 time.sleep(0.5)
1123 1123 print('[Starting] ReceiverData from {}'.format(self.server))
1124 1124 else:
1125 1125 self.server = None
1126 1126 if self.path == None:
1127 1127 raise ValueError("[Reading] The path is not valid")
1128 1128
1129 1129 if self.online:
1130 1130 log.log("[Reading] Searching files in online mode...", self.name)
1131 1131
1132 1132 for nTries in range(self.nTries):
1133 1133 fullpath = self.searchFilesOnLine(self.path, self.startDate,
1134 1134 self.endDate, self.expLabel, self.ext, self.walk,
1135 1135 self.filefmt, self.folderfmt)
1136 1136
1137 1137 try:
1138 1138 fullpath = next(fullpath)
1139 1139 except:
1140 1140 fullpath = None
1141 1141
1142 1142 if fullpath:
1143 1143 break
1144 1144
1145 1145 log.warning(
1146 1146 'Waiting {} sec for a valid file in {}: try {} ...'.format(
1147 1147 self.delay, self.path, nTries + 1),
1148 1148 self.name)
1149 1149 time.sleep(self.delay)
1150 1150
1151 1151 if not(fullpath):
1152 1152 raise schainpy.admin.SchainError(
1153 1153 'There isn\'t any valid file in {}'.format(self.path))
1154 1154
1155 1155 pathname, filename = os.path.split(fullpath)
1156 1156 self.year = int(filename[1:5])
1157 1157 self.doy = int(filename[5:8])
1158 1158 self.set = int(filename[8:11]) - 1
1159 1159 else:
1160 1160 log.log("Searching files in {}".format(self.path), self.name)
1161 1161 self.filenameList = self.searchFilesOffLine(self.path, self.startDate,
1162 1162 self.endDate, self.expLabel, self.ext, self.walk, self.filefmt, self.folderfmt)
1163 1163
1164 1164 self.setNextFile()
1165 1165
1166 1166 return
1167 1167
1168 1168 def getBasicHeader(self):
1169 1169
1170 1170 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond / \
1171 1171 1000. + self.profileIndex * self.radarControllerHeaderObj.ippSeconds
1172 1172
1173 1173 self.dataOut.flagDiscontinuousBlock = self.flagDiscontinuousBlock
1174 1174
1175 1175 self.dataOut.timeZone = self.basicHeaderObj.timeZone
1176 1176
1177 1177 self.dataOut.dstFlag = self.basicHeaderObj.dstFlag
1178 1178
1179 1179 self.dataOut.errorCount = self.basicHeaderObj.errorCount
1180 1180
1181 1181 self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime
1182 1182
1183 1183 self.dataOut.ippSeconds = self.radarControllerHeaderObj.ippSeconds / self.nTxs
1184
1185 # self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock*self.nTxs
1186
1184
1187 1185 def getFirstHeader(self):
1188 1186
1189 1187 raise NotImplementedError
1190 1188
1191 1189 def getData(self):
1192 1190
1193 1191 raise NotImplementedError
1194 1192
1195 1193 def hasNotDataInBuffer(self):
1196 1194
1197 1195 raise NotImplementedError
1198 1196
1199 1197 def readBlock(self):
1200 1198
1201 1199 raise NotImplementedError
1202 1200
1203 1201 def isEndProcess(self):
1204 1202
1205 1203 return self.flagNoMoreFiles
1206 1204
1207 1205 def printReadBlocks(self):
1208 1206
1209 1207 print("[Reading] Number of read blocks per file %04d" % self.nReadBlocks)
1210 1208
1211 1209 def printTotalBlocks(self):
1212 1210
1213 1211 print("[Reading] Number of read blocks %04d" % self.nTotalBlocks)
1214 1212
1215 1213 def run(self, **kwargs):
1216 1214 """
1217 1215
1218 1216 Arguments:
1219 1217 path :
1220 1218 startDate :
1221 1219 endDate :
1222 1220 startTime :
1223 1221 endTime :
1224 1222 set :
1225 1223 expLabel :
1226 1224 ext :
1227 1225 online :
1228 1226 delay :
1229 1227 walk :
1230 1228 getblock :
1231 1229 nTxs :
1232 1230 realtime :
1233 1231 blocksize :
1234 1232 blocktime :
1235 1233 skip :
1236 1234 cursor :
1237 1235 warnings :
1238 1236 server :
1239 1237 verbose :
1240 1238 format :
1241 1239 oneDDict :
1242 1240 twoDDict :
1243 1241 independentParam :
1244 1242 """
1245 1243
1246 1244 if not(self.isConfig):
1247 1245 self.setup(**kwargs)
1248 1246 self.isConfig = True
1249 1247 if self.server is None:
1250 1248 self.getData()
1251 1249 else:
1252 1250 self.getFromServer()
1253 1251
1254 1252
1255 1253 class JRODataWriter(Reader):
1256 1254
1257 1255 """
1258 1256 Esta clase permite escribir datos a archivos procesados (.r o ,pdata). La escritura
1259 1257 de los datos siempre se realiza por bloques.
1260 1258 """
1261 1259
1262 1260 setFile = None
1263 1261 profilesPerBlock = None
1264 1262 blocksPerFile = None
1265 1263 nWriteBlocks = 0
1266 1264 fileDate = None
1267 1265
1268 1266 def __init__(self, dataOut=None):
1269 1267 raise NotImplementedError
1270 1268
1271 1269 def hasAllDataInBuffer(self):
1272 1270 raise NotImplementedError
1273 1271
1274 1272 def setBlockDimension(self):
1275 1273 raise NotImplementedError
1276 1274
1277 1275 def writeBlock(self):
1278 1276 raise NotImplementedError
1279 1277
1280 1278 def putData(self):
1281 1279 raise NotImplementedError
1282 1280
1283 1281 def getDtypeWidth(self):
1284 1282
1285 1283 dtype_index = get_dtype_index(self.dtype)
1286 1284 dtype_width = get_dtype_width(dtype_index)
1287 1285
1288 1286 return dtype_width
1289 1287
1290 1288 def getProcessFlags(self):
1291 1289
1292 1290 processFlags = 0
1293 1291
1294 1292 dtype_index = get_dtype_index(self.dtype)
1295 1293 procflag_dtype = get_procflag_dtype(dtype_index)
1296 1294
1297 1295 processFlags += procflag_dtype
1298 1296
1299 1297 if self.dataOut.flagDecodeData:
1300 1298 processFlags += PROCFLAG.DECODE_DATA
1301 1299
1302 1300 if self.dataOut.flagDeflipData:
1303 1301 processFlags += PROCFLAG.DEFLIP_DATA
1304 1302
1305 1303 if self.dataOut.code is not None:
1306 1304 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
1307 1305
1308 1306 if self.dataOut.nCohInt > 1:
1309 1307 processFlags += PROCFLAG.COHERENT_INTEGRATION
1310 1308
1311 1309 if self.dataOut.type == "Spectra":
1312 1310 if self.dataOut.nIncohInt > 1:
1313 1311 processFlags += PROCFLAG.INCOHERENT_INTEGRATION
1314 1312
1315 1313 if self.dataOut.data_dc is not None:
1316 1314 processFlags += PROCFLAG.SAVE_CHANNELS_DC
1317 1315
1318 1316 if self.dataOut.flagShiftFFT:
1319 1317 processFlags += PROCFLAG.SHIFT_FFT_DATA
1320 1318
1321 1319 return processFlags
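# Illustrative note (added): the PROCFLAG members appear to be disjoint bit flags, so the value
# returned above is effectively the bitwise OR of everything that applies; e.g. for decoded,
# coherently integrated data it is
#     get_procflag_dtype(dtype_index) + PROCFLAG.DECODE_DATA + PROCFLAG.COHERENT_INTEGRATION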
1322 1320
1323 1321 def setBasicHeader(self):
1324 1322
1325 1323 self.basicHeaderObj.size = self.basicHeaderSize # bytes
1326 1324 self.basicHeaderObj.version = self.versionFile
1327 1325 self.basicHeaderObj.dataBlock = self.nTotalBlocks
1328 1326 utc = numpy.floor(self.dataOut.utctime)
1329 1327 milisecond = (self.dataOut.utctime - utc) * 1000.0
1330 1328 self.basicHeaderObj.utc = utc
1331 1329 self.basicHeaderObj.miliSecond = milisecond
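# Worked example (added for clarity): utctime = 1577836800.25 gives utc = 1577836800.0 and
# milisecond = 250.0, i.e. the fractional second is stored separately in the basic header.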
1332 1330 self.basicHeaderObj.timeZone = self.dataOut.timeZone
1333 1331 self.basicHeaderObj.dstFlag = self.dataOut.dstFlag
1334 1332 self.basicHeaderObj.errorCount = self.dataOut.errorCount
1335 1333
1336 1334 def setFirstHeader(self):
1337 1335 """
1338 1336 Gets a copy of the First Header
1339 1337
1340 1338 Affected:
1341 1339
1342 1340 self.basicHeaderObj
1343 1341 self.systemHeaderObj
1344 1342 self.radarControllerHeaderObj
1345 1343 self.processingHeaderObj
1346 1344
1347 1345 Return:
1348 1346 None
1349 1347 """
1350 1348
1351 1349 raise NotImplementedError
1352 1350
1353 1351 def __writeFirstHeader(self):
1354 1352 """
1355 1353 Writes the first header of the file, i.e. the Basic header and the Long header (SystemHeader, RadarControllerHeader, ProcessingHeader)
1356 1354
1357 1355 Affected:
1358 1356 __dataType
1359 1357
1360 1358 Return:
1361 1359 None
1362 1360 """
1363 1361
1364 1362 # compute parameters
1365 1363
1366 1364 sizeLongHeader = self.systemHeaderObj.size + \
1367 1365 self.radarControllerHeaderObj.size + self.processingHeaderObj.size
1368 1366 self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader
1369 1367
1370 1368 self.basicHeaderObj.write(self.fp)
1371 1369 self.systemHeaderObj.write(self.fp)
1372 1370 self.radarControllerHeaderObj.write(self.fp)
1373 1371 self.processingHeaderObj.write(self.fp)
1374 1372
1375 1373 def __setNewBlock(self):
1376 1374 """
1377 1375 If it is a new file, the First Header is written; otherwise only the Basic Header is written
1378 1376 
1379 1377 Return:
1380 1378 0 : if nothing could be written
1381 1379 1 : if the Basic or the First Header was written
1382 1380 """
1383 1381 if self.fp == None:
1384 1382 self.setNextFile()
1385 1383
1386 1384 if self.flagIsNewFile:
1387 1385 return 1
1388 1386
1389 1387 if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
1390 1388 self.basicHeaderObj.write(self.fp)
1391 1389 return 1
1392 1390
1393 1391 if not(self.setNextFile()):
1394 1392 return 0
1395 1393
1396 1394 return 1
1397 1395
1398 1396 def writeNextBlock(self):
1399 1397 """
1400 1398 Selects the next block of data and writes it to a file
1401 1399 
1402 1400 Return:
1403 1401 0 : if the data block could not be written
1404 1402 1 : if the data block was written
1405 1403 """
1406 1404 if not(self.__setNewBlock()):
1407 1405 return 0
1408 1406
1409 1407 self.writeBlock()
1410 1408
1411 1409 print("[Writing] Block No. %d/%d" % (self.blockIndex,
1412 1410 self.processingHeaderObj.dataBlocksPerFile))
1413 1411
1414 1412 return 1
1415 1413
1416 1414 def setNextFile(self):
1417 1415 """Determina el siguiente file que sera escrito
1418 1416
1419 1417 Affected:
1420 1418 self.filename
1421 1419 self.subfolder
1422 1420 self.fp
1423 1421 self.setFile
1424 1422 self.flagIsNewFile
1425 1423
1426 1424 Return:
1427 1425 0 : if the file cannot be written
1428 1426 1 : if the file is ready to be written
1429 1427 """
1430 1428 ext = self.ext
1431 1429 path = self.path
1432 1430
1433 1431 if self.fp != None:
1434 1432 self.fp.close()
1435 1433
1436 1434 if not os.path.exists(path):
1437 1435 os.mkdir(path)
1438 1436
1439 1437 timeTuple = time.localtime(self.dataOut.utctime)
1440 1438 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year, timeTuple.tm_yday)
1441 1439
1442 1440 fullpath = os.path.join(path, subfolder)
1443 1441 setFile = self.setFile
1444 1442
1445 1443 if not(os.path.exists(fullpath)):
1446 1444 os.mkdir(fullpath)
1447 1445 setFile = -1 # initialize the set counter
1448 1446 else:
1449 1447 filesList = os.listdir(fullpath)
1450 1448 if len(filesList) > 0:
1451 1449 filesList = sorted(filesList, key=str.lower)
1452 1450 filen = filesList[-1]
1453 1451 # the filename should have the following format
1454 1452 # 0 1234 567 89A BCDE (hex)
1455 1453 # x YYYY DDD SSS .ext
1456 1454 if isNumber(filen[8:11]):
1457 1455 # initialize the set counter with the set of the last file
1458 1456 setFile = int(filen[8:11])
1459 1457 else:
1460 1458 setFile = -1
1461 1459 else:
1462 1460 setFile = -1 # initialize the set counter
1463 1461
1464 1462 setFile += 1
1465 1463
1466 1464 # If this is a new day it resets some values
1467 1465 if self.dataOut.datatime.date() > self.fileDate:
1468 1466 setFile = 0
1469 1467 self.nTotalBlocks = 0
1470 1468
1471 1469 filen = '{}{:04d}{:03d}{:03d}{}'.format(
1472 1470 self.optchar, timeTuple.tm_year, timeTuple.tm_yday, setFile, ext)
1473 1471
1474 1472 filename = os.path.join(path, subfolder, filen)
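# Example of the resulting path (illustrative, assuming self.optchar == 'd' and ext == '.r'):
# writing on 2015-11-01 (day-of-year 305) with set 3 produces .../d2015305/d2015305003.r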
1475 1473
1476 1474 fp = open(filename, 'wb')
1477 1475
1478 1476 self.blockIndex = 0
1479 1477 self.filename = filename
1480 1478 self.subfolder = subfolder
1481 1479 self.fp = fp
1482 1480 self.setFile = setFile
1483 1481 self.flagIsNewFile = 1
1484 1482 self.fileDate = self.dataOut.datatime.date()
1485 1483 self.setFirstHeader()
1486 1484
1487 1485 print('[Writing] Opening file: %s' % self.filename)
1488 1486
1489 1487 self.__writeFirstHeader()
1490 1488
1491 1489 return 1
1492 1490
1493 1491 def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=64, set=None, ext=None, datatype=4):
1494 1492 """
1495 1493 Sets the format in which the data will be saved and writes the First Header
1496 1494
1497 1495 Inputs:
1498 1496 path : directory where data will be saved
1499 1497 profilesPerBlock : number of profiles per block
1500 1498 set : initial file set
1501 1499 datatype : An integer number that defines data type:
1502 1500 0 : int8 (1 byte)
1503 1501 1 : int16 (2 bytes)
1504 1502 2 : int32 (4 bytes)
1505 1503 3 : int64 (8 bytes)
1506 1504 4 : float32 (4 bytes)
1507 1505 5 : double64 (8 bytes)
1508 1506
1509 1507 Return:
1510 1508 0 : if the setup was not successful
1511 1509 1 : if the setup was successful
1512 1510 """
1513 1511
1514 1512 if ext == None:
1515 1513 ext = self.ext
1516 1514
1517 1515 self.ext = ext.lower()
1518 1516
1519 1517 self.path = path
1520 1518
1521 1519 if set is None:
1522 1520 self.setFile = -1
1523 1521 else:
1524 1522 self.setFile = set - 1
1525 1523
1526 1524 self.blocksPerFile = blocksPerFile
1527 1525 self.profilesPerBlock = profilesPerBlock
1528 1526 self.dataOut = dataOut
1529 1527 self.fileDate = self.dataOut.datatime.date()
1530 1528 self.dtype = self.dataOut.dtype
1531 1529
1532 1530 if datatype is not None:
1533 1531 self.dtype = get_numpy_dtype(datatype)
1534 1532
1535 1533 if not(self.setNextFile()):
1536 1534 print("[Writing] There isn't a next file")
1537 1535 return 0
1538 1536
1539 1537 self.setBlockDimension()
1540 1538
1541 1539 return 1
1542 1540
1543 1541 def run(self, dataOut, path, blocksPerFile=100, profilesPerBlock=64, set=None, ext=None, datatype=4, **kwargs):
1544 1542
1545 1543 if not(self.isConfig):
1546 1544
1547 1545 self.setup(dataOut, path, blocksPerFile, profilesPerBlock=profilesPerBlock,
1548 1546 set=set, ext=ext, datatype=datatype, **kwargs)
1549 1547 self.isConfig = True
1550 1548
1551 1549 self.dataOut = dataOut
1552 1550 self.putData()
1553 1551 return self.dataOut
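# Usage sketch (illustrative only; the writer variable and parameter values are hypothetical):
# a concrete writer subclass is typically driven once per dataOut inside a processing chain, e.g.
#     writer.run(dataOut, path='/data/out', blocksPerFile=100, profilesPerBlock=64, datatype=4)
# which lazily calls setup() on the first call and putData() on every call.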
1554 1552
1555 1553 @MPDecorator
1556 1554 class printInfo(Operation):
1557 1555
1558 1556 def __init__(self):
1559 1557
1560 1558 Operation.__init__(self)
1561 1559 self.__printInfo = True
1562 1560
1563 1561 def run(self, dataOut, headers = ['systemHeaderObj', 'radarControllerHeaderObj', 'processingHeaderObj']):
1564 1562 if self.__printInfo == False:
1565 1563 return
1566 1564
1567 1565 for header in headers:
1568 1566 if hasattr(dataOut, header):
1569 1567 obj = getattr(dataOut, header)
1570 1568 if hasattr(obj, 'printInfo'):
1571 1569 obj.printInfo()
1572 1570 else:
1573 1571 print(obj)
1574 1572 else:
1575 1573 log.warning('Header {} Not found in object'.format(header))
1576 1574
1577 1575 self.__printInfo = False
@@ -1,793 +1,793
1 1 '''
2 2 Created on Jul 3, 2014
3 3
4 4 @author: roj-idl71
5 5 '''
6 6 # SUBCHANNELS INSTEAD OF CHANNELS
7 7 # BENCHMARKS -> ISSUES WITH LARGE FILES -> INCONSISTENT OVER TIME
8 8 # VERSION UPDATE
9 9 # HEADERS
10 10 # WRITING MODULE
11 11 # METADATA
12 12
13 13 import os
14 14 import time
15 15 import datetime
16 16 import numpy
17 17 import timeit
18 18 from fractions import Fraction
19 19 from time import time
20 20 from time import sleep
21 21
22 22 import schainpy.admin
23 23 from schainpy.model.data.jroheaderIO import RadarControllerHeader, SystemHeader
24 24 from schainpy.model.data.jrodata import Voltage
25 25 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
26 26
27 27 import pickle
28 28 try:
29 29 import digital_rf
30 30 except:
31 31 pass
32 32
33 33
34 34 class DigitalRFReader(ProcessingUnit):
35 35 '''
36 36 Reader for Digital RF format data
37 37 '''
38 38
39 39 def __init__(self):
40 40 '''
41 41 Constructor
42 42 '''
43 43
44 44 ProcessingUnit.__init__(self)
45 45
46 46 self.dataOut = Voltage()
47 47 self.__printInfo = True
48 48 self.__flagDiscontinuousBlock = False
49 49 self.__bufferIndex = 9999999
50 50 self.__codeType = 0
51 51 self.__ippKm = None
52 52 self.__nCode = None
53 53 self.__nBaud = None
54 54 self.__code = None
55 55 self.dtype = None
56 56 self.oldAverage = None
57 57 self.path = None
58 58
59 59 def close(self):
60 60 print('Average reading time for digital rf format is ', self.oldAverage * 1000)
61 61 return
62 62
63 63 def __getCurrentSecond(self):
64 64
65 65 return self.__thisUnixSample / self.__sample_rate
66 66
67 67 thisSecond = property(__getCurrentSecond, "I'm the 'thisSecond' property.")
68 68
69 69 def __setFileHeader(self):
70 70 '''
71 71 In this method will be initialized every parameter of dataOut object (header, no data)
72 72 '''
73 73 ippSeconds = 1.0 * self.__nSamples / self.__sample_rate
74 74
75 75 nProfiles = 1.0 / ippSeconds # Number of profiles in one second
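# Added note: with ippSeconds = nSamples / sample_rate, this is simply
# nProfiles = sample_rate / nSamples, i.e. how many IPPs (profiles) fit in one second.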
76 76
77 77 try:
78 78 self.dataOut.radarControllerHeaderObj = RadarControllerHeader(
79 79 self.__radarControllerHeader)
80 80 except:
81 81 self.dataOut.radarControllerHeaderObj = RadarControllerHeader(
82 82 txA=0,
83 83 txB=0,
84 84 nWindows=1,
85 85 nHeights=self.__nSamples,
86 86 firstHeight=self.__firstHeigth,
87 87 deltaHeight=self.__deltaHeigth,
88 88 codeType=self.__codeType,
89 89 nCode=self.__nCode, nBaud=self.__nBaud,
90 90 code=self.__code)
91 91
92 92 try:
93 93 self.dataOut.systemHeaderObj = SystemHeader(self.__systemHeader)
94 94 except:
95 95 self.dataOut.systemHeaderObj = SystemHeader(nSamples=self.__nSamples,
96 96 nProfiles=nProfiles,
97 97 nChannels=len(
98 98 self.__channelList),
99 99 adcResolution=14)
100 100 self.dataOut.type = "Voltage"
101 101
102 102 self.dataOut.data = None
103 103
104 104 self.dataOut.dtype = self.dtype
105 105
106 106 # self.dataOut.nChannels = 0
107 107
108 108 # self.dataOut.nHeights = 0
109 109
110 110 self.dataOut.nProfiles = int(nProfiles)
111 111
112 112 self.dataOut.heightList = self.__firstHeigth + \
113 113 numpy.arange(self.__nSamples, dtype=numpy.float) * \
114 114 self.__deltaHeigth
115 115
116 116 self.dataOut.channelList = list(range(self.__num_subchannels))
117 117
118 self.dataOut.blocksize = self.dataOut.getNChannels() * self.dataOut.getNHeights()
118 self.dataOut.blocksize = self.dataOut.nChannels * self.dataOut.nHeights
119 119
120 120 # self.dataOut.channelIndexList = None
121 121
122 122 self.dataOut.flagNoData = True
123 123
124 124 self.dataOut.flagDataAsBlock = False
125 125 # Set to TRUE if the data is discontinuous
126 126 self.dataOut.flagDiscontinuousBlock = False
127 127
128 128 self.dataOut.utctime = None
129 129
130 130 # timezone like jroheader, difference in minutes between UTC and localtime
131 131 self.dataOut.timeZone = self.__timezone / 60
132 132
133 133 self.dataOut.dstFlag = 0
134 134
135 135 self.dataOut.errorCount = 0
136 136
137 137 try:
138 138 self.dataOut.nCohInt = self.fixed_metadata_dict.get(
139 139 'nCohInt', self.nCohInt)
140 140
141 141 # assume the data is already decoded
142 142 self.dataOut.flagDecodeData = self.fixed_metadata_dict.get(
143 143 'flagDecodeData', self.flagDecodeData)
144 144
145 145 # assume the data is not flipped
146 146 self.dataOut.flagDeflipData = self.fixed_metadata_dict['flagDeflipData']
147 147
148 148 self.dataOut.flagShiftFFT = self.fixed_metadata_dict['flagShiftFFT']
149 149
150 150 self.dataOut.useLocalTime = self.fixed_metadata_dict['useLocalTime']
151 151 except:
152 152 pass
153 153
154 154 self.dataOut.ippSeconds = ippSeconds
155 155
156 156 # Time interval between profiles
157 157 # self.dataOut.timeInterval = self.dataOut.ippSeconds * self.dataOut.nCohInt
158 158
159 159 self.dataOut.frequency = self.__frequency
160 160
161 161 self.dataOut.realtime = self.__online
162 162
163 163 def findDatafiles(self, path, startDate=None, endDate=None):
164 164
165 165 if not os.path.isdir(path):
166 166 return []
167 167
168 168 try:
169 169 digitalReadObj = digital_rf.DigitalRFReader(
170 170 path, load_all_metadata=True)
171 171 except:
172 172 digitalReadObj = digital_rf.DigitalRFReader(path)
173 173
174 174 channelNameList = digitalReadObj.get_channels()
175 175
176 176 if not channelNameList:
177 177 return []
178 178
179 179 metadata_dict = digitalReadObj.get_rf_file_metadata(channelNameList[0])
180 180
181 181 sample_rate = metadata_dict['sample_rate'][0]
182 182
183 183 this_metadata_file = digitalReadObj.get_metadata(channelNameList[0])
184 184
185 185 try:
186 186 timezone = this_metadata_file['timezone'].value
187 187 except:
188 188 timezone = 0
189 189
190 190 startUTCSecond, endUTCSecond = digitalReadObj.get_bounds(
191 191 channelNameList[0]) / sample_rate - timezone
192 192
193 193 startDatetime = datetime.datetime.utcfromtimestamp(startUTCSecond)
194 194 endDatatime = datetime.datetime.utcfromtimestamp(endUTCSecond)
195 195
196 196 if not startDate:
197 197 startDate = startDatetime.date()
198 198
199 199 if not endDate:
200 200 endDate = endDatatime.date()
201 201
202 202 dateList = []
203 203
204 204 thisDatetime = startDatetime
205 205
206 206 while(thisDatetime <= endDatatime):
207 207
208 208 thisDate = thisDatetime.date()
209 209
210 210 if thisDate > endDate:
211 211 break
212 212 
213 213 if thisDate >= startDate:
214 214 dateList.append(thisDate)
215 215 
216 216 # always advance one day, even when thisDate is before startDate
217 217 thisDatetime += datetime.timedelta(1)
218 218
219 219 return dateList
220 220
221 221 def setup(self, path=None,
222 222 startDate=None,
223 223 endDate=None,
224 224 startTime=datetime.time(0, 0, 0),
225 225 endTime=datetime.time(23, 59, 59),
226 226 channelList=None,
227 227 nSamples=None,
228 228 online=False,
229 229 delay=60,
230 230 buffer_size=1024,
231 231 ippKm=None,
232 232 nCohInt=1,
233 233 nCode=1,
234 234 nBaud=1,
235 235 flagDecodeData=False,
236 236 code=numpy.ones((1, 1), dtype=numpy.int),
237 237 **kwargs):
238 238 '''
239 239 In this method we should set all initial parameters.
240 240
241 241 Inputs:
242 242 path
243 243 startDate
244 244 endDate
245 245 startTime
246 246 endTime
247 247 set
248 248 expLabel
249 249 ext
250 250 online
251 251 delay
252 252 '''
253 253 self.path = path
254 254 self.nCohInt = nCohInt
255 255 self.flagDecodeData = flagDecodeData
256 256 self.i = 0
257 257 if not os.path.isdir(path):
258 258 raise ValueError("[Reading] Directory %s does not exist" % path)
259 259
260 260 try:
261 261 self.digitalReadObj = digital_rf.DigitalRFReader(
262 262 path, load_all_metadata=True)
263 263 except:
264 264 self.digitalReadObj = digital_rf.DigitalRFReader(path)
265 265
266 266 channelNameList = self.digitalReadObj.get_channels()
267 267
268 268 if not channelNameList:
269 269 raise ValueError("[Reading] Directory %s does not have any files" % path)
270 270
271 271 if not channelList:
272 272 channelList = list(range(len(channelNameList)))
273 273
274 274 ########## Reading metadata ######################
275 275
276 276 top_properties = self.digitalReadObj.get_properties(
277 277 channelNameList[channelList[0]])
278 278
279 279 self.__num_subchannels = top_properties['num_subchannels']
280 280 self.__sample_rate = 1.0 * \
281 281 top_properties['sample_rate_numerator'] / \
282 282 top_properties['sample_rate_denominator']
283 283 # self.__samples_per_file = top_properties['samples_per_file'][0]
284 284 self.__deltaHeigth = 1e6 * 0.15 / self.__sample_rate # why 0.15?
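# Added note answering the question above: 0.15 km/us is c/2 (3e8 m/s -> 150 m of range per
# microsecond of two-way travel), so 1e6 * 0.15 / sample_rate is the height step in km per sample.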
285 285
286 286 this_metadata_file = self.digitalReadObj.get_digital_metadata(
287 287 channelNameList[channelList[0]])
288 288 metadata_bounds = this_metadata_file.get_bounds()
289 289 self.fixed_metadata_dict = this_metadata_file.read(
290 290 metadata_bounds[0])[metadata_bounds[0]] # GET FIRST HEADER
291 291
292 292 try:
293 293 self.__processingHeader = self.fixed_metadata_dict['processingHeader']
294 294 self.__radarControllerHeader = self.fixed_metadata_dict['radarControllerHeader']
295 295 self.__systemHeader = self.fixed_metadata_dict['systemHeader']
296 296 self.dtype = pickle.loads(self.fixed_metadata_dict['dtype'])
297 297 except:
298 298 pass
299 299
300 300 self.__frequency = None
301 301
302 302 self.__frequency = self.fixed_metadata_dict.get('frequency', 1)
303 303
304 304 self.__timezone = self.fixed_metadata_dict.get('timezone', 18000)
305 305
306 306 try:
307 307 nSamples = self.fixed_metadata_dict['nSamples']
308 308 except:
309 309 nSamples = None
310 310
311 311 self.__firstHeigth = 0
312 312
313 313 try:
314 314 codeType = self.__radarControllerHeader['codeType']
315 315 except:
316 316 codeType = 0
317 317
318 318 try:
319 319 if codeType:
320 320 nCode = self.__radarControllerHeader['nCode']
321 321 nBaud = self.__radarControllerHeader['nBaud']
322 322 code = self.__radarControllerHeader['code']
323 323 except:
324 324 pass
325 325
326 326 if not ippKm:
327 327 try:
328 328 # seconds to km
329 329 ippKm = self.__radarControllerHeader['ipp']
330 330 except:
331 331 ippKm = None
332 332 ####################################################
333 333 self.__ippKm = ippKm
334 334 startUTCSecond = None
335 335 endUTCSecond = None
336 336
337 337 if startDate:
338 338 startDatetime = datetime.datetime.combine(startDate, startTime)
339 339 startUTCSecond = (
340 340 startDatetime - datetime.datetime(1970, 1, 1)).total_seconds() + self.__timezone
341 341
342 342 if endDate:
343 343 endDatetime = datetime.datetime.combine(endDate, endTime)
344 344 endUTCSecond = (endDatetime - datetime.datetime(1970,
345 345 1, 1)).total_seconds() + self.__timezone
346 346
347 347 start_index, end_index = self.digitalReadObj.get_bounds(
348 348 channelNameList[channelList[0]])
349 349
350 350 if not startUTCSecond:
351 351 startUTCSecond = start_index / self.__sample_rate
352 352
353 353 if start_index > startUTCSecond * self.__sample_rate:
354 354 startUTCSecond = start_index / self.__sample_rate
355 355
356 356 if not endUTCSecond:
357 357 endUTCSecond = end_index / self.__sample_rate
358 358
359 359 if end_index < endUTCSecond * self.__sample_rate:
360 360 endUTCSecond = end_index / self.__sample_rate
361 361 if not nSamples:
362 362 if not ippKm:
363 363 raise ValueError("[Reading] nSamples or ippKm should be defined")
364 364 nSamples = int(ippKm / (1e6 * 0.15 / self.__sample_rate))
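# Added note: 1e6 * 0.15 / sample_rate is the height step in km (see __deltaHeigth above),
# so nSamples is the number of range samples that fit in one inter-pulse period (ippKm).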
365 365 channelBoundList = []
366 366 channelNameListFiltered = []
367 367
368 368 for thisIndexChannel in channelList:
369 369 thisChannelName = channelNameList[thisIndexChannel]
370 370 start_index, end_index = self.digitalReadObj.get_bounds(
371 371 thisChannelName)
372 372 channelBoundList.append((start_index, end_index))
373 373 channelNameListFiltered.append(thisChannelName)
374 374
375 375 self.profileIndex = 0
376 376 self.i = 0
377 377 self.__delay = delay
378 378
379 379 self.__codeType = codeType
380 380 self.__nCode = nCode
381 381 self.__nBaud = nBaud
382 382 self.__code = code
383 383
384 384 self.__datapath = path
385 385 self.__online = online
386 386 self.__channelList = channelList
387 387 self.__channelNameList = channelNameListFiltered
388 388 self.__channelBoundList = channelBoundList
389 389 self.__nSamples = nSamples
390 390 self.__samples_to_read = int(nSamples) # fixed: currently 40
391 391 self.__nChannels = len(self.__channelList)
392 392
393 393 self.__startUTCSecond = startUTCSecond
394 394 self.__endUTCSecond = endUTCSecond
395 395
396 396 self.__timeInterval = 1.0 * self.__samples_to_read / \
397 397 self.__sample_rate # Time interval
398 398
399 399 if online:
400 400 # self.__thisUnixSample = int(endUTCSecond*self.__sample_rate - 4*self.__samples_to_read)
401 401 startUTCSecond = numpy.floor(endUTCSecond)
402 402
403 403 # because in the other method the first thing done is to add samples_to_read
404 404 self.__thisUnixSample = int(startUTCSecond * self.__sample_rate) - self.__samples_to_read
405 405
406 406 self.__data_buffer = numpy.zeros(
407 407 (self.__num_subchannels, self.__samples_to_read), dtype=numpy.complex)
408 408
409 409 self.__setFileHeader()
410 410 self.isConfig = True
411 411
412 412 print("[Reading] Digital RF Data was found from %s to %s " % (
413 413 datetime.datetime.utcfromtimestamp(
414 414 self.__startUTCSecond - self.__timezone),
415 415 datetime.datetime.utcfromtimestamp(
416 416 self.__endUTCSecond - self.__timezone)
417 417 ))
418 418
419 419 print("[Reading] Starting process from %s to %s" % (datetime.datetime.utcfromtimestamp(startUTCSecond - self.__timezone),
420 420 datetime.datetime.utcfromtimestamp(
421 421 endUTCSecond - self.__timezone)
422 422 ))
423 423 self.oldAverage = None
424 424 self.count = 0
425 425 self.executionTime = 0
426 426
427 427 def __reload(self):
428 428 # print
429 429 # print "%s not in range [%s, %s]" %(
430 430 # datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
431 431 # datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone),
432 432 # datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone)
433 433 # )
434 434 print("[Reading] reloading metadata ...")
435 435
436 436 try:
437 437 self.digitalReadObj.reload(complete_update=True)
438 438 except:
439 439 self.digitalReadObj = digital_rf.DigitalRFReader(self.path)
440 440
441 441 start_index, end_index = self.digitalReadObj.get_bounds(
442 442 self.__channelNameList[self.__channelList[0]])
443 443
444 444 if start_index > self.__startUTCSecond * self.__sample_rate:
445 445 self.__startUTCSecond = 1.0 * start_index / self.__sample_rate
446 446
447 447 if end_index > self.__endUTCSecond * self.__sample_rate:
448 448 self.__endUTCSecond = 1.0 * end_index / self.__sample_rate
449 449 print()
450 450 print("[Reading] New timerange found [%s, %s] " % (
451 451 datetime.datetime.utcfromtimestamp(
452 452 self.__startUTCSecond - self.__timezone),
453 453 datetime.datetime.utcfromtimestamp(
454 454 self.__endUTCSecond - self.__timezone)
455 455 ))
456 456
457 457 return True
458 458
459 459 return False
460 460
461 461 def timeit(self, toExecute):
462 462 t0 = time.time()
463 463 toExecute()
464 464 self.executionTime = time.time() - t0
465 465 if self.oldAverage is None:
466 466 self.oldAverage = self.executionTime
467 467 self.oldAverage = (self.executionTime + self.count *
468 468 self.oldAverage) / (self.count + 1.0)
469 469 self.count = self.count + 1.0
470 470 return
471 471
472 472 def __readNextBlock(self, seconds=30, volt_scale=1):
473 473 '''
474 474 '''
475 475
476 476 # Set the next data
477 477 self.__flagDiscontinuousBlock = False
478 478 self.__thisUnixSample += self.__samples_to_read
479 479
480 480 if self.__thisUnixSample + 2 * self.__samples_to_read > self.__endUTCSecond * self.__sample_rate:
481 481 print ("[Reading] There are no more data into selected time-range")
482 482 if self.__online:
483 483 sleep(3)
484 484 self.__reload()
485 485 else:
486 486 return False
487 487
488 488 if self.__thisUnixSample + 2 * self.__samples_to_read > self.__endUTCSecond * self.__sample_rate:
489 489 return False
490 490 self.__thisUnixSample -= self.__samples_to_read
491 491
492 492 indexChannel = 0
493 493
494 494 dataOk = False
495 495
496 496 for thisChannelName in self.__channelNameList: # TODO MULTIPLE CHANNELS?
497 497 for indexSubchannel in range(self.__num_subchannels):
498 498 try:
499 499 t0 = time()
500 500 result = self.digitalReadObj.read_vector_c81d(self.__thisUnixSample,
501 501 self.__samples_to_read,
502 502 thisChannelName, sub_channel=indexSubchannel)
503 503 self.executionTime = time() - t0
504 504 if self.oldAverage is None:
505 505 self.oldAverage = self.executionTime
506 506 self.oldAverage = (
507 507 self.executionTime + self.count * self.oldAverage) / (self.count + 1.0)
508 508 self.count = self.count + 1.0
509 509
510 510 except IOError as e:
511 511 # read next profile
512 512 self.__flagDiscontinuousBlock = True
513 513 print("[Reading] %s" % datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone), e)
514 514 break
515 515
516 516 if result.shape[0] != self.__samples_to_read:
517 517 self.__flagDiscontinuousBlock = True
518 518 print("[Reading] %s: Too few samples were found, just %d/%d samples" % (datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
519 519 result.shape[0],
520 520 self.__samples_to_read))
521 521 break
522 522
523 523 self.__data_buffer[indexSubchannel, :] = result * volt_scale
524 524 indexChannel+=1
525 525
526 526 dataOk = True
527 527
528 528 self.__utctime = self.__thisUnixSample / self.__sample_rate
529 529
530 530 if not dataOk:
531 531 return False
532 532
533 533 print("[Reading] %s: %d samples <> %f sec" % (datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
534 534 self.__samples_to_read,
535 535 self.__timeInterval))
536 536
537 537 self.__bufferIndex = 0
538 538
539 539 return True
540 540
541 541 def __isBufferEmpty(self):
542 542 return self.__bufferIndex > self.__samples_to_read - self.__nSamples # 40960 - 40
543 543
544 544 def getData(self, seconds=30, nTries=5):
545 545 '''
546 546 This method gets the data from the files and puts it into the dataOut object
547 547
548 548 In addition, it advances the buffer index by one profile.
549 549
550 550 Return:
551 551 data : returns one profile of voltages (heights x channels) copied from the
552 552 buffer. If there are no more files to read, it returns None.
553 553
554 554 Affected:
555 555 self.dataOut
556 556 self.profileIndex
557 557 self.flagDiscontinuousBlock
558 558 self.flagIsNewBlock
559 559 '''
560 560 #print("getdata")
561 561 err_counter = 0
562 562 self.dataOut.flagNoData = True
563 563
564 564 if self.__isBufferEmpty():
565 565 #print("hi")
566 566 self.__flagDiscontinuousBlock = False
567 567
568 568 while True:
569 569 #print ("q ha pasado")
570 570 if self.__readNextBlock():
571 571 break
572 572 if self.__thisUnixSample > self.__endUTCSecond * self.__sample_rate:
573 573 raise schainpy.admin.SchainError('No more data in the selected time range')
574 574 return
575 575
576 576 if self.__flagDiscontinuousBlock:
577 577 raise schainpy.admin.SchainError('discontinuous block found')
578 578 return
579 579
580 580 if not self.__online:
581 581 raise schainpy.admin.SchainError('Online?')
582 582 return
583 583
584 584 err_counter += 1
585 585 if err_counter > nTries:
586 586 raise schainpy.admin.SchainError('Max retries reached')
587 587 return
588 588
589 589 print('[Reading] waiting %d seconds to read a new block' % seconds)
590 590 time.sleep(seconds)
591 591
592 592 self.dataOut.data = self.__data_buffer[:, self.__bufferIndex:self.__bufferIndex + self.__nSamples]
593 593 self.dataOut.utctime = ( self.__thisUnixSample + self.__bufferIndex) / self.__sample_rate
594 594 self.dataOut.flagNoData = False
595 595 self.dataOut.flagDiscontinuousBlock = self.__flagDiscontinuousBlock
596 596 self.dataOut.profileIndex = self.profileIndex
597 597
598 598 self.__bufferIndex += self.__nSamples
599 599 self.profileIndex += 1
600 600
601 601 if self.profileIndex == self.dataOut.nProfiles:
602 602 self.profileIndex = 0
603 603
604 604 return True
605 605
606 606 def printInfo(self):
607 607 '''
608 608 '''
609 609 if self.__printInfo == False:
610 610 return
611 611
612 612 # self.systemHeaderObj.printInfo()
613 613 # self.radarControllerHeaderObj.printInfo()
614 614
615 615 self.__printInfo = False
616 616
617 617 def printNumberOfBlock(self):
618 618 '''
619 619 '''
620 620 return
621 621 # print self.profileIndex
622 622
623 623 def run(self, **kwargs):
624 624 '''
625 625 This method will be called many times so here you should put all your code
626 626 '''
627 627
628 628 if not self.isConfig:
629 629 self.setup(**kwargs)
630 630 #self.i = self.i+1
631 631 self.getData(seconds=self.__delay)
632 632
633 633 return
634 634
635 635 @MPDecorator
636 636 class DigitalRFWriter(Operation):
637 637 '''
638 638 classdocs
639 639 '''
640 640
641 641 def __init__(self, **kwargs):
642 642 '''
643 643 Constructor
644 644 '''
645 645 Operation.__init__(self, **kwargs)
646 646 self.metadata_dict = {}
647 647 self.dataOut = None
648 648 self.dtype = None
649 649 self.oldAverage = 0
650 650
651 651 def setHeader(self):
652 652
653 653 self.metadata_dict['frequency'] = self.dataOut.frequency
654 654 self.metadata_dict['timezone'] = self.dataOut.timeZone
655 655 self.metadata_dict['dtype'] = pickle.dumps(self.dataOut.dtype)
656 656 self.metadata_dict['nProfiles'] = self.dataOut.nProfiles
657 657 self.metadata_dict['heightList'] = self.dataOut.heightList
658 658 self.metadata_dict['channelList'] = self.dataOut.channelList
659 659 self.metadata_dict['flagDecodeData'] = self.dataOut.flagDecodeData
660 660 self.metadata_dict['flagDeflipData'] = self.dataOut.flagDeflipData
661 661 self.metadata_dict['flagShiftFFT'] = self.dataOut.flagShiftFFT
662 662 self.metadata_dict['useLocalTime'] = self.dataOut.useLocalTime
663 663 self.metadata_dict['nCohInt'] = self.dataOut.nCohInt
664 664 self.metadata_dict['type'] = self.dataOut.type
665 665 self.metadata_dict['flagDataAsBlock'] = getattr(
666 666 self.dataOut, 'flagDataAsBlock', None) # to be checked
667 667
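# Illustrative sketch (standalone, not part of this class): the 'dtype' entry above is
# stored as a pickled numpy dtype, so any consumer of the metadata can restore it with
# pickle.loads. The dtype used here is only an example value.
import pickle
import numpy

stored = pickle.dumps(numpy.dtype([('real', '<f4'), ('imag', '<f4')]))  # what setHeader stores
restored = pickle.loads(stored)                                         # what a metadata reader would do
assert restored == numpy.dtype([('real', '<f4'), ('imag', '<f4')])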
668 668 def setup(self, dataOut, path, frequency, fileCadence, dirCadence, metadataCadence, set=0, metadataFile='metadata', ext='.h5'):
669 669 '''
670 670 In this method we should set all initial parameters.
671 671 Input:
672 672 dataOut: Input data will also be the output data
673 673 '''
674 674 self.setHeader()
675 675 self.__ippSeconds = dataOut.ippSeconds
676 676 self.__deltaH = dataOut.getDeltaH()
677 677 self.__sample_rate = 1e6 * 0.15 / self.__deltaH
678 678 self.__dtype = dataOut.dtype
679 679 if len(dataOut.dtype) == 2:
680 680 self.__dtype = dataOut.dtype[0]
681 681 self.__nSamples = dataOut.systemHeaderObj.nSamples
682 682 self.__nProfiles = dataOut.nProfiles
683 683
684 684 if self.dataOut.type != 'Voltage':
685 685 raise ValueError('Digital RF cannot be used with this data type')
686 686 self.arr_data = numpy.ones((1, dataOut.nFFTPoints * len(
687 687 self.dataOut.channelList)), dtype=[('r', self.__dtype), ('i', self.__dtype)])
688 688 else:
689 689 self.arr_data = numpy.ones((self.__nSamples, len(
690 690 self.dataOut.channelList)), dtype=[('r', self.__dtype), ('i', self.__dtype)])
691 691
692 692 file_cadence_millisecs = 1000
693 693
694 694 sample_rate_fraction = Fraction(self.__sample_rate).limit_denominator()
695 695 sample_rate_numerator = int(sample_rate_fraction.numerator)
696 696 sample_rate_denominator = int(sample_rate_fraction.denominator)
697 697 start_global_index = dataOut.utctime * self.__sample_rate
698 698
699 699 uuid = 'prueba'
700 700 compression_level = 0
701 701 checksum = False
702 702 is_complex = True
703 703 num_subchannels = len(dataOut.channelList)
704 704 is_continuous = True
705 705 marching_periods = False
706 706
707 707 self.digitalWriteObj = digital_rf.DigitalRFWriter(path, self.__dtype, dirCadence,
708 708 fileCadence, start_global_index,
709 709 sample_rate_numerator, sample_rate_denominator, uuid, compression_level, checksum,
710 710 is_complex, num_subchannels, is_continuous, marching_periods)
711 711 metadata_dir = os.path.join(path, 'metadata')
712 712 os.makedirs(metadata_dir, exist_ok=True)
713 713 self.digitalMetadataWriteObj = digital_rf.DigitalMetadataWriter(metadata_dir, dirCadence, 1, # 236, file_cadence_millisecs / 1000
714 714 sample_rate_numerator, sample_rate_denominator,
715 715 metadataFile)
716 716 self.isConfig = True
717 717 self.currentSample = 0
718 718 self.oldAverage = 0
719 719 self.count = 0
720 720 return
721 721
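# Illustrative sketch (standalone, not part of this class): how the sample rate used in
# setup() follows from the height resolution and how it is split into the rational
# numerator/denominator pair passed to digital_rf. The 1.5 km resolution is an assumed example.
from fractions import Fraction

delta_h = 1.5                                   # km per sample (assumed example)
sample_rate = 1e6 * 0.15 / delta_h              # 0.15 km of range per microsecond -> 100000.0 Hz
rate = Fraction(sample_rate).limit_denominator()
print(rate.numerator, rate.denominator)         # 100000 1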
722 722 def writeMetadata(self):
723 723 start_idx = self.__sample_rate * self.dataOut.utctime
724 724
725 725 self.metadata_dict['processingHeader'] = self.dataOut.processingHeaderObj.getAsDict(
726 726 )
727 727 self.metadata_dict['radarControllerHeader'] = self.dataOut.radarControllerHeaderObj.getAsDict(
728 728 )
729 729 self.metadata_dict['systemHeader'] = self.dataOut.systemHeaderObj.getAsDict(
730 730 )
731 731 self.digitalMetadataWriteObj.write(start_idx, self.metadata_dict)
732 732 return
733 733
734 734 def timeit(self, toExecute):
735 735 t0 = time()
736 736 toExecute()
737 737 self.executionTime = time() - t0
738 738 if self.oldAverage is None:
739 739 self.oldAverage = self.executionTime
740 740 self.oldAverage = (self.executionTime + self.count *
741 741 self.oldAverage) / (self.count + 1.0)
742 742 self.count = self.count + 1.0
743 743 return
744 744
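# Illustrative sketch (standalone, not part of this class): the incremental mean used by
# timeit() above and by __readNextBlock() in the reader; after n samples it equals the
# plain average of the measured execution times.
samples = [0.10, 0.30, 0.20]        # example execution times in seconds
avg, count = 0.0, 0.0
for x in samples:
    avg = (x + count * avg) / (count + 1.0)
    count += 1.0
assert abs(avg - sum(samples) / len(samples)) < 1e-12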
745 745 def writeData(self):
746 746 if self.dataOut.type != 'Voltage':
747 747 raise ValueError('Digital RF cannot be used with this data type')
748 748 for channel in self.dataOut.channelList:
749 749 for i in range(self.dataOut.nFFTPoints):
750 750 self.arr_data[1][channel * self.dataOut.nFFTPoints +
751 751 i]['r'] = self.dataOut.data[channel][i].real
752 752 self.arr_data[1][channel * self.dataOut.nFFTPoints +
753 753 i]['i'] = self.dataOut.data[channel][i].imag
754 754 else:
755 755 for i in range(self.dataOut.systemHeaderObj.nSamples):
756 756 for channel in self.dataOut.channelList:
757 757 self.arr_data[i][channel]['r'] = self.dataOut.data[channel][i].real
758 758 self.arr_data[i][channel]['i'] = self.dataOut.data[channel][i].imag
759 759
760 760 def f(): return self.digitalWriteObj.rf_write(self.arr_data)
761 761 self.timeit(f)
762 762
763 763 return
764 764
765 765 def run(self, dataOut, frequency=49.92e6, path=None, fileCadence=1000, dirCadence=36000, metadataCadence=1, **kwargs):
766 766 '''
767 767 This method will be called many times so here you should put all your code
768 768 Inputs:
769 769 dataOut: object with the data
770 770 '''
771 771 # print dataOut.__dict__
772 772 self.dataOut = dataOut
773 773 if not self.isConfig:
774 774 self.setup(dataOut, path, frequency, fileCadence,
775 775 dirCadence, metadataCadence, **kwargs)
776 776 self.writeMetadata()
777 777
778 778 self.writeData()
779 779
780 780 ## self.currentSample += 1
781 781 # if self.dataOut.flagDataAsBlock or self.currentSample == 1:
782 782 # self.writeMetadata()
783 783 ## if self.currentSample == self.__nProfiles: self.currentSample = 0
784 784
785 785 return dataOut # this return does not appear in version 2.7
786 786
787 787 def close(self):
788 788 print('[Writing] - Closing files ')
789 789 print('Average of writing to digital rf format is ', self.oldAverage * 1000)
790 790 try:
791 791 self.digitalWriteObj.close()
792 792 except:
793 793 pass
@@ -1,862 +1,862
1 1 '''
2 2 Created on Jul 3, 2014
3 3
4 4 @author: roj-com0419
5 5 '''
6 6
7 7 import os,sys
8 8 import time,datetime
9 9 import h5py
10 10 import numpy
11 11 import fnmatch
12 12 import re
13 13
14 14 from schainpy.model.data.jroheaderIO import RadarControllerHeader, SystemHeader
15 15 from schainpy.model.data.jrodata import Voltage
16 16 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
17 17
18 18
19 19 def isNumber(str):
20 20 """
21 21 Checks whether the set of characters that make up a string can be converted to a number.
22 22 
23 23 Exceptions:
24 24 If a given string cannot be converted to a number
25 25 Input:
26 26 str, string to be analyzed to determine whether it can be converted to a number or not
27 27 
28 28 Return:
29 29 True : if the string is numeric
30 30 False : if it is not a numeric string
31 31 """
32 32 try:
33 33 float( str )
34 34 return True
35 35 except:
36 36 return False
37 37
38 38 def getFileFromSet(path, ext, set=None):
39 39 validFilelist = []
40 40 fileList = os.listdir(path)
41 41
42 42
43 43 if len(fileList) < 1:
44 44 return None
45 45
46 46 # 0 1234 567 89A BCDE
47 47 # H YYYY DDD SSS .ext
48 48
49 49 for thisFile in fileList:
50 50 try:
51 51 number= int(thisFile[6:16])
52 52
53 53 # year = int(thisFile[1:5])
54 54 # doy = int(thisFile[5:8])
55 55 except:
56 56 continue
57 57
58 58 if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
59 59 continue
60 60
61 61 validFilelist.append(thisFile)
62 62
63 63 if len(validFilelist) < 1:
64 64 return None
65 65
66 66 validFilelist = sorted( validFilelist, key=str.lower )
67 67
68 68 if set == None:
69 69 return validFilelist[-1]
70 70
71 71 print("set =" ,set)
72 72 for thisFile in validFilelist:
73 73 if set <= int(thisFile[6:16]):
74 74 print(thisFile,int(thisFile[6:16]))
75 75 return thisFile
76 76
77 77 return validFilelist[-1]
78 78
79 79 myfile = fnmatch.filter(validFilelist,'*%10d*'%(set))
80 80 #myfile = fnmatch.filter(validFilelist,'*%4.4d%3.3d%3.3d*'%(year,doy,set))
81 81
82 82 if len(myfile)!= 0:
83 83 return myfile[0]
84 84 else:
85 85 filename = '*%10.10d%s'%(set,ext.lower())
86 86 print('the filename %s does not exist'%filename)
87 87 print('...going to the last file: ')
88 88
89 89 if validFilelist:
90 90 validFilelist = sorted( validFilelist, key=str.lower )
91 91 return validFilelist[-1]
92 92
93 93 return None
94 94
95 95 def getlastFileFromPath(path, ext):
96 96 """
97 97 Depura el fileList dejando solo los que cumplan el formato de "res-xxxxxx.ext"
98 98 al final de la depuracion devuelve el ultimo file de la lista que quedo.
99 99
100 100 Input:
101 101 fileList : lista conteniendo todos los files (sin path) que componen una determinada carpeta
102 102 ext : extension de los files contenidos en una carpeta
103 103
104 104 Return:
105 105 El ultimo file de una determinada carpeta, no se considera el path.
106 106 """
107 107 validFilelist = []
108 108 fileList = os.listdir(path)
109 109
110 110 # 0 1234 567 89A BCDE
111 111 # H YYYY DDD SSS .ext
112 112
113 113 for thisFile in fileList:
114 114
115 115 try:
116 116 number= int(thisFile[6:16])
117 117 except:
118 118 print("There is a file or folder with a different format")
119 119 continue
121 121
122 122 # year = thisFile[1:5]
123 123 # if not isNumber(year):
124 124 # continue
125 125
126 126 # doy = thisFile[5:8]
127 127 # if not isNumber(doy):
128 128 # continue
129 129
130 130 number= int(number)
131 131 # year = int(year)
132 132 # doy = int(doy)
133 133
134 134 if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
135 135 continue
136 136
137 137
138 138 validFilelist.append(thisFile)
139 139
140 140
141 141 if validFilelist:
142 142 validFilelist = sorted( validFilelist, key=str.lower )
143 143 return validFilelist[-1]
144 144
145 145 return None
146 146
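# Illustrative sketch (standalone, not part of this module): both helpers above identify a
# file by the 10-digit set number at characters [6:16] of its name. The filename below is
# hypothetical; only the slicing convention used by the code is assumed.
example_name = 'D2014_0000012340.hdf5'
set_number = int(example_name[6:16])    # -> 12340
next_set = set_number + 10              # the readers advance the set in steps of 10
print(set_number, next_set)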
147 147
148 148
149 149 class HFReader(ProcessingUnit):
150 150 '''
151 151 classdocs
152 152 '''
153 153 path = None
154 154 startDate= None
155 155 endDate = None
156 156 startTime= None
157 157 endTime = None
158 158 walk = None
159 159 isConfig = False
160 160 dataOut=None
161 161 nTries = 3
162 162 ext = ".hdf5"
163 163
164 164 def __init__(self, **kwargs):
165 165 '''
166 166 Constructor
167 167 '''
168 168 ProcessingUnit.__init__(self, **kwargs)
169 169
170 170 self.isConfig =False
171 171
172 172 self.datablock = None
173 173
174 174 self.filename_current=None
175 175
176 176 self.utc = 0
177 177
178 178 self.ext='.hdf5'
179 179
180 180 self.flagIsNewFile = 1
181 181
182 182 #-------------------------------------------------
183 183 self.fileIndex=None
184 184
185 185 self.profileIndex_offset=None
186 186
187 187 self.filenameList=[]
188 188
189 189 self.hfFilePointer= None
190 190
191 191 self.filename_online = None
192 192
193 193 self.status=True
194 194
195 195 self.flagNoMoreFiles= False
196 196
197 197 self.__waitForNewFile = 20
198 198
199 199
200 200 #--------------------------------------------------
201 201
202 202 self.dataOut = self.createObjByDefault()
203 203
204 204
205 205 def createObjByDefault(self):
206 206
207 207 dataObj = Voltage()
208 208
209 209 return dataObj
210 210
211 211 def setObjProperties(self):
212 212
213 213 pass
214 214
215 215 def getBlockDimension(self):
216 216 """
217 217 Gets the number of points to read for each data block
218 218
219 219 Affected:
220 220 self.blocksize
221 221
222 222 Return:
223 223 None
224 224 """
225 225 pts2read =self.nChannels*self.nHeights*self.nProfiles
226 226 self.blocksize = pts2read
227 227
228 228 def __readHeader(self):
229 229
230 230 self.nProfiles = 100
231 231 self.nHeights = 1000
232 232 self.nChannels = 2
233 233 self.__firstHeigth=0
234 234 self.__nSamples=1000
235 235 self.__deltaHeigth=1.5
236 236 self.__sample_rate=1e5
237 237 #self.__frequency=2.72e6
238 238 #self.__frequency=3.64e6
239 239 self.__frequency=None
240 240 self.__online = False
241 241 self.filename_next_set=None
242 242
243 243 #print "Frequency of Operation:", self.__frequency
244 244
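# Illustrative sketch (standalone, not part of this class): the hard-coded values above
# imply the height list and inter-pulse period that __setHeaderDO() derives later.
import numpy

n_samples, delta_h, first_h, sample_rate = 1000, 1.5, 0.0, 1e5
height_list = first_h + numpy.arange(n_samples) * delta_h    # 0.0, 1.5, ..., 1498.5 km
ipp_seconds = 1.0 * n_samples / sample_rate                  # 0.01 s per profile
print(height_list[-1], ipp_seconds)                          # 1498.5 0.01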
245 245
246 246 def __setParameters(self,path='', startDate='',endDate='',startTime='', endTime='', walk=''):
247 247 self.path = path
248 248 self.startDate = startDate
249 249 self.endDate = endDate
250 250 self.startTime = startTime
251 251 self.endTime = endTime
252 252 self.walk = walk
253 253
254 254 def __checkPath(self):
255 255 if os.path.exists(self.path):
256 256 self.status=1
257 257 else:
258 258 self.status=0
259 259 print('Path %s does not exist'%self.path)
260 260 return
261 261 return
262 262
263 263 def __selDates(self, hf_dirname_format):
264 264 try:
265 265 dir_hf_filename= self.path+"/"+hf_dirname_format
266 266 fp= h5py.File(dir_hf_filename,'r')
267 267 hipoc=fp['t'].value
268 268 fp.close()
269 269 date_time=datetime.datetime.utcfromtimestamp(hipoc)
270 270 year = date_time.year
271 271 month = date_time.month
272 272 dom = date_time.day
273 273 thisDate= datetime.date(year,month,dom)
274 274 if (thisDate>=self.startDate and thisDate <= self.endDate):
275 275 return hf_dirname_format
276 276 except:
277 277 return None
278 278
279 279 def __findDataForDates(self,online=False):
280 280 if not(self.status):
281 281 return None
282 282
283 283 pat = '\d+.\d+'
284 284 dirnameList = [re.search(pat,x) for x in os.listdir(self.path)]
285 285 dirnameList = [x for x in dirnameList if x!=None]
286 286 dirnameList = [x.string for x in dirnameList]
287 287 if not(online):
288 288
289 289 dirnameList = [self.__selDates(x) for x in dirnameList]
290 290 dirnameList = [x for x in dirnameList if x!=None]
291 291
292 292 if len(dirnameList)>0:
293 293 self.status = 1
294 294 self.dirnameList = dirnameList
295 295 self.dirnameList.sort()
296 296
297 297 else:
298 298 self.status = 0
299 299 return None
300 300
301 301 def __getTimeFromData(self):
302 302 startDateTime_Reader = datetime.datetime.combine(self.startDate,self.startTime)
303 303 endDateTime_Reader = datetime.datetime.combine(self.endDate,self.endTime)
304 304 print('Filtering Files from %s to %s'%(startDateTime_Reader, endDateTime_Reader))
305 305 print('........................................')
306 306 filter_filenameList=[]
307 307 self.filenameList.sort()
308 308 for i in range(len(self.filenameList)-1):
309 309 filename=self.filenameList[i]
310 310 dir_hf_filename= filename
311 311 fp= h5py.File(dir_hf_filename,'r')
312 312 hipoc=fp['t'].value
313 313 hipoc=hipoc+self.timezone
314 314 date_time=datetime.datetime.utcfromtimestamp(hipoc)
315 315 fp.close()
316 316 year = date_time.year
317 317 month = date_time.month
318 318 dom = date_time.day
319 319 hour = date_time.hour
320 320 minute = date_time.minute
321 321 sec = date_time.second
322 322 this_time=datetime.datetime(year,month,dom,hour,minute,sec)
323 323 if (this_time>=startDateTime_Reader and this_time <= endDateTime_Reader):
324 324 filter_filenameList.append(filename)
325 325 filter_filenameList.sort()
326 326 self.filenameList = filter_filenameList
327 327 return 1
328 328
329 329 def __getFilenameList(self):
330 330 #print "hola"
331 331 #print self.dirnameList
332 332 dirList = [os.path.join(self.path,x) for x in self.dirnameList]
333 333 self.filenameList= dirList
334 334 #print self.filenameList
335 335 #print "pase",len(self.filenameList)
336 336
337 337 def __selectDataForTimes(self, online=False):
338 338
339 339 if not(self.status):
340 340 return None
341 341 #----------------
342 342 self.__getFilenameList()
343 343 #----------------
344 344 if not(online):
345 345 if not(self.all):
346 346 self.__getTimeFromData()
347 347 if len(self.filenameList)>0:
348 348 self.status=1
349 349 self.filenameList.sort()
350 350 else:
351 351 self.status=0
352 352 return None
353 353 else:
354 354 if self.set != None:
355 355
356 356 filename=getFileFromSet(self.path,self.ext,self.set)
357 357
358 358 if self.flag_nextfile==True:
359 359 self.dirnameList=[filename]
360 360 fullfilename=self.path+"/"+filename
361 361 self.filenameList=[fullfilename]
362 362 self.filename_next_set=int(filename[6:16])+10
363 363
364 364 self.flag_nextfile=False
365 365 else:
366 366 print(filename)
367 367 print("FIRST CONDITION")
368 368 #if self.filename_next_set== int(filename[6:16]):
369 369 print("ALL GOOD")
370 370
371 371 if filename == None:
372 372 raise ValueError("no file was found for the requested set")
373 373
374 374 self.dirnameList=[filename]
375 375 fullfilename=self.path+"/"+filename
376 376 self.filenameList=[fullfilename]
377 377 self.filename_next_set=int(filename[6:16])+10
378 378 print("Setting next file",self.filename_next_set)
379 379 self.set=int(filename[6:16])
380 380 if True:
381 381 pass
382 382 else:
383 383 print("I AM HERE BECAUSE THE NEXT FILE DOES NOT EXIST")
384 384
385 385 else:
386 386 filename =getlastFileFromPath(self.path,self.ext)
387 387
388 388 if self.flag_nextfile==True:
389 389 self.dirnameList=[filename]
390 390 fullfilename=self.path+"/"+filename
391 391 self.filenameList=[self.filenameList[-1]]
392 392 self.filename_next_set=int(filename[6:16])+10
393 393
394 394 self.flag_nextfile=False
395 395 else:
396 396 filename=getFileFromSet(self.path,self.ext,self.set)
397 397 print(filename)
398 398 print("FIRST CONDITION")
399 399 #if self.filename_next_set== int(filename[6:16]):
400 400 print("ALL GOOD")
401 401
402 402 if filename == None:
403 403 raise ValueError("no file was found for the requested set")
404 404
405 405 self.dirnameList=[filename]
406 406 fullfilename=self.path+"/"+filename
407 407 self.filenameList=[fullfilename]
408 408 self.filename_next_set=int(filename[6:16])+10
409 409 print("Setting next file",self.filename_next_set)
410 410 self.set=int(filename[6:16])
411 411 if True:
412 412 pass
413 413 else:
414 414 print("I AM HERE BECAUSE THE NEXT FILE DOES NOT EXIST")
415 415
416 416
417 417
418 418 def searchFilesOffLine(self,
419 419 path,
420 420 startDate,
421 421 endDate,
422 422 ext,
423 423 startTime=datetime.time(0,0,0),
424 424 endTime=datetime.time(23,59,59),
425 425 walk=True):
426 426
427 427 self.__setParameters(path, startDate, endDate, startTime, endTime, walk)
428 428
429 429 self.__checkPath()
430 430
431 431 self.__findDataForDates()
432 432 #print self.dirnameList
433 433
434 434 self.__selectDataForTimes()
435 435
436 436 for i in range(len(self.filenameList)):
437 437 print("%s"% (self.filenameList[i]))
438 438
439 439 return
440 440
441 441 def searchFilesOnLine(self,
442 442 path,
443 443 expLabel= "",
444 444 ext=None,
445 445 startDate=None,
446 446 endDate=None,
447 447 walk=True,
448 448 set=None):
449 449
450 450
451 451 startDate = datetime.datetime.utcnow().date()
452 452 endDate = datetime.datetime.utcnow().date()
453 453
454 454 self.__setParameters(path=path,startDate=startDate,endDate=endDate,walk=walk)
455 455
456 456 self.__checkPath()
457 457
458 458 fullpath=path
459 459 print("%s folder was found: " %(fullpath ))
460 460
461 461 if set == None:
462 462 self.set=None
463 463 filename =getlastFileFromPath(fullpath,ext)
464 464 startDate= datetime.datetime.utcnow().date()
465 465 endDate= datetime.datetime.utcnow().date()
466 466 #
467 467 else:
468 468 filename= getFileFromSet(fullpath,ext,set)
469 469 startDate=None
470 470 endDate=None
471 471 #
472 472 if not (filename):
473 473 return None,None,None,None,None
474 474 #print "%s file was found" %(filename)
475 475
476 476 #
477 477 # dir_hf_filename= self.path+"/"+filename
478 478 # fp= h5py.File(dir_hf_filename,'r')
479 479 # hipoc=fp['t'].value
480 480 # fp.close()
481 481 # date_time=datetime.datetime.utcfromtimestamp(hipoc)
482 482 #
483 483 # year =int(date_time[0:4])
484 484 # month=int(date_time[5:7])
485 485 # dom =int(date_time[8:10])
486 486 # set= int(filename[4:10])
487 487 # self.set=set-1
488 488 #self.dirnameList=[filename]
489 489 filenameList= fullpath+"/"+filename
490 490 self.dirnameList=[filename]
491 491 self.filenameList=[filenameList]
492 492 self.flag_nextfile=True
493 493
494 494 #self.__findDataForDates(online=True)
495 495 #self.dirnameList=[self.dirnameList[-1]]
496 496 #print self.dirnameList
497 497 #self.__selectDataForTimes(online=True)
498 498 #return fullpath,filename,year,month,dom,set
499 499 return
500 500
501 501 def __setNextFile(self,online=False):
502 502 """
503 503 """
504 504 if not(online):
505 505 newFile = self.__setNextFileOffline()
506 506 else:
507 507 newFile = self.__setNextFileOnline()
508 508
509 509 if not(newFile):
510 510 return 0
511 511 return 1
512 512
513 513 def __setNextFileOffline(self):
514 514 """
515 515 """
516 516 idFile= self.fileIndex
517 517 while(True):
518 518 idFile += 1
519 519 if not (idFile < len(self.filenameList)):
520 520 self.flagNoMoreFiles = 1
521 521 print("No more Files")
522 522 return 0
523 523 filename = self.filenameList[idFile]
524 524 hfFilePointer =h5py.File(filename,'r')
525 525
526 526 epoc=hfFilePointer['t'].value
527 527 #this_time=datetime.datetime(year,month,dom,hour,min,sec)
528 528 break
529 529
530 530 self.flagIsNewFile = 1
531 531 self.fileIndex = idFile
532 532 self.filename = filename
533 533
534 534 self.hfFilePointer = hfFilePointer
535 535 hfFilePointer.close()
536 536 self.__t0=epoc
537 537 print("Setting the file: %s"%self.filename)
538 538
539 539 return 1
540 540
541 541 def __setNextFileOnline(self):
542 542 """
543 543 """
544 544 print("set is None?", self.set)
545 545 if self.set==None:
546 546 pass
547 547 else:
548 548 self.set +=10
549 549
550 550 filename = self.filenameList[0]#fullfilename
551 551 if self.filename_online != None:
552 552 self.__selectDataForTimes(online=True)
553 553 filename = self.filenameList[0]
554 554 while self.filename_online == filename:
555 555 print('waiting %d seconds to get a new file...'%(self.__waitForNewFile))
556 556 time.sleep(self.__waitForNewFile)
557 557 #self.__findDataForDates(online=True)
558 558 self.set=self.filename_next_set
559 559 self.__selectDataForTimes(online=True)
560 560 filename = self.filenameList[0]
561 561 sizeoffile=os.path.getsize(filename)
562 562
563 563 #print filename
564 564 sizeoffile=os.path.getsize(filename)
565 565 if sizeoffile<1670240:
566 566 print("%s is not the right size"%filename)
567 567 delay=50
568 568 print('waiting %d seconds for delay...'%(delay))
569 569 time.sleep(delay)
570 570 sizeoffile=os.path.getsize(filename)
571 571 if sizeoffile<1670240:
572 572 delay=50
573 573 print('waiting %d more seconds for delay...'%(delay))
574 574 time.sleep(delay)
575 575
576 576 sizeoffile=os.path.getsize(filename)
577 577 if sizeoffile<1670240:
578 578 delay=50
579 579 print('waiting %d more seconds for delay...'%(delay))
580 580 time.sleep(delay)
581 581
582 582 try:
583 583 hfFilePointer=h5py.File(filename,'r')
584 584
585 585 except:
586 586 print("Error reading file %s"%filename)
587 587
588 588 self.filename_online=filename
589 589 epoc=hfFilePointer['t'].value
590 590
591 591 self.hfFilePointer=hfFilePointer
592 592 hfFilePointer.close()
593 593 self.__t0=epoc
594 594
595 595
596 596 self.flagIsNewFile = 1
597 597 self.filename = filename
598 598
599 599 print("Setting the file: %s"%self.filename)
600 600 return 1
601 601
602 602 def __getExpParameters(self):
603 603 if not(self.status):
604 604 return None
605 605
606 606 def setup(self,
607 607 path = None,
608 608 startDate = None,
609 609 endDate = None,
610 610 startTime = datetime.time(0,0,0),
611 611 endTime = datetime.time(23,59,59),
612 612 set = None,
613 613 expLabel = "",
614 614 ext = None,
615 615 all=0,
616 616 timezone=0,
617 617 online = False,
618 618 delay = 60,
619 619 walk = True):
620 620 '''
621 621 In this method we should set all initial parameters.
622 622
623 623 '''
624 624 if path==None:
625 625 raise ValueError("The path is not valid")
626 626
627 627 if ext==None:
628 628 ext = self.ext
629 629
630 630 self.timezone= timezone
631 631 self.online= online
632 632 self.all=all
633 633 #if set==None:
634 634
635 635 #print set
636 636 if not(online):
637 637 print("Searching files in offline mode...")
638 638
639 639 self.searchFilesOffLine(path, startDate, endDate, ext, startTime, endTime, walk)
640 640 else:
641 641 print("Searching files in online mode...")
642 642 self.searchFilesOnLine(path, walk,ext,set=set)
643 643 if set==None:
644 644 pass
645 645 else:
646 646 self.set=set-10
647 647
648 648 # for nTries in range(self.nTries):
649 649 #
650 650 # fullpath,file,year,month,day,set = self.searchFilesOnLine(path=path,expLabel=expLabel,ext=ext, walk=walk,set=set)
651 651 #
652 652 # if fullpath:
653 653 # break
654 654 # print '\tWaiting %0.2f sec for an valid file in %s: try %02d ...' % (self.delay, path, nTries+1)
655 655 # time.sleep(self.delay)
656 656 # if not(fullpath):
657 657 # print "There ins't valid files in %s" % path
658 658 # return None
659 659
660 660
661 661 if not(self.filenameList):
662 662 print("There are no files in the folder: %s"%(path))
663 663 sys.exit(-1)
664 664
665 665 self.__getExpParameters()
666 666
667 667
668 668 self.fileIndex = -1
669 669
670 670 self.__setNextFile(online)
671 671
672 672 self.__readMetadata()
673 673
674 674 self.__setLocalVariables()
675 675
676 676 self.__setHeaderDO()
677 677 #self.profileIndex_offset= 0
678 678
679 679 #self.profileIndex = self.profileIndex_offset
680 680
681 681 self.isConfig = True
682 682
683 683 def __readMetadata(self):
684 684 self.__readHeader()
685 685
686 686
687 687 def __setLocalVariables(self):
688 688
689 689 self.datablock = numpy.zeros((self.nChannels, self.nHeights,self.nProfiles), dtype = numpy.complex)
690 690 #
691 691
692 692
693 693
694 694 self.profileIndex = 9999
695 695
696 696
697 697 def __setHeaderDO(self):
698 698
699 699
700 700 self.dataOut.radarControllerHeaderObj = RadarControllerHeader()
701 701
702 702 self.dataOut.systemHeaderObj = SystemHeader()
703 703
704 704
705 705 #---------------------------------------------------------
706 706 self.dataOut.systemHeaderObj.nProfiles=100
707 707 self.dataOut.systemHeaderObj.nSamples=1000
708 708
709 709
710 710 SAMPLING_STRUCTURE=[('h0', '<f4'), ('dh', '<f4'), ('nsa', '<u4')]
711 711 self.dataOut.radarControllerHeaderObj.samplingWindow=numpy.zeros((1,),SAMPLING_STRUCTURE)
712 712 self.dataOut.radarControllerHeaderObj.samplingWindow['h0']=0
713 713 self.dataOut.radarControllerHeaderObj.samplingWindow['dh']=1.5
714 714 self.dataOut.radarControllerHeaderObj.samplingWindow['nsa']=1000
715 715 self.dataOut.radarControllerHeaderObj.nHeights=int(self.dataOut.radarControllerHeaderObj.samplingWindow['nsa'])
716 716 self.dataOut.radarControllerHeaderObj.firstHeight = self.dataOut.radarControllerHeaderObj.samplingWindow['h0']
717 717 self.dataOut.radarControllerHeaderObj.deltaHeight = self.dataOut.radarControllerHeaderObj.samplingWindow['dh']
718 718 self.dataOut.radarControllerHeaderObj.samplesWin = self.dataOut.radarControllerHeaderObj.samplingWindow['nsa']
719 719
720 720 self.dataOut.radarControllerHeaderObj.nWindows=1
721 721 self.dataOut.radarControllerHeaderObj.codetype=0
722 722 self.dataOut.radarControllerHeaderObj.numTaus=0
723 723 #self.dataOut.radarControllerHeaderObj.Taus = numpy.zeros((1,),'<f4')
724 724
725 725
726 726 #self.dataOut.radarControllerHeaderObj.nCode=numpy.zeros((1,), '<u4')
727 727 #self.dataOut.radarControllerHeaderObj.nBaud=numpy.zeros((1,), '<u4')
728 728 #self.dataOut.radarControllerHeaderObj.code=numpy.zeros(0)
729 729
730 730 self.dataOut.radarControllerHeaderObj.code_size=0
731 731 self.dataOut.nBaud=0
732 732 self.dataOut.nCode=0
733 733 self.dataOut.nPairs=0
734 734
735 735
736 736 #---------------------------------------------------------
737 737
738 738 self.dataOut.type = "Voltage"
739 739
740 740 self.dataOut.data = None
741 741
742 742 self.dataOut.dtype = numpy.dtype([('real','<f4'),('imag','<f4')])
743 743
744 744 self.dataOut.nProfiles = 1
745 745
746 746 self.dataOut.heightList = self.__firstHeigth + numpy.arange(self.__nSamples, dtype = numpy.float)*self.__deltaHeigth
747 747
748 748 self.dataOut.channelList = list(range(self.nChannels))
749 749
750 750 #self.dataOut.channelIndexList = None
751 751
752 752 self.dataOut.flagNoData = True
753 753
754 754 #Set to TRUE if the data is discontinuous
755 755 self.dataOut.flagDiscontinuousBlock = False
756 756
757 757 self.dataOut.utctime = None
758 758
759 759 self.dataOut.timeZone = self.timezone
760 760
761 761 self.dataOut.dstFlag = 0
762 762
763 763 self.dataOut.errorCount = 0
764 764
765 765 self.dataOut.nCohInt = 1
766 766
767 self.dataOut.blocksize = self.dataOut.getNChannels() * self.dataOut.getNHeights()
767 self.dataOut.blocksize = self.dataOut.nChannels * self.dataOut.nHeights
768 768
769 769 self.dataOut.flagDecodeData = False #assume the data is already decoded
770 770 
771 771 self.dataOut.flagDeflipData = False #assume the data is not flipped
772 772
773 773 self.dataOut.flagShiftFFT = False
774 774
775 775 self.dataOut.ippSeconds = 1.0*self.__nSamples/self.__sample_rate
776 776
777 777 #Time interval between profiles
778 778 #self.dataOut.timeInterval =self.dataOut.ippSeconds * self.dataOut.nCohInt
779 779
780 780
781 781 self.dataOut.frequency = self.__frequency
782 782
783 783 self.dataOut.realtime = self.__online
784 784
785 785 def __hasNotDataInBuffer(self):
786 786
787 787 if self.profileIndex >= self.nProfiles:
788 788 return 1
789 789
790 790 return 0
791 791
792 792 def readNextBlock(self):
793 793 if not(self.__setNewBlock()):
794 794 return 0
795 795
796 796 if not(self.readBlock()):
797 797 return 0
798 798
799 799 return 1
800 800
801 801 def __setNewBlock(self):
802 802
803 803 if self.hfFilePointer==None:
804 804 return 0
805 805
806 806 if self.flagIsNewFile:
807 807 return 1
808 808
809 809 if self.profileIndex < self.nProfiles:
810 810 return 1
811 811
812 812 self.__setNextFile(self.online)
813 813
814 814 return 1
815 815
816 816
817 817
818 818 def readBlock(self):
819 819 fp=h5py.File(self.filename,'r')
820 820 #File pointer to the hdf5 file
821 821 ch0=(fp['ch0']).value #First channel (100,1000)--(profiles,heights)
822 822 ch1=(fp['ch1']).value #Second channel (100,1000)--(profiles,heights)
823 823 fp.close()
824 824 ch0= ch0.swapaxes(0,1) #First channel (1000,100)--(heights,profiles)
825 825 ch1= ch1.swapaxes(0,1) #Second channel (1000,100)--(heights,profiles)
826 826 self.datablock = numpy.array([ch0,ch1])
827 827 self.flagIsNewFile=0
828 828
829 829 self.profileIndex=0
830 830
831 831 return 1
832 832
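# Illustrative sketch (standalone, not part of this class): the axis swap done by
# readBlock() and the per-profile slice taken by getData(), using small synthetic arrays.
import numpy

n_profiles, n_heights = 100, 1000
ch0 = numpy.zeros((n_profiles, n_heights), dtype=complex)    # as stored in the hdf5 file
ch1 = numpy.zeros((n_profiles, n_heights), dtype=complex)
datablock = numpy.array([ch0.swapaxes(0, 1), ch1.swapaxes(0, 1)])   # (2, heights, profiles)
one_profile = datablock[:, :, 0]                                    # (2, heights)
print(datablock.shape, one_profile.shape)                           # (2, 1000, 100) (2, 1000)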
833 833 def getData(self):
834 834 if self.flagNoMoreFiles:
835 835 self.dataOut.flagNoData = True
836 836 return 0
837 837
838 838 if self.__hasNotDataInBuffer():
839 839 if not(self.readNextBlock()):
840 840 self.dataOut.flagNoData=True
841 841 return 0
842 842
843 843 ##############################
844 844 ##############################
845 845 self.dataOut.data = self.datablock[:,:,self.profileIndex]
846 846 self.dataOut.utctime = self.__t0 + self.dataOut.ippSeconds*self.profileIndex
847 847 self.dataOut.profileIndex= self.profileIndex
848 848 self.dataOut.flagNoData=False
849 849 self.profileIndex +=1
850 850
851 851 return self.dataOut.data
852 852
853 853
854 854 def run(self, **kwargs):
855 855 '''
856 856 This method will be called many times so here you should put all your code
857 857 '''
858 858
859 859 if not self.isConfig:
860 860 self.setup(**kwargs)
861 861 self.isConfig = True
862 862 self.getData() No newline at end of file
@@ -1,629 +1,629
1 1 '''
2 2 Created on Set 9, 2015
3 3
4 4 @author: roj-idl71 Karim Kuyeng
5 5 '''
6 6
7 7 import os
8 8 import sys
9 9 import glob
10 10 import fnmatch
11 11 import datetime
12 12 import time
13 13 import re
14 14 import h5py
15 15 import numpy
16 16
17 17 try:
18 18 from gevent import sleep
19 19 except:
20 20 from time import sleep
21 21
22 22 from schainpy.model.data.jroheaderIO import RadarControllerHeader, SystemHeader
23 23 from schainpy.model.data.jrodata import Voltage
24 24 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
25 25 from numpy import imag
26 26
27 27 class AMISRReader(ProcessingUnit):
28 28 '''
29 29 classdocs
30 30 '''
31 31
32 32 def __init__(self):
33 33 '''
34 34 Constructor
35 35 '''
36 36
37 37 ProcessingUnit.__init__(self)
38 38
39 39 self.set = None
40 40 self.subset = None
41 41 self.extension_file = '.h5'
42 42 self.dtc_str = 'dtc'
43 43 self.dtc_id = 0
44 44 self.status = True
45 45 self.isConfig = False
46 46 self.dirnameList = []
47 47 self.filenameList = []
48 48 self.fileIndex = None
49 49 self.flagNoMoreFiles = False
50 50 self.flagIsNewFile = 0
51 51 self.filename = ''
52 52 self.amisrFilePointer = None
53 53
54 54
55 55 self.dataset = None
56 56
57 57
58 58
59 59
60 60 self.profileIndex = 0
61 61
62 62
63 63 self.beamCodeByFrame = None
64 64 self.radacTimeByFrame = None
65 65
66 66 self.dataset = None
67 67
68 68
69 69
70 70
71 71 self.__firstFile = True
72 72
73 73 self.buffer = None
74 74
75 75
76 76 self.timezone = 'ut'
77 77
78 78 self.__waitForNewFile = 20
79 79 self.__filename_online = None
80 80 #Is really necessary create the output object in the initializer
81 81 self.dataOut = Voltage()
82 82
83 83 def setup(self,path=None,
84 84 startDate=None,
85 85 endDate=None,
86 86 startTime=None,
87 87 endTime=None,
88 88 walk=True,
89 89 timezone='ut',
90 90 all=0,
91 91 code = None,
92 92 nCode = 0,
93 93 nBaud = 0,
94 94 online=False):
95 95
96 96 self.timezone = timezone
97 97 self.all = all
98 98 self.online = online
99 99
100 100 self.code = code
101 101 self.nCode = int(nCode)
102 102 self.nBaud = int(nBaud)
103 103
104 104
105 105
106 106 #self.findFiles()
107 107 if not(online):
108 108 #Busqueda de archivos offline
109 109 self.searchFilesOffLine(path, startDate, endDate, startTime, endTime, walk)
110 110 else:
111 111 self.searchFilesOnLine(path, startDate, endDate, startTime,endTime,walk)
112 112
113 113 if not(self.filenameList):
114 114 print("There are no files in the folder: %s"%(path))
115 115
116 116 sys.exit(-1)
117 117
118 118 self.fileIndex = -1
119 119
120 120 self.readNextFile(online)
121 121
122 122 '''
123 123 Add code
124 124 '''
125 125 self.isConfig = True
126 126
127 127 pass
128 128
129 129
130 130 def readAMISRHeader(self,fp):
131 131 header = 'Raw11/Data/RadacHeader'
132 132 self.beamCodeByPulse = fp.get(header+'/BeamCode') # LIST OF BEAMS PER PROFILE, TO BE USED ON REARRANGE
133 133 self.beamCode = fp.get('Raw11/Data/Beamcodes') # NUMBER OF CHANNELS AND IDENTIFY POSITION TO CREATE A FILE WITH THAT INFO
134 134 #self.code = fp.get(header+'/Code') # NOT USE FOR THIS
135 135 self.frameCount = fp.get(header+'/FrameCount')# NOT USE FOR THIS
136 136 self.modeGroup = fp.get(header+'/ModeGroup')# NOT USE FOR THIS
137 137 self.nsamplesPulse = fp.get(header+'/NSamplesPulse')# TO GET NSA OR USING DATA FOR THAT
138 138 self.pulseCount = fp.get(header+'/PulseCount')# NOT USE FOR THIS
139 139 self.radacTime = fp.get(header+'/RadacTime')# 1st TIME ON FILE AND CALCULATE THE REST WITH IPP*nindexprofile
140 140 self.timeCount = fp.get(header+'/TimeCount')# NOT USE FOR THIS
141 141 self.timeStatus = fp.get(header+'/TimeStatus')# NOT USE FOR THIS
142 142 self.rangeFromFile = fp.get('Raw11/Data/Samples/Range')
143 143 self.frequency = fp.get('Rx/Frequency')
144 144 txAus = fp.get('Raw11/Data/Pulsewidth')
145 145
146 146
147 147 self.nblocks = self.pulseCount.shape[0] #nblocks
148 148
149 149 self.nprofiles = self.pulseCount.shape[1] #nprofile
150 150 self.nsa = self.nsamplesPulse[0,0] #ngates
151 151 self.nchannels = self.beamCode.shape[1]
152 152 self.ippSeconds = (self.radacTime[0][1] -self.radacTime[0][0]) #Ipp in seconds
153 153 #self.__waitForNewFile = self.nblocks # wait depending on the number of blocks since each block is 1 sec
154 154 self.__waitForNewFile = self.nblocks * self.nprofiles * self.ippSeconds # wait until new file is created
155 155
156 156 #filling radar controller header parameters
157 157 self.__ippKm = self.ippSeconds *.15*1e6 # in km
158 158 self.__txA = (txAus.value)*.15 #(ipp[us]*.15km/1us) in km
159 159 self.__txB = 0
160 160 nWindows=1
161 161 self.__nSamples = self.nsa
162 162 self.__firstHeight = self.rangeFromFile[0][0]/1000 #in km
163 163 self.__deltaHeight = (self.rangeFromFile[0][1] - self.rangeFromFile[0][0])/1000
164 164
165 165 #for now until understand why the code saved is different (code included even though code not in tuf file)
166 166 #self.__codeType = 0
167 167 # self.__nCode = None
168 168 # self.__nBaud = None
169 169 self.__code = self.code
170 170 self.__codeType = 0
171 171 self.__nCode = self.nCode
172 172 self.__nBaud = self.nBaud
173 173 if self.code != None:
174 174 self.__codeType = 1
175 175 #self.__code = 0
176 176
177 177 #filling system header parameters
178 178 self.__nSamples = self.nsa
179 179 self.newProfiles = self.nprofiles // self.nchannels
180 180 self.__channelList = list(range(self.nchannels))
181 181
182 182 self.__frequency = self.frequency[0][0]
183 183
184 184
185 185
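# Illustrative sketch (standalone, not part of this class): how the IPP and gate spacing
# above are derived from the RadacTime stamps and the range vector. The 2 ms IPP and
# 30 m gates are assumed example values.
radac_time_row = [0.000, 0.002, 0.004]              # s, pulse times within one record (assumed)
range_m = [60000.0, 60030.0]                        # m, first two gates (assumed)

ipp_seconds = radac_time_row[1] - radac_time_row[0]     # 0.002 s
ipp_km = ipp_seconds * .15 * 1e6                        # 300.0 km (0.15 km per microsecond)
delta_height_km = (range_m[1] - range_m[0]) / 1000      # 0.03 km
print(ipp_seconds, ipp_km, delta_height_km)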
186 186 def createBuffers(self):
187 187
188 188 pass
189 189
190 190 def __setParameters(self,path='', startDate='',endDate='',startTime='', endTime='', walk=''):
191 191 self.path = path
192 192 self.startDate = startDate
193 193 self.endDate = endDate
194 194 self.startTime = startTime
195 195 self.endTime = endTime
196 196 self.walk = walk
197 197
198 198 def __checkPath(self):
199 199 if os.path.exists(self.path):
200 200 self.status = 1
201 201 else:
202 202 self.status = 0
203 203 print('Path %s does not exist'%self.path)
204 204
205 205 return
206 206
207 207
208 208 def __selDates(self, amisr_dirname_format):
209 209 try:
210 210 year = int(amisr_dirname_format[0:4])
211 211 month = int(amisr_dirname_format[4:6])
212 212 dom = int(amisr_dirname_format[6:8])
213 213 thisDate = datetime.date(year,month,dom)
214 214
215 215 if (thisDate>=self.startDate and thisDate <= self.endDate):
216 216 return amisr_dirname_format
217 217 except:
218 218 return None
219 219
220 220
221 221 def __findDataForDates(self,online=False):
222 222
223 223 if not(self.status):
224 224 return None
225 225
226 226 pat = '\d+.\d+'
227 227 dirnameList = [re.search(pat,x) for x in os.listdir(self.path)]
228 228 dirnameList = [x for x in dirnameList if x!=None]
229 229 dirnameList = [x.string for x in dirnameList]
230 230 if not(online):
231 231 dirnameList = [self.__selDates(x) for x in dirnameList]
232 232 dirnameList = [x for x in dirnameList if x!=None]
233 233 if len(dirnameList)>0:
234 234 self.status = 1
235 235 self.dirnameList = dirnameList
236 236 self.dirnameList.sort()
237 237 else:
238 238 self.status = 0
239 239 return None
240 240
241 241 def __getTimeFromData(self):
242 242 startDateTime_Reader = datetime.datetime.combine(self.startDate,self.startTime)
243 243 endDateTime_Reader = datetime.datetime.combine(self.endDate,self.endTime)
244 244
245 245 print('Filtering Files from %s to %s'%(startDateTime_Reader, endDateTime_Reader))
246 246 print('........................................')
247 247 filter_filenameList = []
248 248 self.filenameList.sort()
249 249 #for i in range(len(self.filenameList)-1):
250 250 for i in range(len(self.filenameList)):
251 251 filename = self.filenameList[i]
252 252 fp = h5py.File(filename,'r')
253 253 time_str = fp.get('Time/RadacTimeString')
254 254
255 255 startDateTimeStr_File = time_str[0][0].split('.')[0]
256 256 junk = time.strptime(startDateTimeStr_File, '%Y-%m-%d %H:%M:%S')
257 257 startDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec)
258 258
259 259 endDateTimeStr_File = time_str[-1][-1].split('.')[0]
260 260 junk = time.strptime(endDateTimeStr_File, '%Y-%m-%d %H:%M:%S')
261 261 endDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec)
262 262
263 263 fp.close()
264 264
265 265 if self.timezone == 'lt':
266 266 startDateTime_File = startDateTime_File - datetime.timedelta(minutes = 300)
267 267 endDateTime_File = endDateTime_File - datetime.timedelta(minutes = 300)
268 268
269 269 if (endDateTime_File>=startDateTime_Reader and endDateTime_File<endDateTime_Reader):
270 270 #self.filenameList.remove(filename)
271 271 filter_filenameList.append(filename)
272 272
273 273 if (endDateTime_File>=endDateTime_Reader):
274 274 break
275 275
276 276
277 277 filter_filenameList.sort()
278 278 self.filenameList = filter_filenameList
279 279 return 1
280 280
281 281 def __filterByGlob1(self, dirName):
282 282 filter_files = glob.glob1(dirName, '*.*%s'%self.extension_file)
283 283 filter_files.sort()
284 284 filterDict = {}
285 285 filterDict.setdefault(dirName)
286 286 filterDict[dirName] = filter_files
287 287 return filterDict
288 288
289 289 def __getFilenameList(self, fileListInKeys, dirList):
290 290 for value in fileListInKeys:
291 291 dirName = list(value.keys())[0]
292 292 for file in value[dirName]:
293 293 filename = os.path.join(dirName, file)
294 294 self.filenameList.append(filename)
295 295
296 296
297 297 def __selectDataForTimes(self, online=False):
298 298 #the time filter is not implemented yet
299 299 if not(self.status):
300 300 return None
301 301
302 302 dirList = [os.path.join(self.path,x) for x in self.dirnameList]
303 303
304 304 fileListInKeys = [self.__filterByGlob1(x) for x in dirList]
305 305
306 306 self.__getFilenameList(fileListInKeys, dirList)
307 307 if not(online):
308 308 #filter by time
309 309 if not(self.all):
310 310 self.__getTimeFromData()
311 311
312 312 if len(self.filenameList)>0:
313 313 self.status = 1
314 314 self.filenameList.sort()
315 315 else:
316 316 self.status = 0
317 317 return None
318 318
319 319 else:
320 320 #get the last file - 1
321 321 self.filenameList = [self.filenameList[-2]]
322 322
323 323 new_dirnameList = []
324 324 for dirname in self.dirnameList:
325 325 junk = numpy.array([dirname in x for x in self.filenameList])
326 326 junk_sum = junk.sum()
327 327 if junk_sum > 0:
328 328 new_dirnameList.append(dirname)
329 329 self.dirnameList = new_dirnameList
330 330 return 1
331 331
332 332 def searchFilesOnLine(self, path, startDate, endDate, startTime=datetime.time(0,0,0),
333 333 endTime=datetime.time(23,59,59),walk=True):
334 334
335 335 if endDate ==None:
336 336 startDate = datetime.datetime.utcnow().date()
337 337 endDate = datetime.datetime.utcnow().date()
338 338
339 339 self.__setParameters(path=path, startDate=startDate, endDate=endDate,startTime = startTime,endTime=endTime, walk=walk)
340 340
341 341 self.__checkPath()
342 342
343 343 self.__findDataForDates(online=True)
344 344
345 345 self.dirnameList = [self.dirnameList[-1]]
346 346
347 347 self.__selectDataForTimes(online=True)
348 348
349 349 return
350 350
351 351
352 352 def searchFilesOffLine(self,
353 353 path,
354 354 startDate,
355 355 endDate,
356 356 startTime=datetime.time(0,0,0),
357 357 endTime=datetime.time(23,59,59),
358 358 walk=True):
359 359
360 360 self.__setParameters(path, startDate, endDate, startTime, endTime, walk)
361 361
362 362 self.__checkPath()
363 363
364 364 self.__findDataForDates()
365 365
366 366 self.__selectDataForTimes()
367 367
368 368 for i in range(len(self.filenameList)):
369 369 print("%s" %(self.filenameList[i]))
370 370
371 371 return
372 372
373 373 def __setNextFileOffline(self):
374 374 idFile = self.fileIndex
375 375
376 376 while (True):
377 377 idFile += 1
378 378 if not(idFile < len(self.filenameList)):
379 379 self.flagNoMoreFiles = 1
380 380 print("No more Files")
381 381 return 0
382 382
383 383 filename = self.filenameList[idFile]
384 384
385 385 amisrFilePointer = h5py.File(filename,'r')
386 386
387 387 break
388 388
389 389 self.flagIsNewFile = 1
390 390 self.fileIndex = idFile
391 391 self.filename = filename
392 392
393 393 self.amisrFilePointer = amisrFilePointer
394 394
395 395 print("Setting the file: %s"%self.filename)
396 396
397 397 return 1
398 398
399 399
400 400 def __setNextFileOnline(self):
401 401 filename = self.filenameList[0]
402 402 if self.__filename_online != None:
403 403 self.__selectDataForTimes(online=True)
404 404 filename = self.filenameList[0]
405 405 wait = 0
406 406 while self.__filename_online == filename:
407 407 print('waiting %d seconds to get a new file...'%(self.__waitForNewFile))
408 408 if wait == 5:
409 409 return 0
410 410 sleep(self.__waitForNewFile)
411 411 self.__selectDataForTimes(online=True)
412 412 filename = self.filenameList[0]
413 413 wait += 1
414 414
415 415 self.__filename_online = filename
416 416
417 417 self.amisrFilePointer = h5py.File(filename,'r')
418 418 self.flagIsNewFile = 1
419 419 self.filename = filename
420 420 print("Setting the file: %s"%self.filename)
421 421 return 1
422 422
423 423
424 424 def readData(self):
425 425 buffer = self.amisrFilePointer.get('Raw11/Data/Samples/Data')
426 426 re = buffer[:,:,:,0]
427 427 im = buffer[:,:,:,1]
428 428 dataset = re + im*1j
429 429 self.radacTime = self.amisrFilePointer.get('Raw11/Data/RadacHeader/RadacTime')
430 430 timeset = self.radacTime[:,0]
431 431 return dataset,timeset
432 432
433 433 def reshapeData(self):
434 434 #self.beamCodeByPulse, self.beamCode, self.nblocks, self.nprofiles, self.nsa,
435 435 channels = self.beamCodeByPulse[0,:]
436 436 nchan = self.nchannels
437 437 #self.newProfiles = self.nprofiles/nchan #must be defined on filljroheader
438 438 nblocks = self.nblocks
439 439 nsamples = self.nsa
440 440
441 441 #Dimensions : nChannels, nProfiles, nSamples
442 442 new_block = numpy.empty((nblocks, nchan, self.newProfiles, nsamples), dtype="complex64")
443 443 ############################################
444 444
445 445 for thisChannel in range(nchan):
446 446 new_block[:,thisChannel,:,:] = self.dataset[:,numpy.where(channels==self.beamCode[0][thisChannel])[0],:]
447 447
448 448
449 449 new_block = numpy.transpose(new_block, (1,0,2,3))
450 450 new_block = numpy.reshape(new_block, (nchan,-1, nsamples))
451 451
452 452 return new_block
453 453
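# Illustrative sketch (standalone, not part of this class): the beam de-interleaving done
# by reshapeData(), using tiny synthetic beam-code tables (2 beams, 4 pulses, 3 gates).
import numpy

beam_code = numpy.array([[101, 102]])                        # one code per channel
beam_code_by_pulse = numpy.array([[101, 102, 101, 102]])     # code of every transmitted pulse
dataset = numpy.arange(1 * 4 * 3, dtype="complex64").reshape(1, 4, 3)  # (blocks, pulses, gates)

nchan, nblocks, nsamples = 2, 1, 3
new_profiles = beam_code_by_pulse.shape[1] // nchan
new_block = numpy.empty((nblocks, nchan, new_profiles, nsamples), dtype="complex64")
for ch in range(nchan):
    sel = numpy.where(beam_code_by_pulse[0, :] == beam_code[0][ch])[0]
    new_block[:, ch, :, :] = dataset[:, sel, :]
new_block = numpy.transpose(new_block, (1, 0, 2, 3)).reshape(nchan, -1, nsamples)
print(new_block.shape)                                       # (2, 2, 3)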
454 454 def updateIndexes(self):
455 455
456 456 pass
457 457
458 458 def fillJROHeader(self):
459 459
460 460 #fill radar controller header
461 461 self.dataOut.radarControllerHeaderObj = RadarControllerHeader(ippKm=self.__ippKm,
462 462 txA=self.__txA,
463 463 txB=0,
464 464 nWindows=1,
465 465 nHeights=self.__nSamples,
466 466 firstHeight=self.__firstHeight,
467 467 deltaHeight=self.__deltaHeight,
468 468 codeType=self.__codeType,
469 469 nCode=self.__nCode, nBaud=self.__nBaud,
470 470 code = self.__code,
471 471 fClock=1)
472 472
473 473
474 474
475 475 #fill system header
476 476 self.dataOut.systemHeaderObj = SystemHeader(nSamples=self.__nSamples,
477 477 nProfiles=self.newProfiles,
478 478 nChannels=len(self.__channelList),
479 479 adcResolution=14,
480 480 pciDioBusWith=32)
481 481
482 482 self.dataOut.type = "Voltage"
483 483
484 484 self.dataOut.data = None
485 485
486 486 self.dataOut.dtype = numpy.dtype([('real','<i8'),('imag','<i8')])
487 487
488 488 # self.dataOut.nChannels = 0
489 489
490 490 # self.dataOut.nHeights = 0
491 491
492 492 self.dataOut.nProfiles = self.newProfiles*self.nblocks
493 493
494 494 #self.dataOut.heightList = self.__firstHeigth + numpy.arange(self.__nSamples, dtype = numpy.float)*self.__deltaHeigth
495 495 ranges = numpy.reshape(self.rangeFromFile.value,(-1))
496 496 self.dataOut.heightList = ranges/1000.0 #km
497 497
498 498
499 499 self.dataOut.channelList = self.__channelList
500 500
501 self.dataOut.blocksize = self.dataOut.getNChannels() * self.dataOut.getNHeights()
501 self.dataOut.blocksize = self.dataOut.nChannels * self.dataOut.nHeights
502 502
503 503 # self.dataOut.channelIndexList = None
504 504
505 505 self.dataOut.flagNoData = True
506 506
507 507 #Set to TRUE if the data is discontinuous
508 508 self.dataOut.flagDiscontinuousBlock = False
509 509
510 510 self.dataOut.utctime = None
511 511
512 512 #self.dataOut.timeZone = -5 #self.__timezone/60 #timezone like jroheader, difference in minutes between UTC and localtime
513 513 if self.timezone == 'lt':
514 514 self.dataOut.timeZone = time.timezone / 60. #get the timezone in minutes
515 515 else:
516 516 self.dataOut.timeZone = 0 #by default time is UTC
517 517
518 518 self.dataOut.dstFlag = 0
519 519
520 520 self.dataOut.errorCount = 0
521 521
522 522 self.dataOut.nCohInt = 1
523 523
524 524 self.dataOut.flagDecodeData = False #assume the data is already decoded
525 525 
526 526 self.dataOut.flagDeflipData = False #assume the data is not flipped
527 527
528 528 self.dataOut.flagShiftFFT = False
529 529
530 530 self.dataOut.ippSeconds = self.ippSeconds
531 531
532 532 #Time interval between profiles
533 533 #self.dataOut.timeInterval = self.dataOut.ippSeconds * self.dataOut.nCohInt
534 534
535 535 self.dataOut.frequency = self.__frequency
536 536
537 537 self.dataOut.realtime = self.online
538 538 pass
539 539
540 540 def readNextFile(self,online=False):
541 541
542 542 if not(online):
543 543 newFile = self.__setNextFileOffline()
544 544 else:
545 545 newFile = self.__setNextFileOnline()
546 546
547 547 if not(newFile):
548 548 return 0
549 549
550 550 #if self.__firstFile:
551 551 self.readAMISRHeader(self.amisrFilePointer)
552 552 self.createBuffers()
553 553 self.fillJROHeader()
554 554 #self.__firstFile = False
555 555
556 556
557 557
558 558 self.dataset,self.timeset = self.readData()
559 559
560 560 if self.endDate!=None:
561 561 endDateTime_Reader = datetime.datetime.combine(self.endDate,self.endTime)
562 562 time_str = self.amisrFilePointer.get('Time/RadacTimeString')
563 563 startDateTimeStr_File = time_str[0][0].split('.')[0]
564 564 junk = time.strptime(startDateTimeStr_File, '%Y-%m-%d %H:%M:%S')
565 565 startDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec)
566 566 if self.timezone == 'lt':
567 567 startDateTime_File = startDateTime_File - datetime.timedelta(minutes = 300)
568 568 if (startDateTime_File>endDateTime_Reader):
569 569 return 0
570 570
571 571 self.jrodataset = self.reshapeData()
572 572 #----self.updateIndexes()
573 573 self.profileIndex = 0
574 574
575 575 return 1
576 576
577 577
578 578 def __hasNotDataInBuffer(self):
579 579 if self.profileIndex >= (self.newProfiles*self.nblocks):
580 580 return 1
581 581 return 0
582 582
583 583
584 584 def getData(self):
585 585
586 586 if self.flagNoMoreFiles:
587 587 self.dataOut.flagNoData = True
588 588 return 0
589 589
590 590 if self.__hasNotDataInBuffer():
591 591 if not (self.readNextFile(self.online)):
592 592 return 0
593 593
594 594
595 595 if self.dataset is None: # set this condition when there is no more data to read
596 596 self.dataOut.flagNoData = True
597 597 return 0
598 598
599 599 #self.dataOut.data = numpy.reshape(self.jrodataset[self.profileIndex,:],(1,-1))
600 600
601 601 self.dataOut.data = self.jrodataset[:,self.profileIndex,:]
602 602
603 603 #self.dataOut.utctime = self.jrotimeset[self.profileIndex]
604 604 #check the jro data basic header and verify that it is compatible with this value
605 605 #self.dataOut.utctime = self.timeset + (self.profileIndex * self.ippSeconds * self.nchannels)
606 606 indexprof = numpy.mod(self.profileIndex, self.newProfiles)
607 607 indexblock = self.profileIndex // self.newProfiles
608 608 #print indexblock, indexprof
609 609 self.dataOut.utctime = self.timeset[indexblock] + (indexprof * self.ippSeconds * self.nchannels)
610 610 self.dataOut.profileIndex = self.profileIndex
611 611 self.dataOut.flagNoData = False
612 612 # if indexprof == 0:
613 613 # print self.dataOut.utctime
614 614
615 615 self.profileIndex += 1
616 616
617 617 return self.dataOut.data
618 618
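# Illustrative sketch (standalone, not part of this class): how getData() splits the flat
# profile index into (block, profile-within-block) to time-stamp each profile. newProfiles,
# ippSeconds, nchannels and the block start times are assumed example values.
new_profiles, ipp_seconds, nchannels = 2, 0.002, 2
block_start_times = [1000.0, 1010.0]                 # utc seconds, one per block (assumed)

profile_index = 3
indexprof = profile_index % new_profiles             # 1
indexblock = profile_index // new_profiles           # 1
utctime = block_start_times[indexblock] + indexprof * ipp_seconds * nchannels
print(indexblock, indexprof, utctime)                # 1 1 1010.004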
619 619
620 620 def run(self, **kwargs):
621 621 '''
622 622 This method will be called many times so here you should put all your code
623 623 '''
624 624
625 625 if not self.isConfig:
626 626 self.setup(**kwargs)
627 627 self.isConfig = True
628 628
629 629 self.getData()
@@ -1,800 +1,800
1 1 import os
2 2 import sys
3 3 import glob
4 4 import fnmatch
5 5 import datetime
6 6 import time
7 7 import re
8 8 import h5py
9 9 import numpy
10 10
11 11 from scipy.optimize import curve_fit
12 12 from scipy import asarray as ar, exp
13 13 from scipy import stats
14 14
15 15 from numpy.ma.core import getdata
16 16
17 17 SPEED_OF_LIGHT = 299792458
18 18 SPEED_OF_LIGHT = 3e8
19 19
20 20 try:
21 21 from gevent import sleep
22 22 except:
23 23 from time import sleep
24 24
25 25 from schainpy.model.data.jrodata import Spectra
26 26 #from schainpy.model.data.BLTRheaderIO import FileHeader, RecordHeader
27 27 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
28 28 #from schainpy.model.io.jroIO_bltr import BLTRReader
29 29 from numpy import imag, shape, NaN, empty
30 30
31 31
32 32 class Header(object):
33 33
34 34 def __init__(self):
35 35 raise NotImplementedError
36 36
37 37 def read(self):
38 38
39 39 raise NotImplementedError
40 40
41 41 def write(self):
42 42
43 43 raise NotImplementedError
44 44
45 45 def printInfo(self):
46 46
47 47 message = "#" * 50 + "\n"
48 48 message += self.__class__.__name__.upper() + "\n"
49 49 message += "#" * 50 + "\n"
50 50
51 51 keyList = list(self.__dict__.keys())
52 52 keyList.sort()
53 53
54 54 for key in keyList:
55 55 message += "%s = %s" % (key, self.__dict__[key]) + "\n"
56 56
57 57 if "size" not in keyList:
58 58 attr = getattr(self, "size", None)
59 59
60 60 if attr:
61 61 message += "%s = %s" % ("size", attr) + "\n"
62 62
63 63 # print message
64 64
65 65
66 66 FILE_HEADER = numpy.dtype([ # HEADER 1024bytes
67 67 ('Hname', 'a32'), # Original file name
68 68 # Date and time when the file was created
69 69 ('Htime', numpy.str_, 32),
70 70 # Name of operator who created the file
71 71 ('Hoper', numpy.str_, 64),
72 72 # Place where the measurements was carried out
73 73 ('Hplace', numpy.str_, 128),
74 74 # Description of measurements
75 75 ('Hdescr', numpy.str_, 256),
76 76 ('Hdummy', numpy.str_, 512), # Reserved space
77 77 # Main chunk 8bytes
78 78 # Main chunk signature FZKF or NUIG
79 79 ('Msign', numpy.str_, 4),
80 80 ('MsizeData', '<i4'), # Size of data block main chunk
81 81 # Processing DSP parameters 36bytes
82 82 ('PPARsign', numpy.str_, 4), # PPAR signature
83 83 ('PPARsize', '<i4'), # PPAR size of block
84 84 ('PPARprf', '<i4'), # Pulse repetition frequency
85 85 ('PPARpdr', '<i4'), # Pulse duration
86 86 ('PPARsft', '<i4'), # FFT length
87 87 # Number of spectral (in-coherent) averages
88 88 ('PPARavc', '<i4'),
89 89 # Number of lowest range gate for moment estimation
90 90 ('PPARihp', '<i4'),
91 91 # Count for gates for moment estimation
92 92 ('PPARchg', '<i4'),
93 93 # switch on/off polarimetric measurements. Should be 1.
94 94 ('PPARpol', '<i4'),
95 95 # Service DSP parameters 112bytes
96 96 # STC attenuation on the lowest ranges on/off
97 97 ('SPARatt', '<i4'),
98 98 ('SPARtx', '<i4'), # OBSOLETE
99 99 ('SPARaddGain0', '<f4'), # OBSOLETE
100 100 ('SPARaddGain1', '<f4'), # OBSOLETE
101 101 # Debug only. It normal mode it is 0.
102 102 ('SPARwnd', '<i4'),
103 103 # Delay between sync pulse and tx pulse for phase corr, ns
104 104 ('SPARpos', '<i4'),
105 105 # "add to pulse" to compensate for delay between the leading edge of driver pulse and envelope of the RF signal.
106 106 ('SPARadd', '<i4'),
107 107 # Time for measuring txn pulse phase. OBSOLETE
108 108 ('SPARlen', '<i4'),
109 109 ('SPARcal', '<i4'), # OBSOLETE
110 110 ('SPARnos', '<i4'), # OBSOLETE
111 111 ('SPARof0', '<i4'), # detection threshold
112 112 ('SPARof1', '<i4'), # OBSOLETE
113 113 ('SPARswt', '<i4'), # 2nd moment estimation threshold
114 114 ('SPARsum', '<i4'), # OBSOLETE
115 115 ('SPARosc', '<i4'), # flag Oscillosgram mode
116 116 ('SPARtst', '<i4'), # OBSOLETE
117 117 ('SPARcor', '<i4'), # OBSOLETE
118 118 ('SPARofs', '<i4'), # OBSOLETE
119 119 # Hildebrand div noise detection on noise gate
120 120 ('SPARhsn', '<i4'),
121 121 # Hildebrand div noise detection on all gates
122 122 ('SPARhsa', '<f4'),
123 123 ('SPARcalibPow_M', '<f4'), # OBSOLETE
124 124 ('SPARcalibSNR_M', '<f4'), # OBSOLETE
125 125 ('SPARcalibPow_S', '<f4'), # OBSOLETE
126 126 ('SPARcalibSNR_S', '<f4'), # OBSOLETE
127 127 # Lowest range gate for spectra saving Raw_Gate1 >=5
128 128 ('SPARrawGate1', '<i4'),
129 129 # Number of range gates with atmospheric signal
130 130 ('SPARrawGate2', '<i4'),
131 131 # flag - IQ or spectra saving on/off
132 132 ('SPARraw', '<i4'),
133 133 ('SPARprc', '<i4'), ]) # flag - Moment estimation switched on/off
134 134
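# Illustrative sketch (assumes only the FILE_HEADER dtype defined above): the header is a
# fixed 1180 bytes (1024-byte text block + 8-byte main chunk + 36-byte PPAR + 112-byte SPAR),
# so it can be inspected standalone with numpy.fromfile; the file path in the usage note
# below is hypothetical.
def peek_file_header(path):
    # Read exactly one FILE_HEADER record from the start of a .zspca file.
    with open(path, 'rb') as fp:
        hdr = numpy.fromfile(fp, FILE_HEADER, 1)
    return {
        'prf_hz': int(hdr['PPARprf'][0]),           # pulse repetition frequency
        'fft_len': int(hdr['PPARsft'][0]),          # number of spectral bins
        'gate_range': (int(hdr['SPARrawGate1'][0]),
                       int(hdr['SPARrawGate2'][0])),  # first/last raw gate
    }
# e.g. peek_file_header('/data/mira35c/example.zspca')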
135 135
136 136 class FileHeaderMIRA35c(Header):
137 137
138 138 def __init__(self):
139 139
140 140 self.Hname = None
141 141 self.Htime = None
142 142 self.Hoper = None
143 143 self.Hplace = None
144 144 self.Hdescr = None
145 145 self.Hdummy = None
146 146
147 147 self.Msign = None
148 148 self.MsizeData = None
149 149
150 150 self.PPARsign = None
151 151 self.PPARsize = None
152 152 self.PPARprf = None
153 153 self.PPARpdr = None
154 154 self.PPARsft = None
155 155 self.PPARavc = None
156 156 self.PPARihp = None
157 157 self.PPARchg = None
158 158 self.PPARpol = None
159 159 # Service DSP parameters
160 160 self.SPARatt = None
161 161 self.SPARtx = None
162 162 self.SPARaddGain0 = None
163 163 self.SPARaddGain1 = None
164 164 self.SPARwnd = None
165 165 self.SPARpos = None
166 166 self.SPARadd = None
167 167 self.SPARlen = None
168 168 self.SPARcal = None
169 169 self.SPARnos = None
170 170 self.SPARof0 = None
171 171 self.SPARof1 = None
172 172 self.SPARswt = None
173 173 self.SPARsum = None
174 174 self.SPARosc = None
175 175 self.SPARtst = None
176 176 self.SPARcor = None
177 177 self.SPARofs = None
178 178 self.SPARhsn = None
179 179 self.SPARhsa = None
180 180 self.SPARcalibPow_M = None
181 181 self.SPARcalibSNR_M = None
182 182 self.SPARcalibPow_S = None
183 183 self.SPARcalibSNR_S = None
184 184 self.SPARrawGate1 = None
185 185 self.SPARrawGate2 = None
186 186 self.SPARraw = None
187 187 self.SPARprc = None
188 188
189 189 self.FHsize = 1180
190 190
191 191 def FHread(self, fp):
192 192
193 193 header = numpy.fromfile(fp, FILE_HEADER, 1)
194 194 ''' numpy.fromfile(file, dtype, count, sep='')
195 195 file : file or str
196 196 Open file object or filename.
197 197
198 198 dtype : data-type
199 199 Data type of the returned array. For binary files, it is used to determine
200 200 the size and byte-order of the items in the file.
201 201
202 202 count : int
203 203 Number of items to read. -1 means all items (i.e., the complete file).
204 204
205 205 sep : str
206 206 Separator between items if file is a text file. Empty ("") separator means
207 207 the file should be treated as binary. Spaces (" ") in the separator match zero
208 208 or more whitespace characters. A separator consisting only of spaces must match
209 209 at least one whitespace.
210 210
211 211 '''
212 212
213 213 self.Hname = str(header['Hname'][0])
214 214 self.Htime = str(header['Htime'][0])
215 215 self.Hoper = str(header['Hoper'][0])
216 216 self.Hplace = str(header['Hplace'][0])
217 217 self.Hdescr = str(header['Hdescr'][0])
218 218 self.Hdummy = str(header['Hdummy'][0])
219 219 # 1024
220 220
221 221 self.Msign = str(header['Msign'][0])
222 222 self.MsizeData = header['MsizeData'][0]
223 223 # 8
224 224
225 225 self.PPARsign = str(header['PPARsign'][0])
226 226 self.PPARsize = header['PPARsize'][0]
227 227 self.PPARprf = header['PPARprf'][0]
228 228 self.PPARpdr = header['PPARpdr'][0]
229 229 self.PPARsft = header['PPARsft'][0]
230 230 self.PPARavc = header['PPARavc'][0]
231 231 self.PPARihp = header['PPARihp'][0]
232 232 self.PPARchg = header['PPARchg'][0]
233 233 self.PPARpol = header['PPARpol'][0]
234 234 # Service DSP parameters
235 235 # 36
236 236
237 237 self.SPARatt = header['SPARatt'][0]
238 238 self.SPARtx = header['SPARtx'][0]
239 239 self.SPARaddGain0 = header['SPARaddGain0'][0]
240 240 self.SPARaddGain1 = header['SPARaddGain1'][0]
241 241 self.SPARwnd = header['SPARwnd'][0]
242 242 self.SPARpos = header['SPARpos'][0]
243 243 self.SPARadd = header['SPARadd'][0]
244 244 self.SPARlen = header['SPARlen'][0]
245 245 self.SPARcal = header['SPARcal'][0]
246 246 self.SPARnos = header['SPARnos'][0]
247 247 self.SPARof0 = header['SPARof0'][0]
248 248 self.SPARof1 = header['SPARof1'][0]
249 249 self.SPARswt = header['SPARswt'][0]
250 250 self.SPARsum = header['SPARsum'][0]
251 251 self.SPARosc = header['SPARosc'][0]
252 252 self.SPARtst = header['SPARtst'][0]
253 253 self.SPARcor = header['SPARcor'][0]
254 254 self.SPARofs = header['SPARofs'][0]
255 255 self.SPARhsn = header['SPARhsn'][0]
256 256 self.SPARhsa = header['SPARhsa'][0]
257 257 self.SPARcalibPow_M = header['SPARcalibPow_M'][0]
258 258 self.SPARcalibSNR_M = header['SPARcalibSNR_M'][0]
259 259 self.SPARcalibPow_S = header['SPARcalibPow_S'][0]
260 260 self.SPARcalibSNR_S = header['SPARcalibSNR_S'][0]
261 261 self.SPARrawGate1 = header['SPARrawGate1'][0]
262 262 self.SPARrawGate2 = header['SPARrawGate2'][0]
263 263 self.SPARraw = header['SPARraw'][0]
264 264 self.SPARprc = header['SPARprc'][0]
265 265 # 112
266 266 # 1180
267 267 # print 'Pointer fp header', fp.tell()
268 268 # print ' '
269 269 # print 'SPARrawGate'
270 270 # print self.SPARrawGate2 - self.SPARrawGate1
271 271
272 272 # print ' '
273 273 # print 'Hname'
274 274 # print self.Hname
275 275
276 276 # print ' '
277 277 # print 'Msign'
278 278 # print self.Msign
279 279
280 280 def write(self, fp):
281 281
282 282 headerTuple = (self.Hname,
283 283 self.Htime,
284 284 self.Hoper,
285 285 self.Hplace,
286 286 self.Hdescr,
287 287 self.Hdummy)
288 288
289 289 header = numpy.array(headerTuple, FILE_HEADER)
290 290 # numpy.array(object, dtype=None, copy=True, order=None, subok=False, ndmin=0)
291 291 header.tofile(fp)
292 292 ''' ndarray.tofile(fid, sep, format) Write array to a file as text or binary (default).
293 293
294 294 fid : file or str
295 295 An open file object, or a string containing a filename.
296 296
297 297 sep : str
298 298 Separator between array items for text output. If "" (empty), a binary file is written,
299 299 equivalent to file.write(a.tobytes()).
300 300
301 301 format : str
302 302 Format string for text file output. Each entry in the array is formatted to text by
303 303 first converting it to the closest Python type, and then using "format" % item.
304 304
305 305 '''
306 306
307 307 return 1
308 308
309 309
310 310 SRVI_HEADER = numpy.dtype([
311 311 ('SignatureSRVI1', numpy.str_, 4),
312 312 ('SizeOfDataBlock1', '<i4'),
313 313 ('DataBlockTitleSRVI1', numpy.str_, 4),
314 314 ('SizeOfSRVI1', '<i4'), ])
315 315
316 316
317 317 class SRVIHeader(Header):
318 318 def __init__(self, SignatureSRVI1=0, SizeOfDataBlock1=0, DataBlockTitleSRVI1=0, SizeOfSRVI1=0):
319 319
320 320 self.SignatureSRVI1 = SignatureSRVI1
321 321 self.SizeOfDataBlock1 = SizeOfDataBlock1
322 322 self.DataBlockTitleSRVI1 = DataBlockTitleSRVI1
323 323 self.SizeOfSRVI1 = SizeOfSRVI1
324 324
325 325 self.SRVIHsize = 16
326 326
327 327 def SRVIread(self, fp):
328 328
329 329 header = numpy.fromfile(fp, SRVI_HEADER, 1)
330 330
331 331 self.SignatureSRVI1 = str(header['SignatureSRVI1'][0])
332 332 self.SizeOfDataBlock1 = header['SizeOfDataBlock1'][0]
333 333 self.DataBlockTitleSRVI1 = str(header['DataBlockTitleSRVI1'][0])
334 334 self.SizeOfSRVI1 = header['SizeOfSRVI1'][0]
335 335 # 16
336 336 print('Pointer fp SRVIheader', fp.tell())
337 337
338 338
339 339 SRVI_STRUCTURE = numpy.dtype([
340 340 ('frame_cnt', '<u4'),
341 341 ('time_t', '<u4'), #
342 342 ('tpow', '<f4'), #
343 343 ('npw1', '<f4'), #
344 344 ('npw2', '<f4'), #
345 345 ('cpw1', '<f4'), #
346 346 ('pcw2', '<f4'), #
347 347 ('ps_err', '<u4'), #
348 348 ('te_err', '<u4'), #
349 349 ('rc_err', '<u4'), #
350 350 ('grs1', '<u4'), #
351 351 ('grs2', '<u4'), #
352 352 ('azipos', '<f4'), #
353 353 ('azivel', '<f4'), #
354 354 ('elvpos', '<f4'), #
355 355 ('elvvel', '<f4'), #
356 356 ('northAngle', '<f4'),
357 357 ('microsec', '<u4'), #
358 358 ('azisetvel', '<f4'), #
359 359 ('elvsetpos', '<f4'), #
360 360 ('RadarConst', '<f4'), ]) #
361 361
362 362
363 363 class RecordHeader(Header):
364 364
365 365 def __init__(self, frame_cnt=0, time_t=0, tpow=0, npw1=0, npw2=0,
366 366 cpw1=0, pcw2=0, ps_err=0, te_err=0, rc_err=0, grs1=0,
367 367 grs2=0, azipos=0, azivel=0, elvpos=0, elvvel=0, northangle=0,
368 368 microsec=0, azisetvel=0, elvsetpos=0, RadarConst=0, RecCounter=0, Off2StartNxtRec=0):
369 369
370 370 self.frame_cnt = frame_cnt
371 371 self.dwell = time_t
372 372 self.tpow = tpow
373 373 self.npw1 = npw1
374 374 self.npw2 = npw2
375 375 self.cpw1 = cpw1
376 376 self.pcw2 = pcw2
377 377 self.ps_err = ps_err
378 378 self.te_err = te_err
379 379 self.rc_err = rc_err
380 380 self.grs1 = grs1
381 381 self.grs2 = grs2
382 382 self.azipos = azipos
383 383 self.azivel = azivel
384 384 self.elvpos = elvpos
385 385 self.elvvel = elvvel
386 386 self.northAngle = northangle
387 387 self.microsec = microsec
388 388 self.azisetvel = azisetvel
389 389 self.elvsetpos = elvsetpos
390 390 self.RadarConst = RadarConst
391 391 self.RHsize = 84
392 392 self.RecCounter = RecCounter
393 393 self.Off2StartNxtRec = Off2StartNxtRec
394 394
395 395 def RHread(self, fp):
396 396
397 397 # startFp = open(fp,"rb") #The method tell() returns the current position of the file read/write pointer within the file.
398 398
399 399 #OffRHeader= 1180 + self.RecCounter*(self.Off2StartNxtRec)
400 400 #startFp.seek(OffRHeader, os.SEEK_SET)
401 401
402 402         # print 'Block position: ', OffRHeader
403 403
404 404 header = numpy.fromfile(fp, SRVI_STRUCTURE, 1)
405 405
406 406 self.frame_cnt = header['frame_cnt'][0]
407 407 self.time_t = header['time_t'][0] #
408 408 self.tpow = header['tpow'][0] #
409 409 self.npw1 = header['npw1'][0] #
410 410 self.npw2 = header['npw2'][0] #
411 411 self.cpw1 = header['cpw1'][0] #
412 412 self.pcw2 = header['pcw2'][0] #
413 413 self.ps_err = header['ps_err'][0] #
414 414 self.te_err = header['te_err'][0] #
415 415 self.rc_err = header['rc_err'][0] #
416 416 self.grs1 = header['grs1'][0] #
417 417 self.grs2 = header['grs2'][0] #
418 418 self.azipos = header['azipos'][0] #
419 419 self.azivel = header['azivel'][0] #
420 420 self.elvpos = header['elvpos'][0] #
421 421 self.elvvel = header['elvvel'][0] #
422 422 self.northAngle = header['northAngle'][0] #
423 423 self.microsec = header['microsec'][0] #
424 424 self.azisetvel = header['azisetvel'][0] #
425 425 self.elvsetpos = header['elvsetpos'][0] #
426 426 self.RadarConst = header['RadarConst'][0] #
427 427 # 84
428 428
429 429 # print 'Pointer fp RECheader', fp.tell()
430 430
431 431 #self.ipp= 0.5*(SPEED_OF_LIGHT/self.PRFhz)
432 432
433 433 #self.RHsize = 180+20*self.nChannels
434 434 #self.Datasize= self.nProfiles*self.nChannels*self.nHeights*2*4
435 435 # print 'Datasize',self.Datasize
436 436 #endFp = self.OffsetStartHeader + self.RecCounter*self.Off2StartNxtRec
437 437
438 438 print('==============================================')
439 439
440 440 print('==============================================')
441 441
442 442 return 1
443 443
444 444
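# Illustrative sketch mirroring the commented-out seek logic in RHread above: records start
# after the 1180-byte file header (FHsize) and are separated by Off2StartNxtRec bytes; each
# record begins with a 16-byte SRVI header (SRVIHsize) followed by an 84-byte record header
# (RHsize) and the data block. The stride value in the usage note is hypothetical.
def record_offset(rec_counter, off2start_next_rec, fh_size=1180):
    # Byte offset of record number `rec_counter` from the beginning of the file.
    return fh_size + rec_counter * off2start_next_rec
# e.g. fp.seek(record_offset(2, 4096), os.SEEK_SET)  # jump to the third record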
445 445 class MIRA35CReader (ProcessingUnit, FileHeaderMIRA35c, SRVIHeader, RecordHeader):
446 446
447 447 path = None
448 448 startDate = None
449 449 endDate = None
450 450 startTime = None
451 451 endTime = None
452 452 walk = None
453 453 isConfig = False
454 454
455 455 fileList = None
456 456
457 457 # metadata
458 458 TimeZone = None
459 459 Interval = None
460 460 heightList = None
461 461
462 462 # data
463 463 data = None
464 464 utctime = None
465 465
466 466 def __init__(self, **kwargs):
467 467
468 468         # Remove the inheritance from the base class
469 469 ProcessingUnit.__init__(self, **kwargs)
470 470 self.PointerReader = 0
471 471 self.FileHeaderFlag = False
472 472 self.utc = None
473 473 self.ext = ".zspca"
474 474 self.optchar = "P"
475 475 self.fpFile = None
476 476 self.fp = None
477 477 self.BlockCounter = 0
478 478 self.dtype = None
479 479 self.fileSizeByHeader = None
480 480 self.filenameList = []
481 481 self.fileSelector = 0
482 482 self.Off2StartNxtRec = 0
483 483 self.RecCounter = 0
484 484 self.flagNoMoreFiles = 0
485 485 self.data_spc = None
486 486 # self.data_cspc=None
487 487 self.data_output = None
488 488 self.path = None
489 489 self.OffsetStartHeader = 0
490 490 self.Off2StartData = 0
491 491 self.ipp = 0
492 492 self.nFDTdataRecors = 0
493 493 self.blocksize = 0
494 494 self.dataOut = Spectra()
495 495 self.profileIndex = 1 # Always
496 496 self.dataOut.flagNoData = False
497 497 self.dataOut.nRdPairs = 0
498 498 self.dataOut.data_spc = None
499 499 self.nextfileflag = True
500 500 self.dataOut.RadarConst = 0
501 501 self.dataOut.HSDV = []
502 502 self.dataOut.NPW = []
503 503 self.dataOut.COFA = []
504 504 # self.dataOut.noise = 0
505 505
506 506 def Files2Read(self, fp):
507 507 '''
508 508         Counts the .zspca files that exist in the folder to be read.
509 509         It also builds a sorted list with the names of the files to read.
510 510 '''
511 511 # self.__checkPath()
512 512
513 513 # Gets the list of files within the fp address
514 514 ListaData = os.listdir(fp)
515 515         # Sort the list of files by name
516 516 ListaData = sorted(ListaData)
517 517 nFiles = 0 # File Counter
518 518 FileList = [] # A list is created that will contain the .fdt files
519 519 for IndexFile in ListaData:
520 520 if '.zspca' in IndexFile and '.gz' not in IndexFile:
521 521 FileList.append(IndexFile)
522 522 nFiles += 1
523 523
524 524 # print 'Files2Read'
525 525         # print 'There are '+str(nFiles)+' .fdt files'
526 526
527 527         self.filenameList = FileList  # List of files sorted by name
528 528
529 529 def run(self, **kwargs):
530 530 '''
531 531         This method starts the data input and is called repeatedly.
532 532         It first verifies that setup() has been run and then continues acquiring
533 533         the data to be processed with getData().
534 534 '''
535 535 if not self.isConfig:
536 536 self.setup(**kwargs)
537 537 self.isConfig = True
538 538
539 539 self.getData()
540 540
541 541 def setup(self, path=None,
542 542 startDate=None,
543 543 endDate=None,
544 544 startTime=None,
545 545 endTime=None,
546 546 walk=True,
547 547 timezone='utc',
548 548 code=None,
549 549 online=False,
550 550 ReadMode=None, **kwargs):
551 551
552 552 self.isConfig = True
553 553
554 554 self.path = path
555 555 self.startDate = startDate
556 556 self.endDate = endDate
557 557 self.startTime = startTime
558 558 self.endTime = endTime
559 559 self.walk = walk
560 560 # self.ReadMode=int(ReadMode)
561 561
562 562 pass
563 563
564 564 def getData(self):
565 565 '''
566 566         Before running this function, check whether there is still an unread file,
567 567         whether there are still blocks to read, or whether the data block is empty.
568 568 
569 569         It then calls the file reading method (readFile).
570 570
571 571 '''
572 572
573 573 if self.flagNoMoreFiles:
574 574 self.dataOut.flagNoData = True
575 575             print('flagNoData becomes True')
576 576 return 0
577 577
578 578 self.fp = self.path
579 579 self.Files2Read(self.fp)
580 580 self.readFile(self.fp)
581 581
582 582 self.dataOut.data_spc = self.dataOut_spc # self.data_spc.copy()
583 583 self.dataOut.RadarConst = self.RadarConst
584 584 self.dataOut.data_output = self.data_output
585 585 self.dataOut.noise = self.dataOut.getNoise()
586 586 # print 'ACAAAAAA', self.dataOut.noise
587 587 self.dataOut.data_spc = self.dataOut.data_spc + self.dataOut.noise
588 588 self.dataOut.normFactor = 1
589 589 # print 'self.dataOut.noise',self.dataOut.noise
590 590
591 591 return self.dataOut.data_spc
592 592
593 593 def readFile(self, fp):
594 594 '''
595 595         Indicate whether you are reading in Online or Offline mode and load
596 596         the parameters for this file reading mode.
597 597 
598 598         Then two actions must be performed:
599 599 
600 600         1. Get the MIRA35c FileHeader.
601 601 2. Start reading the first block.
602 602 '''
603 603
604 604         # The full path of the file to be read is built from the folder address and the file name
605 605 print("File: ", self.fileSelector + 1)
606 606
607 607 if self.fileSelector < len(self.filenameList):
608 608
609 609 self.fpFile = str(fp) + '/' + \
610 610 str(self.filenameList[self.fileSelector])
611 611
612 612 if self.nextfileflag == True:
613 613 self.fp = open(self.fpFile, "rb")
614 614                 self.nextfileflag = False
615 615
616 616 '''HERE STARTING THE FILE READING'''
617 617
618 618 self.fheader = FileHeaderMIRA35c()
619 619             self.fheader.FHread(self.fp)  # MIRA35c FileHeader reading
620 620
621 621 self.SPARrawGate1 = self.fheader.SPARrawGate1
622 622 self.SPARrawGate2 = self.fheader.SPARrawGate2
623 623 self.Num_Hei = self.SPARrawGate2 - self.SPARrawGate1
624 624 self.Num_Bins = self.fheader.PPARsft
625 625 self.dataOut.nFFTPoints = self.fheader.PPARsft
626 626
627 627 self.Num_inCoh = self.fheader.PPARavc
628 628 self.dataOut.PRF = self.fheader.PPARprf
629 629 self.dataOut.frequency = 34.85 * 10**9
630 630 self.Lambda = SPEED_OF_LIGHT / self.dataOut.frequency
631 631 self.dataOut.ippSeconds = 1. / float(self.dataOut.PRF)
632 632
633 633 pulse_width = self.fheader.PPARpdr * 10**-9
634 634 self.__deltaHeigth = 0.5 * SPEED_OF_LIGHT * pulse_width
635 635
636 636 self.data_spc = numpy.zeros((self.Num_Hei, self.Num_Bins, 2))
637 637 self.dataOut.HSDV = numpy.zeros((self.Num_Hei, 2))
638 638
639 639 self.Ze = numpy.zeros(self.Num_Hei)
640 640 self.ETA = numpy.zeros(([2, self.Num_Hei]))
641 641
642 642 self.readBlock() # Block reading
643 643
644 644 else:
645 645 print('readFile FlagNoData becomes true')
646 646 self.flagNoMoreFiles = True
647 647 self.dataOut.flagNoData = True
648 648             self.FileHeaderFlag = True
649 649 return 0
650 650
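    # Worked example of the relations readFile applies above (illustrative values,
    # not read from any file):
    #   wavelength  = SPEED_OF_LIGHT / 34.85e9                 ~ 8.6 mm (Ka band)
    #   deltaHeigth = 0.5 * SPEED_OF_LIGHT * (PPARpdr * 1e-9)   e.g. 200 ns -> ~30 m
    #   ippSeconds  = 1.0 / PPARprf                             (inverse of the PRF)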
651 651 def readBlock(self):
652 652 '''
653 653         Check whether the block has data; if it does not, move on to the next file.
654 654
655 655 Then the following is done:
656 656
657 657 1. Read the RecordHeader
658 658 2. Fill the buffer with the current block number.
659 659
660 660 '''
661 661
662 662 if self.PointerReader > 1180:
663 663 self.fp.seek(self.PointerReader, os.SEEK_SET)
664 664 self.FirstPoint = self.PointerReader
665 665
666 666 else:
667 667 self.FirstPoint = 1180
668 668
669 669 self.srviHeader = SRVIHeader()
670 670
671 671         self.srviHeader.SRVIread(self.fp)  # Read the SRVI header
672 672
673 673         self.blocksize = self.srviHeader.SizeOfDataBlock1  # Get the size of the data block
674 674
675 675 if self.blocksize == 148:
676 676 print('blocksize == 148 bug')
677 677 jump = numpy.fromfile(self.fp, [('jump', numpy.str_, 140)], 1)
678 678
679 679             # Read the SRVI header again
680 680 self.srviHeader.SRVIread(self.fp)
681 681
682 682 if not self.srviHeader.SizeOfSRVI1:
683 683 self.fileSelector += 1
684 684             self.nextfileflag = True
685 685             self.FileHeaderFlag = True
686 686
687 687 self.recordheader = RecordHeader()
688 688 self.recordheader.RHread(self.fp)
689 689 self.RadarConst = self.recordheader.RadarConst
690 690 dwell = self.recordheader.time_t
691 691 npw1 = self.recordheader.npw1
692 692 npw2 = self.recordheader.npw2
693 693
694 694 self.dataOut.channelList = list(range(1))
695 695 self.dataOut.nIncohInt = self.Num_inCoh
696 696 self.dataOut.nProfiles = self.Num_Bins
697 697 self.dataOut.nCohInt = 1
698 698 self.dataOut.windowOfFilter = 1
699 699 self.dataOut.utctime = dwell
700 700 self.dataOut.timeZone = 0
701 701
702 self.dataOut.outputInterval = self.dataOut.getTimeInterval()
702 self.dataOut.outputInterval = self.dataOut.timeInterval
703 703 self.dataOut.heightList = self.SPARrawGate1 * self.__deltaHeigth + \
704 704 numpy.array(list(range(self.Num_Hei))) * self.__deltaHeigth
705 705
706 706 self.HSDVsign = numpy.fromfile(self.fp, [('HSDV', numpy.str_, 4)], 1)
707 707 self.SizeHSDV = numpy.fromfile(self.fp, [('SizeHSDV', '<i4')], 1)
708 708 self.HSDV_Co = numpy.fromfile(
709 709 self.fp, [('HSDV_Co', '<f4')], self.Num_Hei)
710 710 self.HSDV_Cx = numpy.fromfile(
711 711 self.fp, [('HSDV_Cx', '<f4')], self.Num_Hei)
712 712
713 713 self.COFAsign = numpy.fromfile(self.fp, [('COFA', numpy.str_, 4)], 1)
714 714 self.SizeCOFA = numpy.fromfile(self.fp, [('SizeCOFA', '<i4')], 1)
715 715 self.COFA_Co = numpy.fromfile(
716 716 self.fp, [('COFA_Co', '<f4')], self.Num_Hei)
717 717 self.COFA_Cx = numpy.fromfile(
718 718 self.fp, [('COFA_Cx', '<f4')], self.Num_Hei)
719 719
720 720 self.ZSPCsign = numpy.fromfile(
721 721 self.fp, [('ZSPCsign', numpy.str_, 4)], 1)
722 722 self.SizeZSPC = numpy.fromfile(self.fp, [('SizeZSPC', '<i4')], 1)
723 723
724 724 self.dataOut.HSDV[0] = self.HSDV_Co[:][0]
725 725 self.dataOut.HSDV[1] = self.HSDV_Cx[:][0]
726 726
727 727 for irg in range(self.Num_Hei):
728 728 # Number of spectral sub pieces containing significant power
729 729 nspc = numpy.fromfile(self.fp, [('nspc', 'int16')], 1)[0][0]
730 730
731 731 for k in range(nspc):
732 732 # Index of the spectral bin where the piece is beginning
733 733 binIndex = numpy.fromfile(
734 734 self.fp, [('binIndex', 'int16')], 1)[0][0]
735 735 nbins = numpy.fromfile(self.fp, [('nbins', 'int16')], 1)[
736 736 0][0] # Number of bins of the piece
737 737
738 738 # Co_Channel
739 739 jbin = numpy.fromfile(self.fp, [('jbin', 'uint16')], nbins)[
740 740                     0][0]  # Spectrum piece to be normalized
741 741 jmax = numpy.fromfile(self.fp, [('jmax', 'float32')], 1)[
742 742                     0][0]  # Maximum value used to normalize the piece
743 743
744 744 self.data_spc[irg, binIndex:binIndex + nbins, 0] = self.data_spc[irg,
745 745 binIndex:binIndex + nbins, 0] + jbin / 65530. * jmax
746 746
747 747 # Cx_Channel
748 748 jbin = numpy.fromfile(
749 749 self.fp, [('jbin', 'uint16')], nbins)[0][0]
750 750 jmax = numpy.fromfile(self.fp, [('jmax', 'float32')], 1)[0][0]
751 751
752 752 self.data_spc[irg, binIndex:binIndex + nbins, 1] = self.data_spc[irg,
753 753 binIndex:binIndex + nbins, 1] + jbin / 65530. * jmax
754 754
755 755 for bin in range(self.Num_Bins):
756 756
757 757 self.data_spc[:, bin, 0] = self.data_spc[:,
758 758 bin, 0] - self.dataOut.HSDV[:, 0]
759 759
760 760 self.data_spc[:, bin, 1] = self.data_spc[:,
761 761 bin, 1] - self.dataOut.HSDV[:, 1]
762 762
763 763         numpy.set_printoptions(threshold=sys.maxsize)
764 764
765 765 self.data_spc = numpy.where(self.data_spc > 0., self.data_spc, 0)
766 766
767 767 self.dataOut.COFA = numpy.array([self.COFA_Co, self.COFA_Cx])
768 768
769 769 print(' ')
770 770 print('SPC', numpy.shape(self.dataOut.data_spc))
771 771 # print 'SPC',self.dataOut.data_spc
772 772
773 773 noinor1 = 713031680
774 774 noinor2 = 30
775 775
776 776 npw1 = 1 # 0**(npw1/10) * noinor1 * noinor2
777 777 npw2 = 1 # 0**(npw2/10) * noinor1 * noinor2
778 778 self.dataOut.NPW = numpy.array([npw1, npw2])
779 779
780 780 print(' ')
781 781
782 782 self.data_spc = numpy.transpose(self.data_spc, (2, 1, 0))
783 783 self.data_spc = numpy.fft.fftshift(self.data_spc, axes=1)
784 784
785 785 self.data_spc = numpy.fliplr(self.data_spc)
786 786
787 787 self.data_spc = numpy.where(self.data_spc > 0., self.data_spc, 0)
788 788 self.dataOut_spc = numpy.ones([1, self.Num_Bins, self.Num_Hei])
789 789 self.dataOut_spc[0, :, :] = self.data_spc[0, :, :]
790 790 # print 'SHAPE', self.dataOut_spc.shape
791 791 # For nyquist correction:
792 792 # fix = 20 # ~3m/s
793 793 #shift = self.Num_Bins/2 + fix
794 794 #self.data_spc = numpy.array([ self.data_spc[: , self.Num_Bins-shift+1: , :] , self.data_spc[: , 0:self.Num_Bins-shift , :]])
795 795
796 796         '''Block reading: the block data is received and reshaped
797 797         into its final shape.
798 798 '''
799 799
800 800 self.PointerReader = self.fp.tell() No newline at end of file
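A standalone sketch of the per-gate "significant piece" layout that readBlock walks through above; the field widths and the 65530 scaling come from the numpy.fromfile calls in readBlock, while the function name and the bare-dtype reads are an illustrative simplification (the original accumulates into self.data_spc using structured dtypes).

import numpy

def decode_spectrum_pieces(fp, num_bins):
    # One range gate stores `nspc` significant pieces; each piece carries a start
    # bin, a length, and then, per channel (Co, Cx), `nbins` uint16 samples plus a
    # float32 maximum used to rescale them (jbin / 65530. * jmax).
    spectrum = numpy.zeros((num_bins, 2), dtype=numpy.float32)
    nspc = int(numpy.fromfile(fp, numpy.int16, 1)[0])
    for _ in range(nspc):
        bin_index = int(numpy.fromfile(fp, numpy.int16, 1)[0])
        nbins = int(numpy.fromfile(fp, numpy.int16, 1)[0])
        for ch in (0, 1):                                  # 0: Co, 1: Cx
            jbin = numpy.fromfile(fp, numpy.uint16, nbins)
            jmax = float(numpy.fromfile(fp, numpy.float32, 1)[0])
            spectrum[bin_index:bin_index + nbins, ch] += jbin / 65530. * jmax
    return spectrum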
@@ -1,519 +1,519
1 1 import numpy,math,random,time
2 2 #---------------1 Inherit from JRODataReader
3 3 from schainpy.model.io.jroIO_base import *
4 4 #---------------2 Inherit the properties of ProcessingUnit
5 5 from schainpy.model.proc.jroproc_base import ProcessingUnit,Operation,MPDecorator
6 6 #---------------3 Import the BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader classes
7 7 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader,SystemHeader,RadarControllerHeader, ProcessingHeader
8 8 #---------------4 Import the Voltage object
9 9 from schainpy.model.data.jrodata import Voltage
10 10
11 11 class SimulatorReader(JRODataReader, ProcessingUnit):
12 12 incIntFactor = 1
13 13 nFFTPoints = 0
14 14 FixPP_IncInt = 1
15 15 FixRCP_IPP = 1000
16 16 FixPP_CohInt = 1
17 17 Tau_0 = 250
18 18 AcqH0_0 = 70
19 19 H0 = AcqH0_0
20 20 AcqDH_0 = 1.25
21 21 DH0 = AcqDH_0
22 22 Bauds = 32
23 23 BaudWidth = None
24 24 FixRCP_TXA = 40
25 25 FixRCP_TXB = 70
26 26 fAngle = 2.0*math.pi*(1/16)
27 27 DC_level = 500
28 28 stdev = 8
29 29 Num_Codes = 2
30 30 #code0 = numpy.array([1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,1,1,1,0,1,1,0,1,0,0,0,1,1,1,0,1])
31 31 #code1 = numpy.array([1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,1,0,1,1,1,0,0,0,1,0])
32 32 #Dyn_snCode = numpy.array([Num_Codes,Bauds])
33 33 Dyn_snCode = None
34 34 Samples = 200
35 35 channels = 2
36 36 pulses = None
37 37 Reference = None
38 38 pulse_size = None
39 39 prof_gen = None
40 40 Fdoppler = 100
41 41 Hdoppler = 36
42 42 Adoppler = 300
43 43 frequency = 9345
44 44 nTotalReadFiles = 1000
45 45
46 46 def __init__(self):
47 47 """
48 48         Initializer of the SimulatorReader class, used to
49 49         generate simulated voltage data.
50 50         Input:
51 51             dataOut: Object of the Voltage class.
52 52             This object will be used to store
53 53             one data profile each time a
54 54             request is made (getData)
55 55 """
56 56 ProcessingUnit.__init__(self)
57 57 print(" [ START ] init - Metodo Simulator Reader")
58 58
59 59 self.isConfig = False
60 60 self.basicHeaderObj = BasicHeader(LOCALTIME)
61 61 self.systemHeaderObj = SystemHeader()
62 62 self.radarControllerHeaderObj = RadarControllerHeader()
63 63 self.processingHeaderObj = ProcessingHeader()
64 64 self.profileIndex = 2**32-1
65 65 self.dataOut = Voltage()
66 66 #code0 = numpy.array([1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,1,1,1,0,1,1,0,1,0,0,0,1,1,1,0,1])
67 67 code0 = numpy.array([1,1,1,-1,1,1,-1,1,1,1,1,-1,-1,-1,1,-1,1,1,1,-1,1,1,-1,1,-1,-1,-1,1,1,1,-1,1])
68 68 #code1 = numpy.array([1,1,1,0,1,1,0,1,1,1,1,0,0,0,1,0,0,0,0,1,0,0,1,0,1,1,1,0,0,0,1,0])
69 69 code1 = numpy.array([1,1,1,-1,1,1,-1,1,1,1,1,-1,-1,-1,1,-1,-1,-1,-1,1,-1,-1,1,-1,1,1,1,-1,-1,-1,1,-1])
70 70 #self.Dyn_snCode = numpy.array([code0,code1])
71 71 self.Dyn_snCode = None
72 72
73 73 def set_kwargs(self, **kwargs):
74 74 for key, value in kwargs.items():
75 75 setattr(self, key, value)
76 76
77 77 def __hasNotDataInBuffer(self):
78 78
79 79 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock* self.nTxs:
80 80 if self.nReadBlocks>0:
81 81 tmp = self.dataOut.utctime
82 82 tmp_utc = int(self.dataOut.utctime)
83 83 tmp_milisecond = int((tmp-tmp_utc)*1000)
84 84 self.basicHeaderObj.utc = tmp_utc
85 85 self.basicHeaderObj.miliSecond= tmp_milisecond
86 86 return 1
87 87 return 0
88 88
89 89 def setNextFile(self):
90 90 """Set the next file to be readed open it and parse de file header"""
91 91
92 92 if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
93 93 self.nReadFiles=self.nReadFiles+1
94 94 if self.nReadFiles > self.nTotalReadFiles:
95 95 self.flagNoMoreFiles=1
96 96 raise schainpy.admin.SchainWarning('No more files to read')
97 97
98 98 print('------------------- [Opening file] ------------------------------',self.nReadFiles)
99 99 self.nReadBlocks = 0
100 100 #if self.nReadBlocks==0:
101 101 # self.readFirstHeader()
102 102
103 103 def __setNewBlock(self):
104 104 self.setNextFile()
105 105 if self.flagIsNewFile:
106 106 return 1
107 107
108 108 def readNextBlock(self):
109 109 while True:
110 110 self.__setNewBlock()
111 111 if not(self.readBlock()):
112 112 return 0
113 113 self.getBasicHeader()
114 114 break
115 115 if self.verbose:
116 116 print("[Reading] Block No. %d/%d -> %s" %(self.nReadBlocks,
117 117 self.processingHeaderObj.dataBlocksPerFile,
118 118 self.dataOut.datatime.ctime()) )
119 119 return 1
120 120
121 121 def getFirstHeader(self):
122 122 self.getBasicHeader()
123 123 self.dataOut.processingHeaderObj = self.processingHeaderObj.copy()
124 124 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
125 125 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
126 126 self.dataOut.dtype = self.dtype
127 127
128 128 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
129 129 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.nHeights) * self.processingHeaderObj.deltaHeight + self.processingHeaderObj.firstHeight
130 130 self.dataOut.channelList = list(range(self.systemHeaderObj.nChannels))
131 131 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
132 132         # assume the data is not decoded
133 133         self.dataOut.flagDecodeData = self.processingHeaderObj.flag_decode
134 134         # assume the data has not been deflipped
135 135 self.dataOut.flagDeflipData = self.processingHeaderObj.flag_deflip
136 136 self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft
137 137 self.dataOut.frequency = self.frequency
138 138
139 139 def getBasicHeader(self):
140 140 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond / \
141 141 1000. + self.profileIndex * self.radarControllerHeaderObj.ippSeconds
142 142
143 143 self.dataOut.flagDiscontinuousBlock = self.flagDiscontinuousBlock
144 144 self.dataOut.timeZone = self.basicHeaderObj.timeZone
145 145 self.dataOut.dstFlag = self.basicHeaderObj.dstFlag
146 146 self.dataOut.errorCount = self.basicHeaderObj.errorCount
147 147 self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime
148 148 self.dataOut.ippSeconds = self.radarControllerHeaderObj.ippSeconds / self.nTxs
149 149
150 150 def readFirstHeader(self):
151 151
152 152 datatype = int(numpy.log2((self.processingHeaderObj.processFlags &
153 153 PROCFLAG.DATATYPE_MASK)) - numpy.log2(PROCFLAG.DATATYPE_CHAR))
154 154 if datatype == 0:
155 155 datatype_str = numpy.dtype([('real', '<i1'), ('imag', '<i1')])
156 156 elif datatype == 1:
157 157 datatype_str = numpy.dtype([('real', '<i2'), ('imag', '<i2')])
158 158 elif datatype == 2:
159 159 datatype_str = numpy.dtype([('real', '<i4'), ('imag', '<i4')])
160 160 elif datatype == 3:
161 161 datatype_str = numpy.dtype([('real', '<i8'), ('imag', '<i8')])
162 162 elif datatype == 4:
163 163 datatype_str = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
164 164 elif datatype == 5:
165 165 datatype_str = numpy.dtype([('real', '<f8'), ('imag', '<f8')])
166 166 else:
167 167 raise ValueError('Data type was not defined')
168 168
169 169 self.dtype = datatype_str
170 170
171 171
172 172 def set_RCH(self, expType=2, nTx=1,ipp=None, txA=0, txB=0,
173 173 nWindows=None, nHeights=None, firstHeight=None, deltaHeight=None,
174 174 numTaus=0, line6Function=0, line5Function=0, fClock=None,
175 175 prePulseBefore=0, prePulseAfter=0,
176 176 codeType=0, nCode=0, nBaud=0, code=None,
177 177 flip1=0, flip2=0,Taus=0):
178 178 self.radarControllerHeaderObj.expType = expType
179 179 self.radarControllerHeaderObj.nTx = nTx
180 180 self.radarControllerHeaderObj.ipp = float(ipp)
181 181 self.radarControllerHeaderObj.txA = float(txA)
182 182 self.radarControllerHeaderObj.txB = float(txB)
183 183 self.radarControllerHeaderObj.rangeIpp = b'A\n'#ipp
184 184 self.radarControllerHeaderObj.rangeTxA = b''
185 185 self.radarControllerHeaderObj.rangeTxB = b''
186 186
187 187 self.radarControllerHeaderObj.nHeights = int(nHeights)
188 188 self.radarControllerHeaderObj.firstHeight = numpy.array([firstHeight])
189 189 self.radarControllerHeaderObj.deltaHeight = numpy.array([deltaHeight])
190 190 self.radarControllerHeaderObj.samplesWin = numpy.array([nHeights])
191 191
192 192
193 193 self.radarControllerHeaderObj.nWindows = nWindows
194 194 self.radarControllerHeaderObj.numTaus = numTaus
195 195 self.radarControllerHeaderObj.codeType = codeType
196 196 self.radarControllerHeaderObj.line6Function = line6Function
197 197 self.radarControllerHeaderObj.line5Function = line5Function
198 198 #self.radarControllerHeaderObj.fClock = fClock
199 199 self.radarControllerHeaderObj.prePulseBefore= prePulseBefore
200 200 self.radarControllerHeaderObj.prePulseAfter = prePulseAfter
201 201
202 202 self.radarControllerHeaderObj.flip1 = flip1
203 203 self.radarControllerHeaderObj.flip2 = flip2
204 204
205 205 self.radarControllerHeaderObj.code_size = 0
206 206 if self.radarControllerHeaderObj.codeType != 0:
207 207 self.radarControllerHeaderObj.nCode = nCode
208 208 self.radarControllerHeaderObj.nBaud = nBaud
209 209 self.radarControllerHeaderObj.code = code
210 210 self.radarControllerHeaderObj.code_size = int(numpy.ceil(nBaud / 32.)) * nCode * 4
211 211
212 212 if fClock is None and deltaHeight is not None:
213 213 self.fClock = 0.15 / (deltaHeight * 1e-6)
214 214 self.radarControllerHeaderObj.fClock = self.fClock
215 215 if numTaus==0:
216 216 self.radarControllerHeaderObj.Taus = numpy.array(0,'<f4')
217 217 else:
218 218 self.radarControllerHeaderObj.Taus = numpy.array(Taus,'<f4')
219 219
220 220 def set_PH(self, dtype=0, blockSize=0, profilesPerBlock=0,
221 221 dataBlocksPerFile=0, nWindows=0, processFlags=0, nCohInt=0,
222 222 nIncohInt=0, totalSpectra=0, nHeights=0, firstHeight=0,
223 223 deltaHeight=0, samplesWin=0, spectraComb=0, nCode=0,
224 224 code=0, nBaud=None, shif_fft=False, flag_dc=False,
225 225 flag_cspc=False, flag_decode=False, flag_deflip=False):
226 226
227 227 self.processingHeaderObj.dtype = dtype
228 228 self.processingHeaderObj.profilesPerBlock = profilesPerBlock
229 229 self.processingHeaderObj.dataBlocksPerFile = dataBlocksPerFile
230 230 self.processingHeaderObj.nWindows = nWindows
231 231 self.processingHeaderObj.processFlags = processFlags
232 232 self.processingHeaderObj.nCohInt = nCohInt
233 233 self.processingHeaderObj.nIncohInt = nIncohInt
234 234 self.processingHeaderObj.totalSpectra = totalSpectra
235 235
236 236 self.processingHeaderObj.nHeights = int(nHeights)
237 237 self.processingHeaderObj.firstHeight = firstHeight#numpy.array([firstHeight])#firstHeight
238 238 self.processingHeaderObj.deltaHeight = deltaHeight#numpy.array([deltaHeight])#deltaHeight
239 239 self.processingHeaderObj.samplesWin = nHeights#numpy.array([nHeights])#nHeights
240 240
241 241 def set_BH(self, utc = 0, miliSecond = 0, timeZone = 0):
242 242 self.basicHeaderObj.utc = utc
243 243 self.basicHeaderObj.miliSecond = miliSecond
244 244 self.basicHeaderObj.timeZone = timeZone
245 245
246 246 def set_SH(self, nSamples=0, nProfiles=0, nChannels=0, adcResolution=14, pciDioBusWidth=32):
247 247 #self.systemHeaderObj.size = size
248 248 self.systemHeaderObj.nSamples = nSamples
249 249 self.systemHeaderObj.nProfiles = nProfiles
250 250 self.systemHeaderObj.nChannels = nChannels
251 251 self.systemHeaderObj.adcResolution = adcResolution
252 252 self.systemHeaderObj.pciDioBusWidth = pciDioBusWidth
253 253
254 254 def init_acquisition(self):
255 255
256 256 if self.nFFTPoints != 0:
257 257             self.incIntFactor = self.processingHeaderObj.profilesPerBlock/self.nFFTPoints
258 258 if (self.FixPP_IncInt > self.incIntFactor):
259 259 self.incIntFactor = self.FixPP_IncInt/ self.incIntFactor
260 260 elif(self.FixPP_IncInt< self.incIntFactor):
261 261 print("False alert...")
262 262
263 263 ProfilesperBlock = self.processingHeaderObj.profilesPerBlock
264 264
265 265 self.timeperblock =int(((self.FixRCP_IPP
266 266 *ProfilesperBlock
267 267 *self.FixPP_CohInt
268 268 *self.incIntFactor)
269 269 /150.0)
270 270 *0.9
271 271 +0.5)
272 272         # for each channel
273 273 self.profiles = ProfilesperBlock*self.FixPP_CohInt
274 274 self.profiles = ProfilesperBlock
275 275 self.Reference = int((self.Tau_0-self.AcqH0_0)/(self.AcqDH_0)+0.5)
276 276 self.BaudWidth = int((self.FixRCP_TXA/self.AcqDH_0)/self.Bauds + 0.5 )
277 277
278 278 if (self.BaudWidth==0):
279 279 self.BaudWidth=1
280 280
281 281 def init_pulse(self,Num_Codes=Num_Codes,Bauds=Bauds,BaudWidth=BaudWidth,Dyn_snCode=Dyn_snCode):
282 282
283 283 Num_Codes = Num_Codes
284 284 Bauds = Bauds
285 285 BaudWidth = BaudWidth
286 286 Dyn_snCode = Dyn_snCode
287 287
288 288 if Dyn_snCode:
289 289 print("EXISTE")
290 290 else:
291 291 print("No existe")
292 292
293 293 if Dyn_snCode: # if Bauds:
294 294 pulses = list(range(0,Num_Codes))
295 295 num_codes = Num_Codes
296 296 for i in range(num_codes):
297 297 pulse_size = Bauds*BaudWidth
298 298 pulses[i] = numpy.zeros(pulse_size)
299 299 for j in range(Bauds):
300 300 for k in range(BaudWidth):
301 301 pulses[i][j*BaudWidth+k] = int(Dyn_snCode[i][j]*600)
302 302 else:
303 303 print("sin code")
304 304 pulses = list(range(1))
305 305 if self.AcqDH_0>0.149:
306 306 pulse_size = int(self.FixRCP_TXB/0.15+0.5)
307 307 else:
308 308 pulse_size = int((self.FixRCP_TXB/self.AcqDH_0)+0.5) #0.0375
309 309 pulses[0] = numpy.ones(pulse_size)
310 310 pulses = 600*pulses[0]
311 311
312 312 return pulses,pulse_size
313 313
314 314 def jro_GenerateBlockOfData(self,Samples=Samples,DC_level= DC_level,stdev=stdev,
315 315 Reference= Reference,pulses= pulses,
316 316 Num_Codes= Num_Codes,pulse_size=pulse_size,
317 317 prof_gen= prof_gen,H0 = H0,DH0=DH0,
318 318 Adoppler=Adoppler,Fdoppler= Fdoppler,Hdoppler=Hdoppler):
319 319 Samples = Samples
320 320 DC_level = DC_level
321 321 stdev = stdev
322 322 m_nR = Reference
323 323 pulses = pulses
324 324 num_codes = Num_Codes
325 325 ps = pulse_size
326 326 prof_gen = prof_gen
327 327 channels = self.channels
328 328 H0 = H0
329 329 DH0 = DH0
330 330 ippSec = self.radarControllerHeaderObj.ippSeconds
331 331 Fdoppler = self.Fdoppler
332 332 Hdoppler = self.Hdoppler
333 333 Adoppler = self.Adoppler
334 334
335 335 self.datablock = numpy.zeros([channels,prof_gen,Samples],dtype= numpy.complex64)
336 336 for i in range(channels):
337 337 for k in range(prof_gen):
338                 #·······················NOISE···············
338 #-----------------------NOISE---------------
339 339 Noise_r = numpy.random.normal(DC_level,stdev,Samples)
340 340 Noise_i = numpy.random.normal(DC_level,stdev,Samples)
341 341 Noise = numpy.zeros(Samples,dtype=complex)
342 342 Noise.real = Noise_r
343 343 Noise.imag = Noise_i
344                 #·······················PULSOS··············
344                 #-----------------------PULSES--------------
345 345 Pulso = numpy.zeros(pulse_size,dtype=complex)
346 346 Pulso.real = pulses[k%num_codes]
347 347 Pulso.imag = pulses[k%num_codes]
348                 #····················· PULSES+NOISE··········
348 #--------------------- PULSES+NOISE----------
349 349 InBuffer = numpy.zeros(Samples,dtype=complex)
350 350 InBuffer[m_nR:m_nR+ps] = Pulso
351 351 InBuffer = InBuffer+Noise
352                 #····················· ANGLE ·······························
352 #--------------------- ANGLE -------------------------------
353 353 InBuffer.real[m_nR:m_nR+ps] = InBuffer.real[m_nR:m_nR+ps]*(math.cos( self.fAngle)*5)
354 354 InBuffer.imag[m_nR:m_nR+ps] = InBuffer.imag[m_nR:m_nR+ps]*(math.sin( self.fAngle)*5)
355 355 InBuffer=InBuffer
356 356 self.datablock[i][k]= InBuffer
357 357
358         #················DOPPLER SIGNAL...............................................
358 #----------------DOPPLER SIGNAL...............................................
359 359 time_vec = numpy.linspace(0,(prof_gen-1)*ippSec,int(prof_gen))+self.nReadBlocks*ippSec*prof_gen+(self.nReadFiles-1)*ippSec*prof_gen
360 360 fd = Fdoppler #+(600.0/120)*self.nReadBlocks
361 361 d_signal = Adoppler*numpy.array(numpy.exp(1.0j*2.0*math.pi*fd*time_vec),dtype=numpy.complex64)
362         #·············Señal con ancho espectral····················
362         #-------------Signal with spectral width--------------------
363 363 if prof_gen%2==0:
364 364 min = int(prof_gen/2.0-1.0)
365 365 max = int(prof_gen/2.0)
366 366 else:
367 367 min = int(prof_gen/2.0)
368 368 max = int(prof_gen/2.0)
369 369 specw_sig = numpy.linspace(-min,max,prof_gen)
370 370 w = 4
371 371 A = 20
372 372 specw_sig = specw_sig/w
373 373 specw_sig = numpy.sinc(specw_sig)
374 374 specw_sig = A*numpy.array(specw_sig,dtype=numpy.complex64)
375         #·················· DATABLOCK + DOPPLER····················
375 #------------------ DATABLOCK + DOPPLER--------------------
376 376 HD=int(Hdoppler/self.AcqDH_0)
377 377 for i in range(12):
378 378 self.datablock[0,:,HD+i]=self.datablock[0,:,HD+i]+ d_signal# RESULT
379         #·················· DATABLOCK + DOPPLER*Sinc(x)····················
379 #------------------ DATABLOCK + DOPPLER*Sinc(x)--------------------
380 380 HD=int(Hdoppler/self.AcqDH_0)
381 381 HD=int(HD/2)
382 382 for i in range(12):
383 383 self.datablock[0,:,HD+i]=self.datablock[0,:,HD+i]+ specw_sig*d_signal# RESULT
384 384
385 385 def readBlock(self):
386 386
387 387 self.jro_GenerateBlockOfData(Samples= self.samples,DC_level=self.DC_level,
388 388 stdev=self.stdev,Reference= self.Reference,
389 389 pulses = self.pulses,Num_Codes=self.Num_Codes,
390 390 pulse_size=self.pulse_size,prof_gen=self.profiles,
391 391 H0=self.H0,DH0=self.DH0)
392 392
393 393 self.profileIndex = 0
394 394 self.flagIsNewFile = 0
395 395 self.flagIsNewBlock = 1
396 396 self.nTotalBlocks += 1
397 397 self.nReadBlocks += 1
398 398
399 399 return 1
400 400
401 401
402 402 def getData(self):
403 403 if self.flagNoMoreFiles:
404 404             self.dataOut.flagNoData = True
405 405 return 0
406 406 self.flagDiscontinuousBlock = 0
407 407 self.flagIsNewBlock = 0
408 408         if self.__hasNotDataInBuffer(): # true at this point
409 409             if not(self.readNextBlock()): # returns 1, so this "if not" skips ahead to getBasicHeader
410 410 return 0
411 411         self.getFirstHeader() # attribute
412 412
413 413 if not self.getByBlock:
414 414 self.dataOut.flagDataAsBlock = False
415 415 self.dataOut.data = self.datablock[:, self.profileIndex, :]
416 416 self.dataOut.profileIndex = self.profileIndex
417 417 self.profileIndex += 1
418 418 else:
419 419 pass
420 420 self.dataOut.flagNoData = False
421 421 self.getBasicHeader()
422 422 self.dataOut.realtime = self.online
423 423 return self.dataOut.data
424 424
425 425
426 426 def setup(self,frequency=49.92e6,incIntFactor= 1, nFFTPoints = 0, FixPP_IncInt=1,FixRCP_IPP=1000,
427 427 FixPP_CohInt= 1,Tau_0= 250,AcqH0_0 = 70 ,AcqDH_0=1.25, Bauds= 32,
428 428 FixRCP_TXA = 40, FixRCP_TXB = 50, fAngle = 2.0*math.pi*(1/16),DC_level= 50,
429 429 stdev= 8,Num_Codes = 1 , Dyn_snCode = None, samples=200,
430 430 channels=2,Fdoppler=20,Hdoppler=36,Adoppler=500,
431 431 profilesPerBlock=300,dataBlocksPerFile=120,nTotalReadFiles=10000,
432 432 **kwargs):
433 433
434 434 self.set_kwargs(**kwargs)
435 435 self.nReadBlocks = 0
436 436 self.nReadFiles = 1
437 437 print('------------------- [Opening file: ] ------------------------------',self.nReadFiles)
438 438
439 439 tmp = time.time()
440 440 tmp_utc = int(tmp)
441 441 tmp_milisecond = int((tmp-tmp_utc)*1000)
442 442 print(" SETUP -basicHeaderObj.utc",datetime.datetime.utcfromtimestamp(tmp))
443 443 if Dyn_snCode is None:
444 444 Num_Codes=1
445 445 Bauds =1
446 446
447 447
448 448
449 449 self.set_BH(utc= tmp_utc,miliSecond= tmp_milisecond,timeZone=300 )
450 450 self.set_RCH( expType=0, nTx=150,ipp=FixRCP_IPP, txA=FixRCP_TXA, txB= FixRCP_TXB,
451 451 nWindows=1 , nHeights=samples, firstHeight=AcqH0_0, deltaHeight=AcqDH_0,
452 452 numTaus=1, line6Function=0, line5Function=0, fClock=None,
453 453 prePulseBefore=0, prePulseAfter=0,
454 454 codeType=0, nCode=Num_Codes, nBaud=32, code=Dyn_snCode,
455 455 flip1=0, flip2=0,Taus=Tau_0)
456 456
457 457 self.set_PH(dtype=0, blockSize=0, profilesPerBlock=profilesPerBlock,
458 458 dataBlocksPerFile=dataBlocksPerFile, nWindows=1, processFlags=numpy.array([1024]), nCohInt=1,
459 459 nIncohInt=1, totalSpectra=0, nHeights=samples, firstHeight=AcqH0_0,
460 460 deltaHeight=AcqDH_0, samplesWin=samples, spectraComb=0, nCode=0,
461 461 code=0, nBaud=None, shif_fft=False, flag_dc=False,
462 462 flag_cspc=False, flag_decode=False, flag_deflip=False)
463 463
464 464 self.set_SH(nSamples=samples, nProfiles=profilesPerBlock, nChannels=channels)
465 465
466 466 self.readFirstHeader()
467 467
468 468 self.frequency = frequency
469 469 self.incIntFactor = incIntFactor
470 470 self.nFFTPoints = nFFTPoints
471 471 self.FixPP_IncInt = FixPP_IncInt
472 472 self.FixRCP_IPP = FixRCP_IPP
473 473 self.FixPP_CohInt = FixPP_CohInt
474 474 self.Tau_0 = Tau_0
475 475 self.AcqH0_0 = AcqH0_0
476 476 self.H0 = AcqH0_0
477 477 self.AcqDH_0 = AcqDH_0
478 478 self.DH0 = AcqDH_0
479 479 self.Bauds = Bauds
480 480 self.FixRCP_TXA = FixRCP_TXA
481 481 self.FixRCP_TXB = FixRCP_TXB
482 482 self.fAngle = fAngle
483 483 self.DC_level = DC_level
484 484 self.stdev = stdev
485 485 self.Num_Codes = Num_Codes
486 486 self.Dyn_snCode = Dyn_snCode
487 487 self.samples = samples
488 488 self.channels = channels
489 489 self.profiles = None
490 490 self.m_nReference = None
491 491 self.Baudwidth = None
492 492 self.Fdoppler = Fdoppler
493 493 self.Hdoppler = Hdoppler
494 494 self.Adoppler = Adoppler
495 495 self.nTotalReadFiles = int(nTotalReadFiles)
496 496
497 497 print("IPP ", self.FixRCP_IPP)
498 498 print("Tau_0 ",self.Tau_0)
499 499 print("AcqH0_0",self.AcqH0_0)
500 500 print("samples,window ",self.samples)
501 501 print("AcqDH_0",AcqDH_0)
502 502 print("FixRCP_TXA",self.FixRCP_TXA)
503 503 print("FixRCP_TXB",self.FixRCP_TXB)
504 504 print("Dyn_snCode",Dyn_snCode)
505 505 print("Fdoppler", Fdoppler)
506 506 print("Hdoppler",Hdoppler)
507 507 print("Vdopplermax",Fdoppler*(3.0e8/self.frequency)/2.0)
508 508 print("nTotalReadFiles", nTotalReadFiles)
509 509
510 510 self.init_acquisition()
511 511 self.pulses,self.pulse_size=self.init_pulse(Num_Codes=self.Num_Codes,Bauds=self.Bauds,BaudWidth=self.BaudWidth,Dyn_snCode=Dyn_snCode)
512 512 print(" [ END ] - SETUP metodo")
513 513 return
514 514
515 515     def run(self,**kwargs): # own method
516 516 if not(self.isConfig):
517 517 self.setup(**kwargs)
518 518 self.isConfig = True
519 519 self.getData()
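A small standalone sketch of the synthetic Doppler line that jro_GenerateBlockOfData injects and of the Vdopplermax figure printed by setup(); the numeric values are illustrative defaults (class attributes or setup() defaults), not taken from a real acquisition.

import math
import numpy

Fdoppler = 100.0        # Doppler line frequency, Hz (class default)
Adoppler = 300.0        # line amplitude (class default)
ippSec = 400e-6         # hypothetical inter-pulse period, s
prof_gen = 300          # profiles per block (setup() default)

time_vec = numpy.linspace(0, (prof_gen - 1) * ippSec, prof_gen)
d_signal = Adoppler * numpy.exp(1.0j * 2.0 * math.pi * Fdoppler * time_vec)

# Radial velocity associated with Fdoppler, as printed by setup():
frequency = 49.92e6     # Hz, default carrier used in setup()
v_max = Fdoppler * (3.0e8 / frequency) / 2.0   # ~300 m/s for these numbers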
@@ -1,602 +1,602
1 1 '''
2 2 Created on Jul 3, 2014
3 3
4 4 @author: roj-idl71
5 5 '''
6 6 import os
7 7 import datetime
8 8 import numpy
9 9
10 10 try:
11 11 from gevent import sleep
12 12 except:
13 13 from time import sleep
14 14
15 15 from schainpy.model.data.jroheaderIO import RadarControllerHeader, SystemHeader
16 16 from schainpy.model.data.jrodata import Voltage
17 17 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
18 18
19 19 try:
20 20 import digital_rf_hdf5
21 21 except:
22 22 pass
23 23
24 24 class USRPReader(ProcessingUnit):
25 25 '''
26 26 classdocs
27 27 '''
28 28
29 29 def __init__(self, **kwargs):
30 30 '''
31 31 Constructor
32 32 '''
33 33
34 34 ProcessingUnit.__init__(self, **kwargs)
35 35
36 36 self.dataOut = Voltage()
37 37 self.__printInfo = True
38 38 self.__flagDiscontinuousBlock = False
39 39 self.__bufferIndex = 9999999
40 40
41 41 self.__ippKm = None
42 42 self.__codeType = 0
43 43 self.__nCode = None
44 44 self.__nBaud = None
45 45 self.__code = None
46 46
47 47 def __getCurrentSecond(self):
48 48
49 49 return self.__thisUnixSample/self.__sample_rate
50 50
51 51 thisSecond = property(__getCurrentSecond, "I'm the 'thisSecond' property.")
52 52
53 53 def __setFileHeader(self):
54 54 '''
55 55 In this method will be initialized every parameter of dataOut object (header, no data)
56 56 '''
57 57 ippSeconds = 1.0*self.__nSamples/self.__sample_rate
58 58
59 59 nProfiles = 1.0/ippSeconds #Number of profiles in one second
60 60
61 61 self.dataOut.radarControllerHeaderObj = RadarControllerHeader(ipp=self.__ippKm,
62 62 txA=0,
63 63 txB=0,
64 64 nWindows=1,
65 65 nHeights=self.__nSamples,
66 66 firstHeight=self.__firstHeigth,
67 67 deltaHeight=self.__deltaHeigth,
68 68 codeType=self.__codeType,
69 69 nCode=self.__nCode, nBaud=self.__nBaud,
70 70 code = self.__code)
71 71
72 72 self.dataOut.systemHeaderObj = SystemHeader(nSamples=self.__nSamples,
73 73 nProfiles=nProfiles,
74 74 nChannels=len(self.__channelList),
75 75 adcResolution=14)
76 76
77 77 self.dataOut.type = "Voltage"
78 78
79 79 self.dataOut.data = None
80 80
81 81 self.dataOut.dtype = numpy.dtype([('real','<i8'),('imag','<i8')])
82 82
83 83 # self.dataOut.nChannels = 0
84 84
85 85 # self.dataOut.nHeights = 0
86 86
87 87 self.dataOut.nProfiles = nProfiles
88 88
89 89 self.dataOut.heightList = self.__firstHeigth + numpy.arange(self.__nSamples, dtype = numpy.float)*self.__deltaHeigth
90 90
91 91 self.dataOut.channelList = self.__channelList
92 92
93 self.dataOut.blocksize = self.dataOut.getNChannels() * self.dataOut.getNHeights()
93 self.dataOut.blocksize = self.dataOut.nChannels * self.dataOut.nHeights
94 94
95 95 # self.dataOut.channelIndexList = None
96 96
97 97 self.dataOut.flagNoData = True
98 98
99 99 #Set to TRUE if the data is discontinuous
100 100 self.dataOut.flagDiscontinuousBlock = False
101 101
102 102 self.dataOut.utctime = None
103 103
104 104 self.dataOut.timeZone = self.__timezone/60 #timezone like jroheader, difference in minutes between UTC and localtime
105 105
106 106 self.dataOut.dstFlag = 0
107 107
108 108 self.dataOut.errorCount = 0
109 109
110 110 self.dataOut.nCohInt = 1
111 111
112 112         self.dataOut.flagDecodeData = False #assume the data is decoded
113 113 
114 114         self.dataOut.flagDeflipData = False #assume the data has no flip
115 115
116 116 self.dataOut.flagShiftFFT = False
117 117
118 118 self.dataOut.ippSeconds = ippSeconds
119 119
120 120 #Time interval between profiles
121 121 #self.dataOut.timeInterval = self.dataOut.ippSeconds * self.dataOut.nCohInt
122 122
123 123 self.dataOut.frequency = self.__frequency
124 124
125 125 self.dataOut.realtime = self.__online
126 126
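    # Worked example of the sample-rate relations used by this reader (an assumed
    # 1 MHz sample rate and 1000 samples per profile, not read from any channel):
    #   deltaHeigth = 1e6 * 0.15 / sample_rate   ->  0.15 km, i.e. c / (2 * fs)
    #   ippSeconds  = nSamples / sample_rate     ->  1e-3 s
    #   nProfiles   = 1 / ippSeconds             ->  1000 profiles per second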
127 127 def findDatafiles(self, path, startDate=None, endDate=None):
128 128
129 129 if not os.path.isdir(path):
130 130 return []
131 131
132 132 try:
133 133 digitalReadObj = digital_rf_hdf5.read_hdf5(path, load_all_metadata=True)
134 134 except:
135 135 digitalReadObj = digital_rf_hdf5.read_hdf5(path)
136 136
137 137 channelNameList = digitalReadObj.get_channels()
138 138
139 139 if not channelNameList:
140 140 return []
141 141
142 142 metadata_dict = digitalReadObj.get_rf_file_metadata(channelNameList[0])
143 143
144 144 sample_rate = metadata_dict['sample_rate'][0]
145 145
146 146 this_metadata_file = digitalReadObj.get_metadata(channelNameList[0])
147 147
148 148 try:
149 149 timezone = this_metadata_file['timezone'].value
150 150 except:
151 151 timezone = 0
152 152
153 153 startUTCSecond, endUTCSecond = digitalReadObj.get_bounds(channelNameList[0])/sample_rate - timezone
154 154
155 155 startDatetime = datetime.datetime.utcfromtimestamp(startUTCSecond)
156 156 endDatatime = datetime.datetime.utcfromtimestamp(endUTCSecond)
157 157
158 158 if not startDate:
159 159 startDate = startDatetime.date()
160 160
161 161 if not endDate:
162 162 endDate = endDatatime.date()
163 163
164 164 dateList = []
165 165
166 166 thisDatetime = startDatetime
167 167
168 168 while(thisDatetime<=endDatatime):
169 169
170 170 thisDate = thisDatetime.date()
171 171
172 172 if thisDate < startDate:
173 173 continue
174 174
175 175 if thisDate > endDate:
176 176 break
177 177
178 178 dateList.append(thisDate)
179 179 thisDatetime += datetime.timedelta(1)
180 180
181 181 return dateList
182 182
183 183 def setup(self, path = None,
184 184 startDate = None,
185 185 endDate = None,
186 186 startTime = datetime.time(0,0,0),
187 187 endTime = datetime.time(23,59,59),
188 188 channelList = None,
189 189 nSamples = None,
190 190 ippKm = 60,
191 191 online = False,
192 192 delay = 60,
193 193 buffer_size = 1024,
194 194 **kwargs):
195 195 '''
196 196 In this method we should set all initial parameters.
197 197
198 198 Inputs:
199 199 path
200 200 startDate
201 201 endDate
202 202 startTime
203 203 endTime
204 204 set
205 205 expLabel
206 206 ext
207 207 online
208 208 delay
209 209 '''
210 210
211 211 if not os.path.isdir(path):
212 212 raise ValueError("[Reading] Directory %s does not exist" %path)
213 213
214 214 try:
215 215 self.digitalReadObj = digital_rf_hdf5.read_hdf5(path, load_all_metadata=True)
216 216 except:
217 217 self.digitalReadObj = digital_rf_hdf5.read_hdf5(path)
218 218
219 219 channelNameList = self.digitalReadObj.get_channels()
220 220
221 221 if not channelNameList:
222 222 raise ValueError("[Reading] Directory %s does not have any files" %path)
223 223
224 224 if not channelList:
225 225 channelList = list(range(len(channelNameList)))
226 226
227 227 ########## Reading metadata ######################
228 228
229 229 metadata_dict = self.digitalReadObj.get_rf_file_metadata(channelNameList[channelList[0]])
230 230
231 231 self.__sample_rate = metadata_dict['sample_rate'][0]
232 232 # self.__samples_per_file = metadata_dict['samples_per_file'][0]
233 233 self.__deltaHeigth = 1e6*0.15/self.__sample_rate
234 234
235 235 this_metadata_file = self.digitalReadObj.get_metadata(channelNameList[channelList[0]])
236 236
237 237 self.__frequency = None
238 238 try:
239 239 self.__frequency = this_metadata_file['center_frequencies'].value
240 240 except:
241 241 self.__frequency = this_metadata_file['fc'].value
242 242
243 243 if not self.__frequency:
244 244 raise ValueError("Center Frequency is not defined in metadata file")
245 245
246 246 try:
247 247 self.__timezone = this_metadata_file['timezone'].value
248 248 except:
249 249 self.__timezone = 0
250 250
251 251 self.__firstHeigth = 0
252 252
253 253 try:
254 254 codeType = this_metadata_file['codeType'].value
255 255 except:
256 256 codeType = 0
257 257
258 258 nCode = 1
259 259 nBaud = 1
260 260 code = numpy.ones((nCode, nBaud), dtype=numpy.int)
261 261
262 262 if codeType:
263 263 nCode = this_metadata_file['nCode'].value
264 264 nBaud = this_metadata_file['nBaud'].value
265 265 code = this_metadata_file['code'].value
266 266
267 267 if not ippKm:
268 268 try:
269 269 #seconds to km
270 270 ippKm = 1e6*0.15*this_metadata_file['ipp'].value
271 271 except:
272 272 ippKm = None
273 273
274 274 ####################################################
275 275 startUTCSecond = None
276 276 endUTCSecond = None
277 277
278 278 if startDate:
279 279 startDatetime = datetime.datetime.combine(startDate, startTime)
280 280 startUTCSecond = (startDatetime-datetime.datetime(1970,1,1)).total_seconds() + self.__timezone
281 281
282 282 if endDate:
283 283 endDatetime = datetime.datetime.combine(endDate, endTime)
284 284 endUTCSecond = (endDatetime-datetime.datetime(1970,1,1)).total_seconds() + self.__timezone
285 285
286 286 start_index, end_index = self.digitalReadObj.get_bounds(channelNameList[channelList[0]])
287 287
288 288 if not startUTCSecond:
289 289 startUTCSecond = start_index/self.__sample_rate
290 290
291 291 if start_index > startUTCSecond*self.__sample_rate:
292 292 startUTCSecond = start_index/self.__sample_rate
293 293
294 294 if not endUTCSecond:
295 295 endUTCSecond = end_index/self.__sample_rate
296 296
297 297 if end_index < endUTCSecond*self.__sample_rate:
298 298 endUTCSecond = end_index/self.__sample_rate
299 299
300 300 if not nSamples:
301 301 if not ippKm:
302 302 raise ValueError("[Reading] nSamples or ippKm should be defined")
303 303
304 304 nSamples = int(ippKm / (1e6*0.15/self.__sample_rate))
305 305
306 306 channelBoundList = []
307 307 channelNameListFiltered = []
308 308
309 309 for thisIndexChannel in channelList:
310 310 thisChannelName = channelNameList[thisIndexChannel]
311 311 start_index, end_index = self.digitalReadObj.get_bounds(thisChannelName)
312 312 channelBoundList.append((start_index, end_index))
313 313 channelNameListFiltered.append(thisChannelName)
314 314
315 315 self.profileIndex = 0
316 316
317 317 self.__delay = delay
318 318 self.__ippKm = ippKm
319 319 self.__codeType = codeType
320 320 self.__nCode = nCode
321 321 self.__nBaud = nBaud
322 322 self.__code = code
323 323
324 324 self.__datapath = path
325 325 self.__online = online
326 326 self.__channelList = channelList
327 327 self.__channelNameList = channelNameListFiltered
328 328 self.__channelBoundList = channelBoundList
329 329 self.__nSamples = nSamples
330 330 self.__samples_to_read = int(buffer_size*nSamples)
331 331 self.__nChannels = len(self.__channelList)
332 332
333 333 self.__startUTCSecond = startUTCSecond
334 334 self.__endUTCSecond = endUTCSecond
335 335
336 336 self.__timeInterval = 1.0 * self.__samples_to_read/self.__sample_rate #Time interval
337 337
338 338 if online:
339 339 # self.__thisUnixSample = int(endUTCSecond*self.__sample_rate - 4*self.__samples_to_read)
340 340 startUTCSecond = numpy.floor(endUTCSecond)
341 341
342 342 self.__thisUnixSample = int(startUTCSecond*self.__sample_rate) - self.__samples_to_read
343 343
344 344 self.__data_buffer = numpy.zeros((self.__nChannels, self.__samples_to_read), dtype=complex)
345 345
346 346 self.__setFileHeader()
347 347 self.isConfig = True
348 348
349 349 print("[Reading] USRP Data was found from %s to %s " %(
350 350 datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone),
351 351 datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone)
352 352 ))
353 353
354 354 print("[Reading] Starting process from %s to %s" %(datetime.datetime.utcfromtimestamp(startUTCSecond - self.__timezone),
355 355 datetime.datetime.utcfromtimestamp(endUTCSecond - self.__timezone)
356 356 ))
357 357
358 358 def __reload(self):
359 359
360 360 if not self.__online:
361 361 return
362 362
363 363 # print
364 364 # print "%s not in range [%s, %s]" %(
365 365 # datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
366 366 # datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone),
367 367 # datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone)
368 368 # )
369 369 print("[Reading] reloading metadata ...")
370 370
371 371 try:
372 372 self.digitalReadObj.reload(complete_update=True)
373 373 except:
374 374 self.digitalReadObj.reload()
375 375
376 376 start_index, end_index = self.digitalReadObj.get_bounds(self.__channelNameList[self.__channelList[0]])
377 377
378 378 if start_index > self.__startUTCSecond*self.__sample_rate:
379 379 self.__startUTCSecond = 1.0*start_index/self.__sample_rate
380 380
381 381 if end_index > self.__endUTCSecond*self.__sample_rate:
382 382 self.__endUTCSecond = 1.0*end_index/self.__sample_rate
383 383 print()
384 384 print("[Reading] New timerange found [%s, %s] " %(
385 385 datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone),
386 386 datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone)
387 387 ))
388 388
389 389 return True
390 390
391 391 return False
392 392
393 393 def __readNextBlock(self, seconds=30, volt_scale = 218776):
394 394 '''
395 395 '''
396 396
397 397 #Set the next data
398 398 self.__flagDiscontinuousBlock = False
399 399 self.__thisUnixSample += self.__samples_to_read
400 400
401 401 if self.__thisUnixSample + 2*self.__samples_to_read > self.__endUTCSecond*self.__sample_rate:
402 402 print("[Reading] There are no more data into selected time-range")
403 403
404 404 self.__reload()
405 405
406 406 if self.__thisUnixSample + 2*self.__samples_to_read > self.__endUTCSecond*self.__sample_rate:
407 407 self.__thisUnixSample -= self.__samples_to_read
408 408 return False
409 409
410 410 indexChannel = 0
411 411
412 412 dataOk = False
413 413
414 414 for thisChannelName in self.__channelNameList:
415 415
416 416 try:
417 417 result = self.digitalReadObj.read_vector_c81d(self.__thisUnixSample,
418 418 self.__samples_to_read,
419 419 thisChannelName)
420 420
421 421 except IOError as e:
422 422 #read next profile
423 423 self.__flagDiscontinuousBlock = True
424 424 print("[Reading] %s" %datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone), e)
425 425 break
426 426
427 427 if result.shape[0] != self.__samples_to_read:
428 428 self.__flagDiscontinuousBlock = True
429 429 print("[Reading] %s: Too few samples were found, just %d/%d samples" %(datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
430 430 result.shape[0],
431 431 self.__samples_to_read))
432 432 break
433 433
434 434 self.__data_buffer[indexChannel,:] = result*volt_scale
435 435
436 436 indexChannel += 1
437 437
438 438 dataOk = True
439 439
440 440 self.__utctime = self.__thisUnixSample/self.__sample_rate
441 441
442 442 if not dataOk:
443 443 return False
444 444
445 445 print("[Reading] %s: %d samples <> %f sec" %(datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
446 446 self.__samples_to_read,
447 447 self.__timeInterval))
448 448
449 449 self.__bufferIndex = 0
450 450
451 451 return True
452 452
453 453 def __isBufferEmpty(self):
454 454
455 455 if self.__bufferIndex <= self.__samples_to_read - self.__nSamples:
456 456 return False
457 457
458 458 return True
459 459
460 460 def getData(self, seconds=30, nTries=5):
461 461
462 462 '''
463 463 This method gets the data from files and puts it into the dataOut object
464 464
465 465 In addition, it increases the buffer counter by one.
466 466
467 467 Return:
468 468 data : returns one profile of voltages (heights x channels) copied from the
469 469 buffer. If there are no more files to read it returns None.
470 470
471 471 Affected:
472 472 self.dataOut
473 473 self.profileIndex
474 474 self.flagDiscontinuousBlock
475 475 self.flagIsNewBlock
476 476 '''
477 477
478 478 err_counter = 0
479 479 self.dataOut.flagNoData = True
480 480
481 481 if self.__isBufferEmpty():
482 482
483 483 self.__flagDiscontinuousBlock = False
484 484
485 485 while True:
486 486 if self.__readNextBlock():
487 487 break
488 488
489 489 if self.__thisUnixSample > self.__endUTCSecond*self.__sample_rate:
490 490 return False
491 491
492 492 if self.__flagDiscontinuousBlock:
493 493 print('[Reading] discontinuous block found ... continue with the next block')
494 494 continue
495 495
496 496 if not self.__online:
497 497 return False
498 498
499 499 err_counter += 1
500 500 if err_counter > nTries:
501 501 return False
502 502
503 503 print('[Reading] waiting %d seconds to read a new block' %seconds)
504 504 sleep(seconds)
505 505
506 506 self.dataOut.data = self.__data_buffer[:,self.__bufferIndex:self.__bufferIndex+self.__nSamples]
507 507 self.dataOut.utctime = (self.__thisUnixSample + self.__bufferIndex)/self.__sample_rate
508 508 self.dataOut.flagNoData = False
509 509 self.dataOut.flagDiscontinuousBlock = self.__flagDiscontinuousBlock
510 510 self.dataOut.profileIndex = self.profileIndex
511 511
512 512 self.__bufferIndex += self.__nSamples
513 513 self.profileIndex += 1
514 514
515 515 if self.profileIndex == self.dataOut.nProfiles:
516 516 self.profileIndex = 0
517 517
518 518 return True
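getData() above serves one profile per call by slicing the block buffer and advancing the buffer index; here is the same bookkeeping in isolation, with made-up sizes that are not related to any real acquisition:

import numpy

nChannels, nSamples, buffer_size = 2, 400, 5
data_buffer = numpy.zeros((nChannels, buffer_size * nSamples), dtype=complex)

bufferIndex = 0
while bufferIndex <= data_buffer.shape[1] - nSamples:              # same test as __isBufferEmpty()
    profile = data_buffer[:, bufferIndex:bufferIndex + nSamples]   # shape (nChannels, nSamples)
    bufferIndex += nSamples                                        # advance exactly one profile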
519 519
520 520 def printInfo(self):
521 521 '''
522 522 '''
523 523 if self.__printInfo == False:
524 524 return
525 525
526 526 # self.systemHeaderObj.printInfo()
527 527 # self.radarControllerHeaderObj.printInfo()
528 528
529 529 self.__printInfo = False
530 530
531 531 def printNumberOfBlock(self):
532 532 '''
533 533 '''
534 534
535 535 print(self.profileIndex)
536 536
537 537 def run(self, **kwargs):
538 538 '''
539 539 This method will be called many times so here you should put all your code
540 540 '''
541 541
542 542 if not self.isConfig:
543 543 self.setup(**kwargs)
544 544
545 545 self.getData(seconds=self.__delay)
546 546
547 547 return
548 548
549 549
550 550 @MPDecorator
551 551 class USRPWriter(Operation):
552 552 '''
553 553 classdocs
554 554 '''
555 555
556 556 def __init__(self, **kwargs):
557 557 '''
558 558 Constructor
559 559 '''
560 560 Operation.__init__(self, **kwargs)
561 561 self.dataOut = None
562 562
563 563 def setup(self, dataIn, path, blocksPerFile, set=0, ext=None):
564 564 '''
565 565 In this method we should set all initial parameters.
566 566
567 567 Input:
568 568 dataIn : Input data will also be the output data
569 569
570 570 '''
571 571 self.dataOut = dataIn
572 572
573 573
574 574
575 575
576 576
577 577 self.isConfig = True
578 578
579 579 return
580 580
581 581 def run(self, dataIn, **kwargs):
582 582 '''
583 583 This method will be called many times so here you should put all your code
584 584
585 585 Inputs:
586 586
587 587 dataIn : object with the data
588 588
589 589 '''
590 590
591 591 if not self.isConfig:
592 592 self.setup(dataIn, **kwargs)
593 593
594 594
595 595 if __name__ == '__main__':
596 596
597 597 readObj = USRPReader()
598 598
599 599 while True:
600 600 readObj.run(path='/Volumes/DATA/haystack/passive_radar/')
601 601 # readObj.printInfo()
602 602 readObj.printNumberOfBlock() No newline at end of file
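For offline processing the reader is normally driven with an explicit time range instead of the realtime path shown above. The sketch below is only an illustration: the keyword names are inferred from the attributes assigned inside setup(), the import path and every value are placeholders, and the exact signature may differ.

import datetime
from schainpy.model.io.jroIO_usrp import USRPReader   # module path assumed

readObj = USRPReader()

while True:
    readObj.run(path='/data/usrp_experiment/',         # placeholder path
                startDate=datetime.date(2017, 1, 1),
                endDate=datetime.date(2017, 1, 2),
                startTime=datetime.time(0, 0, 0),
                endTime=datetime.time(23, 59, 59),
                channelList=[0],
                ippKm=60,
                online=False)
    if readObj.dataOut.flagNoData:                      # crude stop condition
        break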
@@ -1,350 +1,347
1 1 import numpy
2 2
3 3 from .jroproc_base import ProcessingUnit, Operation, MPDecorator
4 4 from schainpy.model.data.jrodata import SpectraHeis
5 5 from schainpy.utils import log
6 6
7 7
8 8
9 9 class SpectraHeisProc(ProcessingUnit):
10 10
11 11 def __init__(self):#, **kwargs):
12 12
13 13 ProcessingUnit.__init__(self)#, **kwargs)
14 14
15 15 # self.buffer = None
16 16 # self.firstdatatime = None
17 17 # self.profIndex = 0
18 18 self.dataOut = SpectraHeis()
19 19
20 20 def __updateObjFromVoltage(self):
21 21
22 22 self.dataOut.timeZone = self.dataIn.timeZone
23 23 self.dataOut.dstFlag = self.dataIn.dstFlag
24 24 self.dataOut.errorCount = self.dataIn.errorCount
25 25 self.dataOut.useLocalTime = self.dataIn.useLocalTime
26 26
27 27 self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()#
28 28 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()#
29 29 self.dataOut.channelList = self.dataIn.channelList
30 30 self.dataOut.heightList = self.dataIn.heightList
31 31 # self.dataOut.dtype = self.dataIn.dtype
32 32 self.dataOut.dtype = numpy.dtype([('real','<f4'),('imag','<f4')])
33 33 # self.dataOut.nHeights = self.dataIn.nHeights
34 34 # self.dataOut.nChannels = self.dataIn.nChannels
35 35 self.dataOut.nBaud = self.dataIn.nBaud
36 36 self.dataOut.nCode = self.dataIn.nCode
37 37 self.dataOut.code = self.dataIn.code
38 38 # self.dataOut.nProfiles = 1
39 39 self.dataOut.ippFactor = 1
40 40 self.dataOut.noise_estimation = None
41 41 # self.dataOut.nProfiles = self.dataOut.nFFTPoints
42 42 self.dataOut.nFFTPoints = self.dataIn.nHeights
43 43 # self.dataOut.channelIndexList = self.dataIn.channelIndexList
44 44 # self.dataOut.flagNoData = self.dataIn.flagNoData
45 45 self.dataOut.flagDiscontinuousBlock = self.dataIn.flagDiscontinuousBlock
46 46 self.dataOut.utctime = self.dataIn.utctime
47 47 # self.dataOut.utctime = self.firstdatatime
48 48 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData # assume the data is already decoded
49 49 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData # assume the data is not flipped
50 50 # self.dataOut.flagShiftFFT = self.dataIn.flagShiftFFT
51 51 self.dataOut.nCohInt = self.dataIn.nCohInt
52 52 self.dataOut.nIncohInt = 1
53 53 # self.dataOut.ippSeconds= self.dataIn.ippSeconds
54 54 self.dataOut.windowOfFilter = self.dataIn.windowOfFilter
55 55
56 56 # self.dataOut.timeInterval = self.dataIn.timeInterval*self.dataOut.nIncohInt
57 57 # self.dataOut.set=self.dataIn.set
58 58 # self.dataOut.deltaHeight=self.dataIn.deltaHeight
59 59
60 60
61 61 def __updateObjFromFits(self):
62 62
63 63 self.dataOut.utctime = self.dataIn.utctime
64 64 # self.dataOut.channelIndexList = self.dataIn.channelIndexList
65 65
66 66 self.dataOut.channelList = self.dataIn.channelList
67 67 self.dataOut.heightList = self.dataIn.heightList
68 68 self.dataOut.data_spc = self.dataIn.data
69 69 self.dataOut.ippSeconds = self.dataIn.ippSeconds
70 70 self.dataOut.nCohInt = self.dataIn.nCohInt
71 71 self.dataOut.nIncohInt = self.dataIn.nIncohInt
72 72 # self.dataOut.timeInterval = self.dataIn.timeInterval
73 73 self.dataOut.timeZone = self.dataIn.timeZone
74 74 self.dataOut.useLocalTime = True
75 75 # self.dataOut.
76 76 # self.dataOut.
77 77
78 78 def __getFft(self):
79 79
80 80 fft_volt = numpy.fft.fft(self.dataIn.data, axis=1)
81 81 fft_volt = numpy.fft.fftshift(fft_volt,axes=(1,))
82 82 spc = numpy.abs(fft_volt * numpy.conjugate(fft_volt))/(self.dataOut.nFFTPoints)
83 83 self.dataOut.data_spc = spc
84 84
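__getFft above is a plain per-channel periodogram: FFT along the height axis, fftshift, and |V|^2 divided by the number of FFT points. The same estimate in a self-contained numpy snippet, with purely illustrative sizes and random data:

import numpy

nChannels, nFFTPoints = 2, 64
volts = (numpy.random.randn(nChannels, nFFTPoints)
         + 1j * numpy.random.randn(nChannels, nFFTPoints))

fft_volt = numpy.fft.fftshift(numpy.fft.fft(volts, axis=1), axes=(1,))
spc = numpy.abs(fft_volt * numpy.conjugate(fft_volt)) / nFFTPoints   # power spectrum per channel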
85 85 def run(self):
86 86
87 87 self.dataOut.flagNoData = True
88 88
89 89 if self.dataIn.type == "Fits":
90 90 self.__updateObjFromFits()
91 91 self.dataOut.flagNoData = False
92 92 return
93 93
94 94 if self.dataIn.type == "SpectraHeis":
95 95 self.dataOut.copy(self.dataIn)
96 96 return
97 97
98 98 if self.dataIn.type == "Voltage":
99 99 self.__updateObjFromVoltage()
100 100 self.__getFft()
101 101 self.dataOut.flagNoData = False
102 102
103 103 return
104 104
105 105 raise ValueError("The type object %s is not valid"%(self.dataIn.type))
106 106
107 107
108 108 def selectChannels(self, channelList):
109 109
110 110 channelIndexList = []
111 111
112 112 for channel in channelList:
113 113 index = self.dataOut.channelList.index(channel)
114 114 channelIndexList.append(index)
115 115
116 116 self.selectChannelsByIndex(channelIndexList)
117 117
118 118 def selectChannelsByIndex(self, channelIndexList):
119 119 """
120 120 Selects a block of data for the channels given in channelIndexList
121 121
122 122 Input:
123 123 channelIndexList : plain list of channel indexes to select, e.g. [2,3,7]
124 124
125 125 Affected:
126 126 self.dataOut.data
127 127 self.dataOut.channelIndexList
128 128 self.dataOut.nChannels
129 129 self.dataOut.m_ProcessingHeader.totalSpectra
130 130 self.dataOut.systemHeaderObj.numChannels
131 131 self.dataOut.m_ProcessingHeader.blockSize
132 132
133 133 Return:
134 134 None
135 135 """
136 136
137 137 for channelIndex in channelIndexList:
138 138 if channelIndex not in self.dataOut.channelIndexList:
139 print(channelIndexList)
140 139 raise ValueError("The value %d in channelIndexList is not valid" %channelIndex)
141 140
142 # nChannels = len(channelIndexList)
143
144 141 data_spc = self.dataOut.data_spc[channelIndexList,:]
145 142
146 143 self.dataOut.data_spc = data_spc
147 144 self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
148 145
149 146 return 1
150 147
151 148
152 149 class IncohInt4SpectraHeis(Operation):
153 150
154 151 isConfig = False
155 152
156 153 __profIndex = 0
157 154 __withOverapping = False
158 155
159 156 __byTime = False
160 157 __initime = None
161 158 __lastdatatime = None
162 159 __integrationtime = None
163 160
164 161 __buffer = None
165 162
166 163 __dataReady = False
167 164
168 165 n = None
169 166
170 167 def __init__(self):#, **kwargs):
171 168
172 169 Operation.__init__(self)#, **kwargs)
173 170 # self.isConfig = False
174 171
175 172 def setup(self, n=None, timeInterval=None, overlapping=False):
176 173 """
177 174 Set the parameters of the integration class.
178 175
179 176 Inputs:
180 177
181 178 n : Number of incoherent integrations
182 179 timeInterval : Integration time in seconds. Ignored if the parameter "n" is given
183 180 overlapping :
184 181
185 182 """
186 183
187 184 self.__initime = None
188 185 self.__lastdatatime = 0
189 186 self.__buffer = None
190 187 self.__dataReady = False
191 188
192 189
193 190 if n == None and timeInterval == None:
194 191 raise ValueError("n or timeInterval should be specified ...")
195 192
196 193 if n != None:
197 194 self.n = n
198 195 self.__byTime = False
199 196 else:
200 197 self.__integrationtime = timeInterval #* 60. #if (type(timeInterval)!=integer) -> change this line
201 198 self.n = 9999
202 199 self.__byTime = True
203 200
204 201 if overlapping:
205 202 self.__withOverapping = True
206 203 self.__buffer = None
207 204 else:
208 205 self.__withOverapping = False
209 206 self.__buffer = 0
210 207
211 208 self.__profIndex = 0
212 209
213 210 def putData(self, data):
214 211
215 212 """
216 213 Add a profile to the __buffer and increase the __profileIndex by one
217 214
218 215 """
219 216
220 217 if not self.__withOverapping:
221 218 self.__buffer += data.copy()
222 219 self.__profIndex += 1
223 220 return
224 221
225 222 #Overlapping data
226 223 nChannels, nHeis = data.shape
227 224 data = numpy.reshape(data, (1, nChannels, nHeis))
228 225
229 226 #If the buffer is empty then it takes the data value
230 227 if self.__buffer is None:
231 228 self.__buffer = data
232 229 self.__profIndex += 1
233 230 return
234 231
236 233 #If the buffer length is lower than n then stack the data value
236 233 if self.__profIndex < self.n:
237 234 self.__buffer = numpy.vstack((self.__buffer, data))
238 235 self.__profIndex += 1
239 236 return
240 237
241 238 #If the buffer length is equal to n then replacing the last buffer value with the data value
242 239 self.__buffer = numpy.roll(self.__buffer, -1, axis=0)
243 240 self.__buffer[self.n-1] = data
244 241 self.__profIndex = self.n
245 242 return
246 243
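With overlapping enabled, putData keeps the last n profiles in a rolling stack (numpy.roll plus an overwrite of the last slot) and pushData sums over that stack. The sliding-sum idea on its own, with toy shapes unrelated to any real dataset:

import numpy

n, nChannels, nHeis = 4, 2, 8
buffer = numpy.zeros((n, nChannels, nHeis))

for k in range(10):                             # feed ten fake profiles
    data = numpy.full((1, nChannels, nHeis), float(k))
    buffer = numpy.roll(buffer, -1, axis=0)     # drop the oldest profile
    buffer[n - 1] = data                        # append the newest one
    integrated = buffer.sum(axis=0)             # sum of the last n profiles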
247 244
248 245 def pushData(self):
249 246 """
250 247 Return the sum of the last profiles and the profiles used in the sum.
251 248
252 249 Affected:
253 250
254 251 self.__profileIndex
255 252
256 253 """
257 254
258 255 if not self.__withOverapping:
259 256 data = self.__buffer
260 257 n = self.__profIndex
261 258
262 259 self.__buffer = 0
263 260 self.__profIndex = 0
264 261
265 262 return data, n
266 263
267 264 #Integration with Overlapping
268 265 data = numpy.sum(self.__buffer, axis=0)
269 266 n = self.__profIndex
270 267
271 268 return data, n
272 269
273 270 def byProfiles(self, data):
274 271
275 272 self.__dataReady = False
276 273 avgdata = None
277 274 # n = None
278 275
279 276 self.putData(data)
280 277
281 278 if self.__profIndex == self.n:
282 279
283 280 avgdata, n = self.pushData()
284 281 self.__dataReady = True
285 282
286 283 return avgdata
287 284
288 285 def byTime(self, data, datatime):
289 286
290 287 self.__dataReady = False
291 288 avgdata = None
292 289 n = None
293 290
294 291 self.putData(data)
295 292
296 293 if (datatime - self.__initime) >= self.__integrationtime:
297 294 avgdata, n = self.pushData()
298 295 self.n = n
299 296 self.__dataReady = True
300 297
301 298 return avgdata
302 299
303 300 def integrate(self, data, datatime=None):
304 301
305 302 if self.__initime == None:
306 303 self.__initime = datatime
307 304
308 305 if self.__byTime:
309 306 avgdata = self.byTime(data, datatime)
310 307 else:
311 308 avgdata = self.byProfiles(data)
312 309
313 310
314 311 self.__lastdatatime = datatime
315 312
316 313 if avgdata is None:
317 314 return None, None
318 315
319 316 avgdatatime = self.__initime
320 317
321 318 deltatime = datatime -self.__lastdatatime
322 319
323 320 if not self.__withOverapping:
324 321 self.__initime = datatime
325 322 else:
326 323 self.__initime += deltatime
327 324
328 325 return avgdata, avgdatatime
329 326
330 327 def run(self, dataOut, n=None, timeInterval=None, overlapping=False, **kwargs):
331 328
332 329 if not self.isConfig:
333 330 self.setup(n=n, timeInterval=timeInterval, overlapping=overlapping)
334 331 self.isConfig = True
335 332
336 333 avgdata, avgdatatime = self.integrate(dataOut.data_spc, dataOut.utctime)
337 334
338 335 # dataOut.timeInterval *= n
339 336 dataOut.flagNoData = True
340 337
341 338 if self.__dataReady:
342 339 dataOut.data_spc = avgdata
343 340 dataOut.nIncohInt *= self.n
344 341 # dataOut.nCohInt *= self.n
345 342 dataOut.utctime = avgdatatime
346 343 # dataOut.timeInterval = dataOut.ippSeconds * dataOut.nIncohInt
347 344 # dataOut.timeInterval = self.__timeInterval*self.n
348 345 dataOut.flagNoData = False
349 346
350 347 return dataOut No newline at end of file
@@ -1,882 +1,874
1 # Copyright (c) 2012-2020 Jicamarca Radio Observatory
2 # All rights reserved.
3 #
4 # Distributed under the terms of the BSD 3-clause license.
5 """Spectra processing Unit and operations
6
7 Here you will find the processing unit `SpectraProc` and several operations
8 to work with Spectra data type
9 """
10
1 11 import time
2 12 import itertools
3 13
4 14 import numpy
5 15
6 16 from schainpy.model.proc.jroproc_base import ProcessingUnit, MPDecorator, Operation
7 17 from schainpy.model.data.jrodata import Spectra
8 18 from schainpy.model.data.jrodata import hildebrand_sekhon
9 19 from schainpy.utils import log
10 20
11 21
12 22 class SpectraProc(ProcessingUnit):
13 23
14
15 24 def __init__(self):
16 25
17 26 ProcessingUnit.__init__(self)
18 27
19 28 self.buffer = None
20 29 self.firstdatatime = None
21 30 self.profIndex = 0
22 31 self.dataOut = Spectra()
23 32 self.id_min = None
24 33 self.id_max = None
25 34 self.setupReq = False #Agregar a todas las unidades de proc
26 35
27 36 def __updateSpecFromVoltage(self):
28 37
29 38 self.dataOut.timeZone = self.dataIn.timeZone
30 39 self.dataOut.dstFlag = self.dataIn.dstFlag
31 40 self.dataOut.errorCount = self.dataIn.errorCount
32 41 self.dataOut.useLocalTime = self.dataIn.useLocalTime
33 42 try:
34 43 self.dataOut.processingHeaderObj = self.dataIn.processingHeaderObj.copy()
35 44 except:
36 45 pass
37 46 self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()
47
38 48 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()
39 49 self.dataOut.channelList = self.dataIn.channelList
40 50 self.dataOut.heightList = self.dataIn.heightList
41 51 self.dataOut.dtype = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
42
43 self.dataOut.nBaud = self.dataIn.nBaud
44 self.dataOut.nCode = self.dataIn.nCode
45 self.dataOut.code = self.dataIn.code
46 52 self.dataOut.nProfiles = self.dataOut.nFFTPoints
47
48 53 self.dataOut.flagDiscontinuousBlock = self.dataIn.flagDiscontinuousBlock
49 54 self.dataOut.utctime = self.firstdatatime
50 # asumo q la data esta decodificada
51 55 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData
52 # asumo q la data esta sin flip
53 56 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData
54 57 self.dataOut.flagShiftFFT = False
55
56 58 self.dataOut.nCohInt = self.dataIn.nCohInt
57 59 self.dataOut.nIncohInt = 1
58
59 60 self.dataOut.windowOfFilter = self.dataIn.windowOfFilter
60
61 61 self.dataOut.frequency = self.dataIn.frequency
62 62 self.dataOut.realtime = self.dataIn.realtime
63
64 63 self.dataOut.azimuth = self.dataIn.azimuth
65 64 self.dataOut.zenith = self.dataIn.zenith
66
67 65 self.dataOut.beam.codeList = self.dataIn.beam.codeList
68 66 self.dataOut.beam.azimuthList = self.dataIn.beam.azimuthList
69 67 self.dataOut.beam.zenithList = self.dataIn.beam.zenithList
70 68
71 69 def __getFft(self):
72 70 """
73 71 Converts Voltage values into Spectra
74 72
75 73 Affected:
76 74 self.dataOut.data_spc
77 75 self.dataOut.data_cspc
78 76 self.dataOut.data_dc
79 77 self.dataOut.heightList
80 78 self.profIndex
81 79 self.buffer
82 80 self.dataOut.flagNoData
83 81 """
84 82 fft_volt = numpy.fft.fft(
85 83 self.buffer, n=self.dataOut.nFFTPoints, axis=1)
86 84 fft_volt = fft_volt.astype(numpy.dtype('complex'))
87 85 dc = fft_volt[:, 0, :]
88 86
89 87 # self-spectra computation
90 88 fft_volt = numpy.fft.fftshift(fft_volt, axes=(1,))
91 89 spc = fft_volt * numpy.conjugate(fft_volt)
92 90 spc = spc.real
93 91
94 92 blocksize = 0
95 93 blocksize += dc.size
96 94 blocksize += spc.size
97 95
98 96 cspc = None
99 97 pairIndex = 0
100 98 if self.dataOut.pairsList != None:
101 99 # cross-spectra computation
102 100 cspc = numpy.zeros(
103 101 (self.dataOut.nPairs, self.dataOut.nFFTPoints, self.dataOut.nHeights), dtype='complex')
104 102 for pair in self.dataOut.pairsList:
105 103 if pair[0] not in self.dataOut.channelList:
106 104 raise ValueError("Error getting CrossSpectra: pair 0 of %s is not in channelList = %s" % (
107 105 str(pair), str(self.dataOut.channelList)))
108 106 if pair[1] not in self.dataOut.channelList:
109 107 raise ValueError("Error getting CrossSpectra: pair 1 of %s is not in channelList = %s" % (
110 108 str(pair), str(self.dataOut.channelList)))
111 109
112 110 cspc[pairIndex, :, :] = fft_volt[pair[0], :, :] * \
113 111 numpy.conjugate(fft_volt[pair[1], :, :])
114 112 pairIndex += 1
115 113 blocksize += cspc.size
116 114
117 115 self.dataOut.data_spc = spc
118 116 self.dataOut.data_cspc = cspc
119 117 self.dataOut.data_dc = dc
120 118 self.dataOut.blockSize = blocksize
121 119 self.dataOut.flagShiftFFT = False
122 120
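The cross-spectra loop above multiplies the FFT of one channel of a pair by the conjugate of the other, and the run() below now defaults the pair list to every two-channel combination. A minimal standalone version of that computation; sizes and data are made up:

import itertools
import numpy

nChannels, nFFTPoints, nHeights = 3, 16, 10
fft_volt = (numpy.random.randn(nChannels, nFFTPoints, nHeights)
            + 1j * numpy.random.randn(nChannels, nFFTPoints, nHeights))

pairsList = list(itertools.combinations(range(nChannels), 2))   # [(0, 1), (0, 2), (1, 2)]
cspc = numpy.zeros((len(pairsList), nFFTPoints, nHeights), dtype='complex')
for k, (a, b) in enumerate(pairsList):
    cspc[k] = fft_volt[a] * numpy.conjugate(fft_volt[b])        # cross-spectrum of the pair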
123 def run(self, nProfiles=None, nFFTPoints=None, pairsList=[], ippFactor=None, shift_fft=False):
121 def run(self, nProfiles=None, nFFTPoints=None, pairsList=None, ippFactor=None, shift_fft=False):
124 122
125 123 if self.dataIn.type == "Spectra":
126 124 self.dataOut.copy(self.dataIn)
127 125 if shift_fft:
128 126 # shift a fixed number of positions to the right along the FFT axis
129 127 shift = int(self.dataOut.nFFTPoints/2)
130 128 self.dataOut.data_spc = numpy.roll(self.dataOut.data_spc, shift , axis=1)
131 129
132 130 if self.dataOut.data_cspc is not None:
133 131 #desplaza a la derecha en el eje 2 determinadas posiciones
134 132 self.dataOut.data_cspc = numpy.roll(self.dataOut.data_cspc, shift, axis=1)
135 133
136 return True
137
138 if self.dataIn.type == "Voltage":
134 elif self.dataIn.type == "Voltage":
139 135
140 136 self.dataOut.flagNoData = True
141 137
142 138 if nFFTPoints == None:
143 139 raise ValueError("This SpectraProc.run() need nFFTPoints input variable")
144 140
145 141 if nProfiles == None:
146 142 nProfiles = nFFTPoints
147 143
148 144 if ippFactor == None:
149 ippFactor = 1
150
151 self.dataOut.ippFactor = ippFactor
152
145 self.dataOut.ippFactor = 1
146
153 147 self.dataOut.nFFTPoints = nFFTPoints
154 self.dataOut.pairsList = pairsList
155 148
156 149 if self.buffer is None:
157 150 self.buffer = numpy.zeros((self.dataIn.nChannels,
158 151 nProfiles,
159 152 self.dataIn.nHeights),
160 153 dtype='complex')
161 154
162 155 if self.dataIn.flagDataAsBlock:
163 156 nVoltProfiles = self.dataIn.data.shape[1]
164 157
165 158 if nVoltProfiles == nProfiles:
166 159 self.buffer = self.dataIn.data.copy()
167 160 self.profIndex = nVoltProfiles
168 161
169 162 elif nVoltProfiles < nProfiles:
170 163
171 164 if self.profIndex == 0:
172 165 self.id_min = 0
173 166 self.id_max = nVoltProfiles
174 167
175 168 self.buffer[:, self.id_min:self.id_max,
176 169 :] = self.dataIn.data
177 170 self.profIndex += nVoltProfiles
178 171 self.id_min += nVoltProfiles
179 172 self.id_max += nVoltProfiles
180 173 else:
181 174 raise ValueError("The type object %s has %d profiles, it should just has %d profiles" % (
182 175 self.dataIn.type, self.dataIn.data.shape[1], nProfiles))
183 176 self.dataOut.flagNoData = True
184 return 0
185 177 else:
186 178 self.buffer[:, self.profIndex, :] = self.dataIn.data.copy()
187 179 self.profIndex += 1
188 180
189 181 if self.firstdatatime == None:
190 182 self.firstdatatime = self.dataIn.utctime
191 183
192 184 if self.profIndex == nProfiles:
193 185 self.__updateSpecFromVoltage()
186 if pairsList == None:
187 self.dataOut.pairsList = [pair for pair in itertools.combinations(self.dataOut.channelList, 2)]
194 188 self.__getFft()
195 189
196 190 self.dataOut.flagNoData = False
197 191 self.firstdatatime = None
198 192 self.profIndex = 0
199
200 return True
201
202 raise ValueError("The type of input object '%s' is not valid" % (
203 self.dataIn.type))
193 else:
194 raise ValueError("The type of input object '{}' is not valid".format(
195 self.dataIn.type))
204 196
205 197 def __selectPairs(self, pairsList):
206 198
207 199 if not pairsList:
208 200 return
209 201
210 202 pairs = []
211 203 pairsIndex = []
212 204
213 205 for pair in pairsList:
214 206 if pair[0] not in self.dataOut.channelList or pair[1] not in self.dataOut.channelList:
215 207 continue
216 208 pairs.append(pair)
217 209 pairsIndex.append(pairs.index(pair))
218 210
219 211 self.dataOut.data_cspc = self.dataOut.data_cspc[pairsIndex]
220 212 self.dataOut.pairsList = pairs
221 213
222 214 return
223 215
224 216 def selectFFTs(self, minFFT, maxFFT ):
225 217 """
226 218 Selects a block of data for the group of FFT points within the range
227 219 minFFT <= FFT <= maxFFT
228 220 """
229 221
230 222 if (minFFT > maxFFT):
231 223 raise ValueError("Error selecting heights: Height range (%d,%d) is not valid" % (minFFT, maxFFT))
232 224
233 225 if (minFFT < self.dataOut.getFreqRange()[0]):
234 226 minFFT = self.dataOut.getFreqRange()[0]
235 227
236 228 if (maxFFT > self.dataOut.getFreqRange()[-1]):
237 229 maxFFT = self.dataOut.getFreqRange()[-1]
238 230
239 231 minIndex = 0
240 232 maxIndex = 0
241 233 FFTs = self.dataOut.getFreqRange()
242 234
243 235 inda = numpy.where(FFTs >= minFFT)
244 236 indb = numpy.where(FFTs <= maxFFT)
245 237
246 238 try:
247 239 minIndex = inda[0][0]
248 240 except:
249 241 minIndex = 0
250 242
251 243 try:
252 244 maxIndex = indb[0][-1]
253 245 except:
254 246 maxIndex = len(FFTs)
255 247
256 248 self.selectFFTsByIndex(minIndex, maxIndex)
257 249
258 250 return 1
259 251
260 252 def getBeaconSignal(self, tauindex=0, channelindex=0, hei_ref=None):
261 253 newheis = numpy.where(
262 254 self.dataOut.heightList > self.dataOut.radarControllerHeaderObj.Taus[tauindex])
263 255
264 256 if hei_ref != None:
265 257 newheis = numpy.where(self.dataOut.heightList > hei_ref)
266 258
267 259 minIndex = min(newheis[0])
268 260 maxIndex = max(newheis[0])
269 261 data_spc = self.dataOut.data_spc[:, :, minIndex:maxIndex + 1]
270 262 heightList = self.dataOut.heightList[minIndex:maxIndex + 1]
271 263
272 264 # determine the height indexes
273 265 nheis = int(self.dataOut.radarControllerHeaderObj.txB /
274 266 (self.dataOut.heightList[1] - self.dataOut.heightList[0]))
275 267 avg_dB = 10 * \
276 268 numpy.log10(numpy.sum(data_spc[channelindex, :, :], axis=0))
277 269 beacon_dB = numpy.sort(avg_dB)[-nheis:]
278 270 beacon_heiIndexList = []
279 271 for val in avg_dB.tolist():
280 272 if val >= beacon_dB[0]:
281 273 beacon_heiIndexList.append(avg_dB.tolist().index(val))
282 274
283 275 #data_spc = data_spc[:,:,beacon_heiIndexList]
284 276 data_cspc = None
285 277 if self.dataOut.data_cspc is not None:
286 278 data_cspc = self.dataOut.data_cspc[:, :, minIndex:maxIndex + 1]
287 279 #data_cspc = data_cspc[:,:,beacon_heiIndexList]
288 280
289 281 data_dc = None
290 282 if self.dataOut.data_dc is not None:
291 283 data_dc = self.dataOut.data_dc[:, minIndex:maxIndex + 1]
292 284 #data_dc = data_dc[:,beacon_heiIndexList]
293 285
294 286 self.dataOut.data_spc = data_spc
295 287 self.dataOut.data_cspc = data_cspc
296 288 self.dataOut.data_dc = data_dc
297 289 self.dataOut.heightList = heightList
298 290 self.dataOut.beacon_heiIndexList = beacon_heiIndexList
299 291
300 292 return 1
301 293
302 294 def selectFFTsByIndex(self, minIndex, maxIndex):
303 295 """
304 296
305 297 """
306 298
307 299 if (minIndex < 0) or (minIndex > maxIndex):
308 300 raise ValueError("Error selecting heights: Index range (%d,%d) is not valid" % (minIndex, maxIndex))
309 301
310 302 if (maxIndex >= self.dataOut.nProfiles):
311 303 maxIndex = self.dataOut.nProfiles-1
312 304
313 305 #Spectra
314 306 data_spc = self.dataOut.data_spc[:,minIndex:maxIndex+1,:]
315 307
316 308 data_cspc = None
317 309 if self.dataOut.data_cspc is not None:
318 310 data_cspc = self.dataOut.data_cspc[:,minIndex:maxIndex+1,:]
319 311
320 312 data_dc = None
321 313 if self.dataOut.data_dc is not None:
322 314 data_dc = self.dataOut.data_dc[minIndex:maxIndex+1,:]
323 315
324 316 self.dataOut.data_spc = data_spc
325 317 self.dataOut.data_cspc = data_cspc
326 318 self.dataOut.data_dc = data_dc
327 319
328 320 self.dataOut.ippSeconds = self.dataOut.ippSeconds*(self.dataOut.nFFTPoints / numpy.shape(data_cspc)[1])
329 321 self.dataOut.nFFTPoints = numpy.shape(data_cspc)[1]
330 322 self.dataOut.profilesPerBlock = numpy.shape(data_cspc)[1]
331 323
332 324 return 1
333 325
334 326 def getNoise(self, minHei=None, maxHei=None, minVel=None, maxVel=None):
335 327 # height range validation
336 328 if minHei == None:
337 329 minHei = self.dataOut.heightList[0]
338 330
339 331 if maxHei == None:
340 332 maxHei = self.dataOut.heightList[-1]
341 333
342 334 if (minHei < self.dataOut.heightList[0]) or (minHei > maxHei):
343 335 print('minHei: %.2f is out of the heights range' % (minHei))
344 336 print('minHei is setting to %.2f' % (self.dataOut.heightList[0]))
345 337 minHei = self.dataOut.heightList[0]
346 338
347 339 if (maxHei > self.dataOut.heightList[-1]) or (maxHei < minHei):
348 340 print('maxHei: %.2f is out of the heights range' % (maxHei))
349 341 print('maxHei is setting to %.2f' % (self.dataOut.heightList[-1]))
350 342 maxHei = self.dataOut.heightList[-1]
351 343
352 344 # velocity range validation
353 345 velrange = self.dataOut.getVelRange(1)
354 346
355 347 if minVel == None:
356 348 minVel = velrange[0]
357 349
358 350 if maxVel == None:
359 351 maxVel = velrange[-1]
360 352
361 353 if (minVel < velrange[0]) or (minVel > maxVel):
362 354 print('minVel: %.2f is out of the velocity range' % (minVel))
363 355 print('minVel is setting to %.2f' % (velrange[0]))
364 356 minVel = velrange[0]
365 357
366 358 if (maxVel > velrange[-1]) or (maxVel < minVel):
367 359 print('maxVel: %.2f is out of the velocity range' % (maxVel))
368 360 print('maxVel is setting to %.2f' % (velrange[-1]))
369 361 maxVel = velrange[-1]
370 362
371 363 # index selection for the height range
372 364 minIndex = 0
373 365 maxIndex = 0
374 366 heights = self.dataOut.heightList
375 367
376 368 inda = numpy.where(heights >= minHei)
377 369 indb = numpy.where(heights <= maxHei)
378 370
379 371 try:
380 372 minIndex = inda[0][0]
381 373 except:
382 374 minIndex = 0
383 375
384 376 try:
385 377 maxIndex = indb[0][-1]
386 378 except:
387 379 maxIndex = len(heights)
388 380
389 381 if (minIndex < 0) or (minIndex > maxIndex):
390 382 raise ValueError("some value in (%d,%d) is not valid" % (
391 383 minIndex, maxIndex))
392 384
393 385 if (maxIndex >= self.dataOut.nHeights):
394 386 maxIndex = self.dataOut.nHeights - 1
395 387
396 388 # index selection for the velocity range
397 389 indminvel = numpy.where(velrange >= minVel)
398 390 indmaxvel = numpy.where(velrange <= maxVel)
399 391 try:
400 392 minIndexVel = indminvel[0][0]
401 393 except:
402 394 minIndexVel = 0
403 395
404 396 try:
405 397 maxIndexVel = indmaxvel[0][-1]
406 398 except:
407 399 maxIndexVel = len(velrange)
408 400
409 401 # spectra selection
410 402 data_spc = self.dataOut.data_spc[:,
411 403 minIndexVel:maxIndexVel + 1, minIndex:maxIndex + 1]
412 404 # noise estimation
413 405 noise = numpy.zeros(self.dataOut.nChannels)
414 406
415 407 for channel in range(self.dataOut.nChannels):
416 408 daux = data_spc[channel, :, :]
417 409 sortdata = numpy.sort(daux, axis=None)
418 410 noise[channel] = hildebrand_sekhon(sortdata, self.dataOut.nIncohInt)
419 411
420 412 self.dataOut.noise_estimation = noise.copy()
421 413
422 414 return 1
423 415
424 416 class removeDC(Operation):
425 417
426 418 def run(self, dataOut, mode=2):
427 419 self.dataOut = dataOut
428 420 jspectra = self.dataOut.data_spc
429 421 jcspectra = self.dataOut.data_cspc
430 422
431 423 num_chan = jspectra.shape[0]
432 424 num_hei = jspectra.shape[2]
433 425
434 426 if jcspectra is not None:
435 427 jcspectraExist = True
436 428 num_pairs = jcspectra.shape[0]
437 429 else:
438 430 jcspectraExist = False
439 431
440 432 freq_dc = int(jspectra.shape[1] / 2)
441 433 ind_vel = numpy.array([-2, -1, 1, 2]) + freq_dc
442 434 ind_vel = ind_vel.astype(int)
443 435
444 436 if ind_vel[0] < 0:
445 437 ind_vel[list(range(0, 1))] = ind_vel[list(range(0, 1))] + self.num_prof
446 438
447 439 if mode == 1:
448 440 jspectra[:, freq_dc, :] = (
449 441 jspectra[:, ind_vel[1], :] + jspectra[:, ind_vel[2], :]) / 2 # CORRECCION
450 442
451 443 if jcspectraExist:
452 444 jcspectra[:, freq_dc, :] = (
453 445 jcspectra[:, ind_vel[1], :] + jcspectra[:, ind_vel[2], :]) / 2
454 446
455 447 if mode == 2:
456 448
457 449 vel = numpy.array([-2, -1, 1, 2])
458 450 xx = numpy.zeros([4, 4])
459 451
460 452 for fil in range(4):
461 453 xx[fil, :] = vel[fil]**numpy.asarray(list(range(4)))
462 454
463 455 xx_inv = numpy.linalg.inv(xx)
464 456 xx_aux = xx_inv[0, :]
465 457
466 458 for ich in range(num_chan):
467 459 yy = jspectra[ich, ind_vel, :]
468 460 jspectra[ich, freq_dc, :] = numpy.dot(xx_aux, yy)
469 461
470 462 junkid = jspectra[ich, freq_dc, :] <= 0
471 463 cjunkid = sum(junkid)
472 464
473 465 if cjunkid.any():
474 466 jspectra[ich, freq_dc, junkid.nonzero()] = (
475 467 jspectra[ich, ind_vel[1], junkid] + jspectra[ich, ind_vel[2], junkid]) / 2
476 468
477 469 if jcspectraExist:
478 470 for ip in range(num_pairs):
479 471 yy = jcspectra[ip, ind_vel, :]
480 472 jcspectra[ip, freq_dc, :] = numpy.dot(xx_aux, yy)
481 473
482 474 self.dataOut.data_spc = jspectra
483 475 self.dataOut.data_cspc = jcspectra
484 476
485 477 return self.dataOut
486 478
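In mode 2 above, the DC bin is replaced by the value at zero velocity of a cubic fitted through the four neighbouring bins (vel = -2, -1, 1, 2): the 4x4 matrix xx is a Vandermonde system, and the first row of its inverse dotted with the four samples is exactly the fitted constant term. A standalone check of that equivalence against numpy.polyfit, with made-up spectral values:

import numpy

vel = numpy.array([-2, -1, 1, 2], dtype=float)
yy = numpy.array([3.0, 2.0, 2.5, 4.0])              # fake spectral values around DC

xx = numpy.vstack([vel ** k for k in range(4)]).T   # same Vandermonde rows as removeDC
dc_vandermonde = numpy.linalg.inv(xx)[0, :] @ yy    # cubic evaluated at vel = 0

coeffs = numpy.polyfit(vel, yy, 3)                  # exact cubic through the 4 points
dc_polyfit = numpy.polyval(coeffs, 0.0)

print(dc_vandermonde, dc_polyfit)                   # both give the interpolated DC value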
487 479 class removeInterference(Operation):
488 480
489 481 def removeInterference2(self):
490 482
491 483 cspc = self.dataOut.data_cspc
492 484 spc = self.dataOut.data_spc
493 485 Heights = numpy.arange(cspc.shape[2])
494 486 realCspc = numpy.abs(cspc)
495 487
496 488 for i in range(cspc.shape[0]):
497 489 LinePower= numpy.sum(realCspc[i], axis=0)
498 490 Threshold = numpy.amax(LinePower)-numpy.sort(LinePower)[len(Heights)-int(len(Heights)*0.1)]
499 491 SelectedHeights = Heights[ numpy.where( LinePower < Threshold ) ]
500 492 InterferenceSum = numpy.sum( realCspc[i,:,SelectedHeights], axis=0 )
501 493 InterferenceThresholdMin = numpy.sort(InterferenceSum)[int(len(InterferenceSum)*0.98)]
502 494 InterferenceThresholdMax = numpy.sort(InterferenceSum)[int(len(InterferenceSum)*0.99)]
503 495
504 496
505 497 InterferenceRange = numpy.where( ([InterferenceSum > InterferenceThresholdMin]))# , InterferenceSum < InterferenceThresholdMax]) )
506 498 #InterferenceRange = numpy.where( ([InterferenceRange < InterferenceThresholdMax]))
507 499 if len(InterferenceRange)<int(cspc.shape[1]*0.3):
508 500 cspc[i,InterferenceRange,:] = numpy.NaN
509 501
510 502 self.dataOut.data_cspc = cspc
511 503
512 504 def removeInterference(self, interf = 2, hei_interf = None, nhei_interf = None, offhei_interf = None):
513 505
514 506 jspectra = self.dataOut.data_spc
515 507 jcspectra = self.dataOut.data_cspc
516 508 jnoise = self.dataOut.getNoise()
517 509 num_incoh = self.dataOut.nIncohInt
518 510
519 511 num_channel = jspectra.shape[0]
520 512 num_prof = jspectra.shape[1]
521 513 num_hei = jspectra.shape[2]
522 514
523 515 # hei_interf
524 516 if hei_interf is None:
525 517 count_hei = int(num_hei / 2)
526 518 hei_interf = numpy.asmatrix(list(range(count_hei))) + num_hei - count_hei
527 519 hei_interf = numpy.asarray(hei_interf)[0]
528 520 # nhei_interf
529 521 if (nhei_interf == None):
530 522 nhei_interf = 5
531 523 if (nhei_interf < 1):
532 524 nhei_interf = 1
533 525 if (nhei_interf > count_hei):
534 526 nhei_interf = count_hei
535 527 if (offhei_interf == None):
536 528 offhei_interf = 0
537 529
538 530 ind_hei = list(range(num_hei))
539 531 # mask_prof = numpy.asarray(range(num_prof - 2)) + 1
540 532 # mask_prof[range(num_prof/2 - 1,len(mask_prof))] += 1
541 533 mask_prof = numpy.asarray(list(range(num_prof)))
542 534 num_mask_prof = mask_prof.size
543 535 comp_mask_prof = [0, num_prof / 2]
544 536
545 537 # noise_exist: determines whether jnoise has been defined and holds the noise information of every channel
546 538 if (jnoise.size < num_channel or numpy.isnan(jnoise).any()):
547 539 jnoise = numpy.nan * numpy.ones(num_channel)
548 540 noise_exist = jnoise[0] < numpy.Inf
549 541
550 542 # Interference removal subroutine
551 543 for ich in range(num_channel):
552 544 # Sort the spectra by power (lowest to highest)
553 545 power = jspectra[ich, mask_prof, :]
554 546 power = power[:, hei_interf]
555 547 power = power.sum(axis=0)
556 548 psort = power.ravel().argsort()
557 549
558 550 # Estimate the average interference in the power spectra using the lowest-power heights
559 551 junkspc_interf = jspectra[ich, :, hei_interf[psort[list(range(
560 552 offhei_interf, nhei_interf + offhei_interf))]]]
561 553
562 554 if noise_exist:
563 555 # tmp_noise = jnoise[ich] / num_prof
564 556 tmp_noise = jnoise[ich]
565 557 junkspc_interf = junkspc_interf - tmp_noise
566 558 #junkspc_interf[:,comp_mask_prof] = 0
567 559
568 560 jspc_interf = junkspc_interf.sum(axis=0) / nhei_interf
569 561 jspc_interf = jspc_interf.transpose()
570 562 # Compute the average interference spectrum
571 563 noiseid = numpy.where(
572 564 jspc_interf <= tmp_noise / numpy.sqrt(num_incoh))
573 565 noiseid = noiseid[0]
574 566 cnoiseid = noiseid.size
575 567 interfid = numpy.where(
576 568 jspc_interf > tmp_noise / numpy.sqrt(num_incoh))
577 569 interfid = interfid[0]
578 570 cinterfid = interfid.size
579 571
580 572 if (cnoiseid > 0):
581 573 jspc_interf[noiseid] = 0
582 574
583 575 # Expand the set of profiles to be cleaned
584 576 if (cinterfid > 0):
585 577 new_interfid = (
586 578 numpy.r_[interfid - 1, interfid, interfid + 1] + num_prof) % num_prof
587 579 new_interfid = numpy.asarray(new_interfid)
588 580 new_interfid = {x for x in new_interfid}
589 581 new_interfid = numpy.array(list(new_interfid))
590 582 new_cinterfid = new_interfid.size
591 583 else:
592 584 new_cinterfid = 0
593 585
594 586 for ip in range(new_cinterfid):
595 587 ind = junkspc_interf[:, new_interfid[ip]].ravel().argsort()
596 588 jspc_interf[new_interfid[ip]
597 589 ] = junkspc_interf[ind[nhei_interf // 2], new_interfid[ip]]
598 590
599 591 jspectra[ich, :, ind_hei] = jspectra[ich, :,
600 592 ind_hei] - jspc_interf # TODO: fix indexes
601 593
602 594 # Remove the interference at the point of strongest interference
603 595 ListAux = jspc_interf[mask_prof].tolist()
604 596 maxid = ListAux.index(max(ListAux))
605 597
606 598 if cinterfid > 0:
607 599 for ip in range(cinterfid * (interf == 2) - 1):
608 600 ind = (jspectra[ich, interfid[ip], :] < tmp_noise *
609 601 (1 + 1 / numpy.sqrt(num_incoh))).nonzero()
610 602 cind = len(ind)
611 603
612 604 if (cind > 0):
613 605 jspectra[ich, interfid[ip], ind] = tmp_noise * \
614 606 (1 + (numpy.random.uniform(cind) - 0.5) /
615 607 numpy.sqrt(num_incoh))
616 608
617 609 ind = numpy.array([-2, -1, 1, 2])
618 610 xx = numpy.zeros([4, 4])
619 611
620 612 for id1 in range(4):
621 613 xx[:, id1] = ind[id1]**numpy.asarray(list(range(4)))
622 614
623 615 xx_inv = numpy.linalg.inv(xx)
624 616 xx = xx_inv[:, 0]
625 617 ind = (ind + maxid + num_mask_prof) % num_mask_prof
626 618 yy = jspectra[ich, mask_prof[ind], :]
627 619 jspectra[ich, mask_prof[maxid], :] = numpy.dot(
628 620 yy.transpose(), xx)
629 621
630 622 indAux = (jspectra[ich, :, :] < tmp_noise *
631 623 (1 - 1 / numpy.sqrt(num_incoh))).nonzero()
632 624 jspectra[ich, indAux[0], indAux[1]] = tmp_noise * \
633 625 (1 - 1 / numpy.sqrt(num_incoh))
634 626
635 627 # Interference removal in the cross-spectra
636 628 if jcspectra is None:
637 629 return jspectra, jcspectra
638 630 num_pairs = int(jcspectra.size / (num_prof * num_hei))
639 631 jcspectra = jcspectra.reshape(num_pairs, num_prof, num_hei)
640 632
641 633 for ip in range(num_pairs):
642 634
643 635 #-------------------------------------------
644 636
645 637 cspower = numpy.abs(jcspectra[ip, mask_prof, :])
646 638 cspower = cspower[:, hei_interf]
647 639 cspower = cspower.sum(axis=0)
648 640
649 641 cspsort = cspower.ravel().argsort()
650 642 junkcspc_interf = jcspectra[ip, :, hei_interf[cspsort[list(range(
651 643 offhei_interf, nhei_interf + offhei_interf))]]]
652 644 junkcspc_interf = junkcspc_interf.transpose()
653 645 jcspc_interf = junkcspc_interf.sum(axis=1) / nhei_interf
654 646
655 647 ind = numpy.abs(jcspc_interf[mask_prof]).ravel().argsort()
656 648
657 649 median_real = int(numpy.median(numpy.real(
658 650 junkcspc_interf[mask_prof[ind[list(range(3 * num_prof // 4))]], :])))
659 651 median_imag = int(numpy.median(numpy.imag(
660 652 junkcspc_interf[mask_prof[ind[list(range(3 * num_prof // 4))]], :])))
661 653 comp_mask_prof = [int(e) for e in comp_mask_prof]
662 654 junkcspc_interf[comp_mask_prof, :] = complex(
663 655 median_real, median_imag)
664 656
665 657 for iprof in range(num_prof):
666 658 ind = numpy.abs(junkcspc_interf[iprof, :]).ravel().argsort()
667 659 jcspc_interf[iprof] = junkcspc_interf[iprof, ind[nhei_interf // 2]]
668 660
669 661 # Remove the interference
670 662 jcspectra[ip, :, ind_hei] = jcspectra[ip,
671 663 :, ind_hei] - jcspc_interf
672 664
673 665 ListAux = numpy.abs(jcspc_interf[mask_prof]).tolist()
674 666 maxid = ListAux.index(max(ListAux))
675 667
676 668 ind = numpy.array([-2, -1, 1, 2])
677 669 xx = numpy.zeros([4, 4])
678 670
679 671 for id1 in range(4):
680 672 xx[:, id1] = ind[id1]**numpy.asarray(list(range(4)))
681 673
682 674 xx_inv = numpy.linalg.inv(xx)
683 675 xx = xx_inv[:, 0]
684 676
685 677 ind = (ind + maxid + num_mask_prof) % num_mask_prof
686 678 yy = jcspectra[ip, mask_prof[ind], :]
687 679 jcspectra[ip, mask_prof[maxid], :] = numpy.dot(yy.transpose(), xx)
688 680
690 682 # Save results
690 682 self.dataOut.data_spc = jspectra
691 683 self.dataOut.data_cspc = jcspectra
692 684
693 685 return 1
694 686
695 687 def run(self, dataOut, interf = 2,hei_interf = None, nhei_interf = None, offhei_interf = None, mode=1):
696 688
697 689 self.dataOut = dataOut
698 690
699 691 if mode == 1:
700 692 self.removeInterference(interf=interf, hei_interf=hei_interf, nhei_interf=nhei_interf, offhei_interf=offhei_interf)
701 693 elif mode == 2:
702 694 self.removeInterference2()
703 695
704 696 return self.dataOut
705 697
706 698
707 699 class IncohInt(Operation):
708 700
709 701 __profIndex = 0
710 702 __withOverapping = False
711 703
712 704 __byTime = False
713 705 __initime = None
714 706 __lastdatatime = None
715 707 __integrationtime = None
716 708
717 709 __buffer_spc = None
718 710 __buffer_cspc = None
719 711 __buffer_dc = None
720 712
721 713 __dataReady = False
722 714
723 715 __timeInterval = None
724 716
725 717 n = None
726 718
727 719 def __init__(self):
728 720
729 721 Operation.__init__(self)
730 722
731 723 def setup(self, n=None, timeInterval=None, overlapping=False):
732 724 """
733 725 Set the parameters of the integration class.
734 726
735 727 Inputs:
736 728
737 729 n : Number of incoherent integrations
738 730 timeInterval : Integration time in seconds. Ignored if the parameter "n" is given
739 731 overlapping :
740 732
741 733 """
742 734
743 735 self.__initime = None
744 736 self.__lastdatatime = 0
745 737
746 738 self.__buffer_spc = 0
747 739 self.__buffer_cspc = 0
748 740 self.__buffer_dc = 0
749 741
750 742 self.__profIndex = 0
751 743 self.__dataReady = False
752 744 self.__byTime = False
753 745
754 746 if n is None and timeInterval is None:
755 747 raise ValueError("n or timeInterval should be specified ...")
756 748
757 749 if n is not None:
758 750 self.n = int(n)
759 751 else:
760 752
761 753 self.__integrationtime = int(timeInterval)
762 754 self.n = None
763 755 self.__byTime = True
764 756
765 757 def putData(self, data_spc, data_cspc, data_dc):
766 758 """
767 759 Add a profile to the __buffer_spc and increase the __profileIndex by one
768 760
769 761 """
770 762
771 763 self.__buffer_spc += data_spc
772 764
773 765 if data_cspc is None:
774 766 self.__buffer_cspc = None
775 767 else:
776 768 self.__buffer_cspc += data_cspc
777 769
778 770 if data_dc is None:
779 771 self.__buffer_dc = None
780 772 else:
781 773 self.__buffer_dc += data_dc
782 774
783 775 self.__profIndex += 1
784 776
785 777 return
786 778
787 779 def pushData(self):
788 780 """
789 781 Return the sum of the last profiles and the profiles used in the sum.
790 782
791 783 Affected:
792 784
793 785 self.__profileIndex
794 786
795 787 """
796 788
797 789 data_spc = self.__buffer_spc
798 790 data_cspc = self.__buffer_cspc
799 791 data_dc = self.__buffer_dc
800 792 n = self.__profIndex
801 793
802 794 self.__buffer_spc = 0
803 795 self.__buffer_cspc = 0
804 796 self.__buffer_dc = 0
805 797 self.__profIndex = 0
806 798
807 799 return data_spc, data_cspc, data_dc, n
808 800
809 801 def byProfiles(self, *args):
810 802
811 803 self.__dataReady = False
812 804 avgdata_spc = None
813 805 avgdata_cspc = None
814 806 avgdata_dc = None
815 807
816 808 self.putData(*args)
817 809
818 810 if self.__profIndex == self.n:
819 811
820 812 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
821 813 self.n = n
822 814 self.__dataReady = True
823 815
824 816 return avgdata_spc, avgdata_cspc, avgdata_dc
825 817
826 818 def byTime(self, datatime, *args):
827 819
828 820 self.__dataReady = False
829 821 avgdata_spc = None
830 822 avgdata_cspc = None
831 823 avgdata_dc = None
832 824
833 825 self.putData(*args)
834 826
835 827 if (datatime - self.__initime) >= self.__integrationtime:
836 828 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
837 829 self.n = n
838 830 self.__dataReady = True
839 831
840 832 return avgdata_spc, avgdata_cspc, avgdata_dc
841 833
842 834 def integrate(self, datatime, *args):
843 835
844 836 if self.__profIndex == 0:
845 837 self.__initime = datatime
846 838
847 839 if self.__byTime:
848 840 avgdata_spc, avgdata_cspc, avgdata_dc = self.byTime(
849 841 datatime, *args)
850 842 else:
851 843 avgdata_spc, avgdata_cspc, avgdata_dc = self.byProfiles(*args)
852 844
853 845 if not self.__dataReady:
854 846 return None, None, None, None
855 847
856 848 return self.__initime, avgdata_spc, avgdata_cspc, avgdata_dc
857 849
858 850 def run(self, dataOut, n=None, timeInterval=None, overlapping=False):
859 851 if n == 1:
860 852 return dataOut
861 853
862 854 dataOut.flagNoData = True
863 855
864 856 if not self.isConfig:
865 857 self.setup(n, timeInterval, overlapping)
866 858 self.isConfig = True
867 859
868 860 avgdatatime, avgdata_spc, avgdata_cspc, avgdata_dc = self.integrate(dataOut.utctime,
869 861 dataOut.data_spc,
870 862 dataOut.data_cspc,
871 863 dataOut.data_dc)
872 864
873 865 if self.__dataReady:
874 866
875 867 dataOut.data_spc = avgdata_spc
876 868 dataOut.data_cspc = avgdata_cspc
877 869 dataOut.data_dc = avgdata_dc
878 870 dataOut.nIncohInt *= self.n
879 871 dataOut.utctime = avgdatatime
880 872 dataOut.flagNoData = False
881 873
882 874 return dataOut No newline at end of file
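IncohInt only accumulates successive power spectra and keeps count of how many went into the sum; the statistical point of doing so is that the relative fluctuation of the noise floor falls roughly as 1/sqrt(nIncohInt). A quick numerical illustration with synthetic exponential power samples:

import numpy

rng = numpy.random.default_rng(0)
spectra = rng.exponential(scale=1.0, size=(100, 64))   # 100 fake power spectra, 64 bins

single = spectra[0]
integrated = spectra.mean(axis=0)                      # incoherent average of 100 spectra

print(single.std() / single.mean())                    # about 1 for raw exponential noise
print(integrated.std() / integrated.mean())            # about 1/sqrt(100) = 0.1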
@@ -1,1627 +1,1629
1 1 import sys
2 2 import numpy,math
3 3 from scipy import interpolate
4 4 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation, MPDecorator
5 5 from schainpy.model.data.jrodata import Voltage,hildebrand_sekhon
6 6 from schainpy.utils import log
7 7 from time import time
8 8
9 9
10 10
11 11 class VoltageProc(ProcessingUnit):
12 12
13 13 def __init__(self):
14 14
15 15 ProcessingUnit.__init__(self)
16 16
17 17 self.dataOut = Voltage()
18 18 self.flip = 1
19 19 self.setupReq = False
20 20
21 21 def run(self):
22 22
23 23 if self.dataIn.type == 'AMISR':
24 24 self.__updateObjFromAmisrInput()
25 25
26 26 if self.dataIn.type == 'Voltage':
27 27 self.dataOut.copy(self.dataIn)
28 28
29 29 def __updateObjFromAmisrInput(self):
30 30
31 31 self.dataOut.timeZone = self.dataIn.timeZone
32 32 self.dataOut.dstFlag = self.dataIn.dstFlag
33 33 self.dataOut.errorCount = self.dataIn.errorCount
34 34 self.dataOut.useLocalTime = self.dataIn.useLocalTime
35 35
36 36 self.dataOut.flagNoData = self.dataIn.flagNoData
37 37 self.dataOut.data = self.dataIn.data
38 38 self.dataOut.utctime = self.dataIn.utctime
39 39 self.dataOut.channelList = self.dataIn.channelList
40 40 #self.dataOut.timeInterval = self.dataIn.timeInterval
41 41 self.dataOut.heightList = self.dataIn.heightList
42 42 self.dataOut.nProfiles = self.dataIn.nProfiles
43 43
44 44 self.dataOut.nCohInt = self.dataIn.nCohInt
45 45 self.dataOut.ippSeconds = self.dataIn.ippSeconds
46 46 self.dataOut.frequency = self.dataIn.frequency
47 47
48 48 self.dataOut.azimuth = self.dataIn.azimuth
49 49 self.dataOut.zenith = self.dataIn.zenith
50 50
51 51 self.dataOut.beam.codeList = self.dataIn.beam.codeList
52 52 self.dataOut.beam.azimuthList = self.dataIn.beam.azimuthList
53 53 self.dataOut.beam.zenithList = self.dataIn.beam.zenithList
54 54
55 55
56 56 class selectChannels(Operation):
57 57
58 58 def run(self, dataOut, channelList):
59 59
60 60 channelIndexList = []
61 61 self.dataOut = dataOut
62 62 for channel in channelList:
63 63 if channel not in self.dataOut.channelList:
64 64 raise ValueError("Channel %d is not in %s" %(channel, str(self.dataOut.channelList)))
65 65
66 66 index = self.dataOut.channelList.index(channel)
67 67 channelIndexList.append(index)
68 68 self.selectChannelsByIndex(channelIndexList)
69 69 return self.dataOut
70 70
71 71 def selectChannelsByIndex(self, channelIndexList):
72 72 """
73 73 Selects a block of data for the channels given in channelIndexList
74 74
75 75 Input:
76 76 channelIndexList : plain list of channel indexes to select, e.g. [2,3,7]
77 77
78 78 Affected:
79 79 self.dataOut.data
80 80 self.dataOut.channelIndexList
81 81 self.dataOut.nChannels
82 82 self.dataOut.m_ProcessingHeader.totalSpectra
83 83 self.dataOut.systemHeaderObj.numChannels
84 84 self.dataOut.m_ProcessingHeader.blockSize
85 85
86 86 Return:
87 87 None
88 88 """
89 89
90 90 for channelIndex in channelIndexList:
91 91 if channelIndex not in self.dataOut.channelIndexList:
92 92 raise ValueError("The value %d in channelIndexList is not valid" %channelIndex)
93 93
94 94 if self.dataOut.type == 'Voltage':
95 95 if self.dataOut.flagDataAsBlock:
96 96 """
97 97 If the data was read in blocks, its dimensions are [nChannels, nProfiles, nHeis]
98 98 """
99 99 data = self.dataOut.data[channelIndexList,:,:]
100 100 else:
101 101 data = self.dataOut.data[channelIndexList,:]
102 102
103 103 self.dataOut.data = data
104 104 # self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
105 105 self.dataOut.channelList = range(len(channelIndexList))
106 106
107 107 elif self.dataOut.type == 'Spectra':
108 108 data_spc = self.dataOut.data_spc[channelIndexList, :]
109 109 data_dc = self.dataOut.data_dc[channelIndexList, :]
110 110
111 111 self.dataOut.data_spc = data_spc
112 112 self.dataOut.data_dc = data_dc
113 113
114 114 # self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
115 115 self.dataOut.channelList = range(len(channelIndexList))
116 116 self.__selectPairsByChannel(channelIndexList)
117 117
118 118 return 1
119 119
120 120 def __selectPairsByChannel(self, channelList=None):
121 121
122 122 if channelList == None:
123 123 return
124 124
125 125 pairsIndexListSelected = []
126 126 for pairIndex in self.dataOut.pairsIndexList:
127 127 # First pair
128 128 if self.dataOut.pairsList[pairIndex][0] not in channelList:
129 129 continue
130 130 # Second pair
131 131 if self.dataOut.pairsList[pairIndex][1] not in channelList:
132 132 continue
133 133
134 134 pairsIndexListSelected.append(pairIndex)
135 135
136 136 if not pairsIndexListSelected:
137 137 self.dataOut.data_cspc = None
138 138 self.dataOut.pairsList = []
139 139 return
140 140
141 141 self.dataOut.data_cspc = self.dataOut.data_cspc[pairsIndexListSelected]
142 142 self.dataOut.pairsList = [self.dataOut.pairsList[i]
143 143 for i in pairsIndexListSelected]
144 144
145 145 return
146 146
147 147 class selectHeights(Operation):
148 148
149 149 def run(self, dataOut, minHei=None, maxHei=None):
150 150 """
151 151 Selects a block of data for the group of heights within the range
152 152 minHei <= height <= maxHei
153 153
154 154 Input:
155 155 minHei : minimum height to consider
156 156 maxHei : maximum height to consider
157 157
158 158 Affected:
159 159 Several attributes are changed indirectly through the selectHeightsByIndex method
160 160
161 161 Return:
162 162 1 if the method ran successfully, otherwise 0
163 163 """
164 164
165 165 self.dataOut = dataOut
166 166
167 167 if minHei == None:
168 168 minHei = self.dataOut.heightList[0]
169 169
170 170 if maxHei == None:
171 171 maxHei = self.dataOut.heightList[-1]
172 172
173 173 if (minHei < self.dataOut.heightList[0]):
174 174 minHei = self.dataOut.heightList[0]
175 175
176 176 if (maxHei > self.dataOut.heightList[-1]):
177 177 maxHei = self.dataOut.heightList[-1]
178 178
179 179 minIndex = 0
180 180 maxIndex = 0
181 181 heights = self.dataOut.heightList
182 182
183 183 inda = numpy.where(heights >= minHei)
184 184 indb = numpy.where(heights <= maxHei)
185 185
186 186 try:
187 187 minIndex = inda[0][0]
188 188 except:
189 189 minIndex = 0
190 190
191 191 try:
192 192 maxIndex = indb[0][-1]
193 193 except:
194 194 maxIndex = len(heights)
195 195
196 196 self.selectHeightsByIndex(minIndex, maxIndex)
197 197
198 198 return self.dataOut
199 199
200 200 def selectHeightsByIndex(self, minIndex, maxIndex):
201 201 """
202 202 Selects a block of data for the group of height indexes within the range
203 203 minIndex <= index <= maxIndex
204 204
205 205 Input:
206 206 minIndex : minimum height index to consider
207 207 maxIndex : maximum height index to consider
208 208
209 209 Affected:
210 210 self.dataOut.data
211 211 self.dataOut.heightList
212 212
213 213 Return:
214 214 1 if the method ran successfully, otherwise 0
215 215 """
216 216
217 217 if self.dataOut.type == 'Voltage':
218 218 if (minIndex < 0) or (minIndex > maxIndex):
219 219 raise ValueError("Height index range (%d,%d) is not valid" % (minIndex, maxIndex))
220 220
221 221 if (maxIndex >= self.dataOut.nHeights):
222 222 maxIndex = self.dataOut.nHeights
223 223
224 224 #voltage
225 225 if self.dataOut.flagDataAsBlock:
226 226 """
227 227 If the data is read in blocks, dimension = [nChannels, nProfiles, nHeis]
228 228 """
229 229 data = self.dataOut.data[:,:, minIndex:maxIndex]
230 230 else:
231 231 data = self.dataOut.data[:, minIndex:maxIndex]
232 232
233 233 # firstHeight = self.dataOut.heightList[minIndex]
234 234
235 235 self.dataOut.data = data
236 236 self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex]
237 237
238 238 if self.dataOut.nHeights <= 1:
239 239 raise ValueError("selectHeights: Too few heights. Current number of heights is %d" %(self.dataOut.nHeights))
240 240 elif self.dataOut.type == 'Spectra':
241 241 if (minIndex < 0) or (minIndex > maxIndex):
242 242 raise ValueError("Error selecting heights: Index range (%d,%d) is not valid" % (
243 243 minIndex, maxIndex))
244 244
245 245 if (maxIndex >= self.dataOut.nHeights):
246 246 maxIndex = self.dataOut.nHeights - 1
247 247
248 248 # Spectra
249 249 data_spc = self.dataOut.data_spc[:, :, minIndex:maxIndex + 1]
250 250
251 251 data_cspc = None
252 252 if self.dataOut.data_cspc is not None:
253 253 data_cspc = self.dataOut.data_cspc[:, :, minIndex:maxIndex + 1]
254 254
255 255 data_dc = None
256 256 if self.dataOut.data_dc is not None:
257 257 data_dc = self.dataOut.data_dc[:, minIndex:maxIndex + 1]
258 258
259 259 self.dataOut.data_spc = data_spc
260 260 self.dataOut.data_cspc = data_cspc
261 261 self.dataOut.data_dc = data_dc
262 262
263 263 self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex + 1]
264 264
265 265 return 1
266 266
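# Illustrative sketch (not part of the processing chain): how selectHeights
# maps the physical limits minHei/maxHei to array indexes with numpy.where,
# mirroring run() above. The height values below are made-up example numbers,
# not taken from any real experiment; numpy is the module-level import used
# throughout this file.
def _demo_select_heights_indices(minHei=100.0, maxHei=107.5):
    heights = numpy.arange(90.0, 120.0, 2.5)   # hypothetical heightList in km
    inda = numpy.where(heights >= minHei)
    indb = numpy.where(heights <= maxHei)
    minIndex = inda[0][0] if inda[0].size else 0
    maxIndex = indb[0][-1] if indb[0].size else len(heights) - 1
    return minIndex, maxIndex, heights[minIndex:maxIndex + 1]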
267 267
268 268 class filterByHeights(Operation):
269 269
270 270 def run(self, dataOut, window):
271 271
272 272 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
273 273
274 274 if window == None:
275 275 window = (dataOut.radarControllerHeaderObj.txA/dataOut.radarControllerHeaderObj.nBaud) / deltaHeight
276 276
277 277 newdelta = deltaHeight * window
278 278 r = dataOut.nHeights % window
279 279 newheights = (dataOut.nHeights-r)/window
280 280
281 281 if newheights <= 1:
282 282 raise ValueError("filterByHeights: Too few heights. Current number of heights is %d and window is %d" %(dataOut.nHeights, window))
283 283
284 284 if dataOut.flagDataAsBlock:
285 285 """
286 286 If the data is read in blocks, dimension = [nChannels, nProfiles, nHeis]
287 287 """
288 288 buffer = dataOut.data[:, :, 0:int(dataOut.nHeights-r)]
289 289 buffer = buffer.reshape(dataOut.nChannels, dataOut.nProfiles, int(dataOut.nHeights/window), window)
290 290 buffer = numpy.sum(buffer,3)
291 291
292 292 else:
293 293 buffer = dataOut.data[:,0:int(dataOut.nHeights-r)]
294 294 buffer = buffer.reshape(dataOut.nChannels,int(dataOut.nHeights/window),int(window))
295 295 buffer = numpy.sum(buffer,2)
296 296
297 297 dataOut.data = buffer
298 298 dataOut.heightList = dataOut.heightList[0] + numpy.arange( newheights )*newdelta
299 299 dataOut.windowOfFilter = window
300 300
301 301 return dataOut
302 302
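# Illustrative sketch (not part of the processing chain): the reshape-and-sum
# that filterByHeights applies above, on a toy (nChannels, nHeights) array.
# The trailing nHeights % window samples are dropped, exactly as in run().
# Shapes and the window value are example assumptions only.
def _demo_filter_by_heights(window=4):
    nChannels, nHeights = 2, 10
    data = numpy.arange(nChannels * nHeights, dtype=float).reshape(nChannels, nHeights)
    r = nHeights % window                         # samples that do not fill a window
    buffer = data[:, 0:nHeights - r]
    buffer = buffer.reshape(nChannels, (nHeights - r) // window, window)
    return numpy.sum(buffer, 2)                   # one summed sample per window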
303 303
304 304 class setH0(Operation):
305 305
306 306 def run(self, dataOut, h0, deltaHeight = None):
307 307
308 308 if not deltaHeight:
309 309 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
310 310
311 311 nHeights = dataOut.nHeights
312 312
313 313 newHeiRange = h0 + numpy.arange(nHeights)*deltaHeight
314 314
315 315 dataOut.heightList = newHeiRange
316 316
317 317 return dataOut
318 318
319 319
320 320 class deFlip(Operation):
321 321
322 322 def run(self, dataOut, channelList = []):
323 323
324 324 data = dataOut.data.copy()
self.flip = getattr(self, 'flip', 1) # keep the alternating sign between calls; initialize to +1 on the first call (flip was never initialized elsewhere)
325 325
326 326 if dataOut.flagDataAsBlock:
327 327 flip = self.flip
328 328 profileList = list(range(dataOut.nProfiles))
329 329
330 330 if not channelList:
331 331 for thisProfile in profileList:
332 332 data[:,thisProfile,:] = data[:,thisProfile,:]*flip
333 333 flip *= -1.0
334 334 else:
335 335 for thisChannel in channelList:
336 336 if thisChannel not in dataOut.channelList:
337 337 continue
338 338
339 339 for thisProfile in profileList:
340 340 data[thisChannel,thisProfile,:] = data[thisChannel,thisProfile,:]*flip
341 341 flip *= -1.0
342 342
343 343 self.flip = flip
344 344
345 345 else:
346 346 if not channelList:
347 347 data[:,:] = data[:,:]*self.flip
348 348 else:
349 349 for thisChannel in channelList:
350 350 if thisChannel not in dataOut.channelList:
351 351 continue
352 352
353 353 data[thisChannel,:] = data[thisChannel,:]*self.flip
354 354
355 355 self.flip *= -1.
356 356
357 357 dataOut.data = data
358 358
359 359 return dataOut
360 360
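# Illustrative sketch (not part of the processing chain): deFlip above removes
# a +1/-1 modulation by multiplying successive profiles with an alternating
# sign. Here a toy profile sequence is modulated and then recovered; the
# shapes are example assumptions only.
def _demo_deflip(nProfiles=4, nHeights=6):
    data = numpy.ones((nProfiles, nHeights), dtype=complex)
    signs = numpy.array([(-1.0) ** k for k in range(nProfiles)])
    modulated = data * signs[:, None]             # what the receiver would deliver
    flip = 1.0
    recovered = modulated.copy()
    for k in range(nProfiles):
        recovered[k, :] *= flip                   # same alternation used in run()
        flip *= -1.0
    return numpy.allclose(recovered, data)        # True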
361 361
362 362 class setAttribute(Operation):
363 363 '''
364 364 Set an arbitrary attribute(s) to dataOut
365 365 '''
366 366
367 367 def __init__(self):
368 368
369 369 Operation.__init__(self)
370 370 self._ready = False
371 371
372 372 def run(self, dataOut, **kwargs):
373 373
374 374 for key, value in kwargs.items():
375 375 setattr(dataOut, key, value)
376 376
377 377 return dataOut
378 378
379 379
380 380 @MPDecorator
381 381 class printAttribute(Operation):
382 382 '''
383 383 Print an arbitrary attribute of dataOut
384 384 '''
385 385
386 386 def __init__(self):
387 387
388 388 Operation.__init__(self)
389 389
390 390 def run(self, dataOut, attributes):
391 391
392 if isinstance(attributes, str):
393 attributes = [attributes]
392 394 for attr in attributes:
393 395 if hasattr(dataOut, attr):
394 396 log.log(getattr(dataOut, attr), attr)
395 397
396 398
397 399 class interpolateHeights(Operation):
398 400
399 401 def run(self, dataOut, topLim, botLim):
400 402 #69 to 72 for julia
401 403 #82-84 for meteors
402 404 if len(numpy.shape(dataOut.data))==2:
403 405 sampInterp = (dataOut.data[:,botLim-1] + dataOut.data[:,topLim+1])/2
404 406 sampInterp = numpy.transpose(numpy.tile(sampInterp,(topLim-botLim + 1,1)))
405 407 #dataOut.data[:,botLim:limSup+1] = sampInterp
406 408 dataOut.data[:,botLim:topLim+1] = sampInterp
407 409 else:
408 410 nHeights = dataOut.data.shape[2]
409 411 x = numpy.hstack((numpy.arange(botLim),numpy.arange(topLim+1,nHeights)))
410 412 y = dataOut.data[:,:,list(range(botLim))+list(range(topLim+1,nHeights))]
411 413 f = interpolate.interp1d(x, y, axis = 2)
412 414 xnew = numpy.arange(botLim,topLim+1)
413 415 ynew = f(xnew)
414 416 dataOut.data[:,:,botLim:topLim+1] = ynew
415 417
416 418 return dataOut
417 419
418 420
419 421 class CohInt(Operation):
420 422
421 423 isConfig = False
422 424 __profIndex = 0
423 425 __byTime = False
424 426 __initime = None
425 427 __lastdatatime = None
426 428 __integrationtime = None
427 429 __buffer = None
428 430 __bufferStride = []
429 431 __dataReady = False
430 432 __profIndexStride = 0
431 433 __dataToPutStride = False
432 434 n = None
433 435
434 436 def __init__(self, **kwargs):
435 437
436 438 Operation.__init__(self, **kwargs)
437 439
438 440 def setup(self, n=None, timeInterval=None, stride=None, overlapping=False, byblock=False):
439 441 """
440 442 Set the parameters of the integration class.
441 443
442 444 Inputs:
443 445
444 446 n : Number of coherent integrations
445 447 timeInterval : Integration time in seconds. Ignored if the parameter "n" is given
446 448 overlapping : If True, keep a sliding buffer of the last n profiles (overlapping integration)
447 449 """
448 450
449 451 self.__initime = None
450 452 self.__lastdatatime = 0
451 453 self.__buffer = None
452 454 self.__dataReady = False
453 455 self.byblock = byblock
454 456 self.stride = stride
455 457
456 458 if n == None and timeInterval == None:
457 459 raise ValueError("n or timeInterval should be specified ...")
458 460
459 461 if n != None:
460 462 self.n = n
461 463 self.__byTime = False
462 464 else:
463 465 self.__integrationtime = timeInterval #* 60. #if (type(timeInterval)!=integer) -> change this line
464 466 self.n = 9999
465 467 self.__byTime = True
466 468
467 469 if overlapping:
468 470 self.__withOverlapping = True
469 471 self.__buffer = None
470 472 else:
471 473 self.__withOverlapping = False
472 474 self.__buffer = 0
473 475
474 476 self.__profIndex = 0
475 477
476 478 def putData(self, data):
477 479
478 480 """
479 481 Add a profile to the __buffer and increase the __profIndex by one
480 482
481 483 """
482 484
483 485 if not self.__withOverlapping:
484 486 self.__buffer += data.copy()
485 487 self.__profIndex += 1
486 488 return
487 489
488 490 #Overlapping data
489 491 nChannels, nHeis = data.shape
490 492 data = numpy.reshape(data, (1, nChannels, nHeis))
491 493
492 494 #If the buffer is empty then it takes the data value
493 495 if self.__buffer is None:
494 496 self.__buffer = data
495 497 self.__profIndex += 1
496 498 return
497 499
498 500 #If the buffer length is lower than n then stack the data value
499 501 if self.__profIndex < self.n:
500 502 self.__buffer = numpy.vstack((self.__buffer, data))
501 503 self.__profIndex += 1
502 504 return
503 505
504 506 #If the buffer length is equal to n then replacing the last buffer value with the data value
505 507 self.__buffer = numpy.roll(self.__buffer, -1, axis=0)
506 508 self.__buffer[self.n-1] = data
507 509 self.__profIndex = self.n
508 510 return
509 511
510 512
511 513 def pushData(self):
512 514 """
513 515 Return the sum of the buffered profiles and the number of profiles used in the sum.
514 516
515 517 Affected:
516 518
517 519 self.__profileIndex
518 520
519 521 """
520 522
521 523 if not self.__withOverlapping:
522 524 data = self.__buffer
523 525 n = self.__profIndex
524 526
525 527 self.__buffer = 0
526 528 self.__profIndex = 0
527 529
528 530 return data, n
529 531
530 532 #Integration with Overlapping
531 533 data = numpy.sum(self.__buffer, axis=0)
532 534 # print data
533 535 # raise
534 536 n = self.__profIndex
535 537
536 538 return data, n
537 539
538 540 def byProfiles(self, data):
539 541
540 542 self.__dataReady = False
541 543 avgdata = None
542 544 # n = None
543 545 # print data
544 546 # raise
545 547 self.putData(data)
546 548
547 549 if self.__profIndex == self.n:
548 550 avgdata, n = self.pushData()
549 551 self.__dataReady = True
550 552
551 553 return avgdata
552 554
553 555 def byTime(self, data, datatime):
554 556
555 557 self.__dataReady = False
556 558 avgdata = None
557 559 n = None
558 560
559 561 self.putData(data)
560 562
561 563 if (datatime - self.__initime) >= self.__integrationtime:
562 564 avgdata, n = self.pushData()
563 565 self.n = n
564 566 self.__dataReady = True
565 567
566 568 return avgdata
567 569
568 570 def integrateByStride(self, data, datatime):
569 571 # print data
570 572 if self.__profIndex == 0:
571 573 self.__buffer = [[data.copy(), datatime]]
572 574 else:
573 575 self.__buffer.append([data.copy(),datatime])
574 576 self.__profIndex += 1
575 577 self.__dataReady = False
576 578
577 579 if self.__profIndex == self.n * self.stride :
578 580 self.__dataToPutStride = True
579 581 self.__profIndexStride = 0
580 582 self.__profIndex = 0
581 583 self.__bufferStride = []
582 584 for i in range(self.stride):
583 585 current = self.__buffer[i::self.stride]
584 586 data = numpy.sum([t[0] for t in current], axis=0)
585 587 avgdatatime = numpy.average([t[1] for t in current])
586 588 # print data
587 589 self.__bufferStride.append((data, avgdatatime))
588 590
589 591 if self.__dataToPutStride:
590 592 self.__dataReady = True
591 593 self.__profIndexStride += 1
592 594 if self.__profIndexStride == self.stride:
593 595 self.__dataToPutStride = False
594 596 # print self.__bufferStride[self.__profIndexStride - 1]
595 597 # raise
596 598 return self.__bufferStride[self.__profIndexStride - 1]
597 599
598 600
599 601 return None, None
600 602
601 603 def integrate(self, data, datatime=None):
602 604
603 605 if self.__initime == None:
604 606 self.__initime = datatime
605 607
606 608 if self.__byTime:
607 609 avgdata = self.byTime(data, datatime)
608 610 else:
609 611 avgdata = self.byProfiles(data)
610 612
611 613
612 614 self.__lastdatatime = datatime
613 615
614 616 if avgdata is None:
615 617 return None, None
616 618
617 619 avgdatatime = self.__initime
618 620
619 621 deltatime = datatime - self.__lastdatatime
620 622
621 623 if not self.__withOverlapping:
622 624 self.__initime = datatime
623 625 else:
624 626 self.__initime += deltatime
625 627
626 628 return avgdata, avgdatatime
627 629
628 630 def integrateByBlock(self, dataOut):
629 631
630 632 times = int(dataOut.data.shape[1]/self.n)
631 633 avgdata = numpy.zeros((dataOut.nChannels, times, dataOut.nHeights), dtype=complex)
632 634
633 635 id_min = 0
634 636 id_max = self.n
635 637
636 638 for i in range(times):
637 639 junk = dataOut.data[:,id_min:id_max,:]
638 640 avgdata[:,i,:] = junk.sum(axis=1)
639 641 id_min += self.n
640 642 id_max += self.n
641 643
642 644 timeInterval = dataOut.ippSeconds*self.n
643 645 avgdatatime = (times - 1) * timeInterval + dataOut.utctime
644 646 self.__dataReady = True
645 647 return avgdata, avgdatatime
646 648
647 649 def run(self, dataOut, n=None, timeInterval=None, stride=None, overlapping=False, byblock=False, **kwargs):
648 650
649 651 if not self.isConfig:
650 652 self.setup(n=n, stride=stride, timeInterval=timeInterval, overlapping=overlapping, byblock=byblock, **kwargs)
651 653 self.isConfig = True
652 654
653 655 if dataOut.flagDataAsBlock:
654 656 """
655 657 If the data is read in blocks, dimension = [nChannels, nProfiles, nHeis]
656 658 """
657 659 avgdata, avgdatatime = self.integrateByBlock(dataOut)
658 660 dataOut.nProfiles = int(dataOut.nProfiles / self.n)
659 661 else:
660 662 if stride is None:
661 663 avgdata, avgdatatime = self.integrate(dataOut.data, dataOut.utctime)
662 664 else:
663 665 avgdata, avgdatatime = self.integrateByStride(dataOut.data, dataOut.utctime)
664 666
665 667
666 668 # dataOut.timeInterval *= n
667 669 dataOut.flagNoData = True
668 670
669 671 if self.__dataReady:
670 672 dataOut.data = avgdata
671 673 if not dataOut.flagCohInt:
672 674 dataOut.nCohInt *= self.n
673 675 dataOut.flagCohInt = True
674 676 dataOut.utctime = avgdatatime
675 677 # print avgdata, avgdatatime
676 678 # raise
677 679 # dataOut.timeInterval = dataOut.ippSeconds * dataOut.nCohInt
678 680 dataOut.flagNoData = False
679 681 return dataOut
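
# Illustrative sketch (not part of the processing chain): non-overlapping
# coherent integration, as performed by CohInt above, reduces to summing n
# consecutive complex profiles; the caller then multiplies nCohInt by n.
# The toy shapes and random data are example assumptions only.
def _demo_coherent_integration(n=4):
    nChannels, nHeights = 2, 8
    profiles = (numpy.random.randn(n, nChannels, nHeights)
                + 1j * numpy.random.randn(n, nChannels, nHeights))
    avgdata = profiles.sum(axis=0)                # same role as pushData() without overlapping
    return avgdata                                # shape (nChannels, nHeights)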
680 682
681 683 class Decoder(Operation):
682 684
683 685 isConfig = False
684 686 __profIndex = 0
685 687
686 688 code = None
687 689
688 690 nCode = None
689 691 nBaud = None
690 692
691 693 def __init__(self, **kwargs):
692 694
693 695 Operation.__init__(self, **kwargs)
694 696
695 697 self.times = None
696 698 self.osamp = None
697 699 # self.__setValues = False
698 700 self.isConfig = False
699 701 self.setupReq = False
700 702 def setup(self, code, osamp, dataOut):
701 703
702 704 self.__profIndex = 0
703 705
704 706 self.code = code
705 707
706 708 self.nCode = len(code)
707 709 self.nBaud = len(code[0])
708 710
709 711 if (osamp != None) and (osamp >1):
710 712 self.osamp = osamp
711 713 self.code = numpy.repeat(code, repeats=self.osamp, axis=1)
712 714 self.nBaud = self.nBaud*self.osamp
713 715
714 716 self.__nChannels = dataOut.nChannels
715 717 self.__nProfiles = dataOut.nProfiles
716 718 self.__nHeis = dataOut.nHeights
717 719
718 720 if self.__nHeis < self.nBaud:
719 721 raise ValueError('Number of heights (%d) should be greater than number of bauds (%d)' %(self.__nHeis, self.nBaud))
720 722
721 723 #Frequency
722 724 __codeBuffer = numpy.zeros((self.nCode, self.__nHeis), dtype=complex)
723 725
724 726 __codeBuffer[:,0:self.nBaud] = self.code
725 727
726 728 self.fft_code = numpy.conj(numpy.fft.fft(__codeBuffer, axis=1))
727 729
728 730 if dataOut.flagDataAsBlock:
729 731
730 732 self.ndatadec = self.__nHeis #- self.nBaud + 1
731 733
732 734 self.datadecTime = numpy.zeros((self.__nChannels, self.__nProfiles, self.ndatadec), dtype=complex)
733 735
734 736 else:
735 737
736 738 #Time
737 739 self.ndatadec = self.__nHeis #- self.nBaud + 1
738 740
739 741 self.datadecTime = numpy.zeros((self.__nChannels, self.ndatadec), dtype=complex)
740 742
741 743 def __convolutionInFreq(self, data):
742 744
743 745 fft_code = self.fft_code[self.__profIndex].reshape(1,-1)
744 746
745 747 fft_data = numpy.fft.fft(data, axis=1)
746 748
747 749 conv = fft_data*fft_code
748 750
749 751 data = numpy.fft.ifft(conv,axis=1)
750 752
751 753 return data
752 754
753 755 def __convolutionInFreqOpt(self, data):
754 756
755 757 raise NotImplementedError
756 758
757 759 def __convolutionInTime(self, data):
758 760
759 761 code = self.code[self.__profIndex]
760 762 for i in range(self.__nChannels):
761 763 self.datadecTime[i,:] = numpy.correlate(data[i,:], code, mode='full')[self.nBaud-1:]
762 764
763 765 return self.datadecTime
764 766
765 767 def __convolutionByBlockInTime(self, data):
766 768
767 769 repetitions = int(self.__nProfiles / self.nCode)
768 770 junk = numpy.lib.stride_tricks.as_strided(self.code, (repetitions, self.code.size), (0, self.code.itemsize))
769 771 junk = junk.flatten()
770 772 code_block = numpy.reshape(junk, (self.nCode*repetitions, self.nBaud))
771 773 profilesList = range(self.__nProfiles)
772 774
773 775 for i in range(self.__nChannels):
774 776 for j in profilesList:
775 777 self.datadecTime[i,j,:] = numpy.correlate(data[i,j,:], code_block[j,:], mode='full')[self.nBaud-1:]
776 778 return self.datadecTime
777 779
778 780 def __convolutionByBlockInFreq(self, data):
779 781
780 782 raise NotImplementedError("Decoder by frequency for blocks not implemented")
781 783
782 784
783 785 fft_code = self.fft_code[self.__profIndex].reshape(1,-1)
784 786
785 787 fft_data = numpy.fft.fft(data, axis=2)
786 788
787 789 conv = fft_data*fft_code
788 790
789 791 data = numpy.fft.ifft(conv,axis=2)
790 792
791 793 return data
792 794
793 795
794 796 def run(self, dataOut, code=None, nCode=None, nBaud=None, mode = 0, osamp=None, times=None):
795 797
796 798 if dataOut.flagDecodeData:
797 799 print("This data is already decoded, decoding it again ...")
798 800
799 801 if not self.isConfig:
800 802
801 803 if code is None:
802 804 if dataOut.code is None:
803 805 raise ValueError("Code could not be read from %s instance. Enter a value in Code parameter" %dataOut.type)
804 806
805 807 code = dataOut.code
806 808 else:
807 809 code = numpy.array(code).reshape(nCode,nBaud)
808 810 self.setup(code, osamp, dataOut)
809 811
810 812 self.isConfig = True
811 813
812 814 if mode == 3:
813 815 sys.stderr.write("Decoder Warning: mode=%d is not valid, using mode=0\n" %mode)
814 816
815 817 if times != None:
816 818 sys.stderr.write("Decoder Warning: Argument 'times' is not used anymore\n")
817 819
818 820 if self.code is None:
819 821 print("Fail decoding: Code is not defined.")
820 822 return
821 823
822 824 self.__nProfiles = dataOut.nProfiles
823 825 datadec = None
824 826
825 827 if mode == 3:
826 828 mode = 0
827 829
828 830 if dataOut.flagDataAsBlock:
829 831 """
830 832 Decoding when data has been read as a block
831 833 """
832 834
833 835 if mode == 0:
834 836 datadec = self.__convolutionByBlockInTime(dataOut.data)
835 837 if mode == 1:
836 838 datadec = self.__convolutionByBlockInFreq(dataOut.data)
837 839 else:
838 840 """
839 841 Decoding when data have been read profile by profile
840 842 """
841 843 if mode == 0:
842 844 datadec = self.__convolutionInTime(dataOut.data)
843 845
844 846 if mode == 1:
845 847 datadec = self.__convolutionInFreq(dataOut.data)
846 848
847 849 if mode == 2:
848 850 datadec = self.__convolutionInFreqOpt(dataOut.data)
849 851
850 852 if datadec is None:
851 853 raise ValueError("Codification mode selected is not valid: mode=%d. Try selecting 0 or 1" %mode)
852 854
853 855 dataOut.code = self.code
854 856 dataOut.nCode = self.nCode
855 857 dataOut.nBaud = self.nBaud
856 858
857 859 dataOut.data = datadec
858 860
859 861 dataOut.heightList = dataOut.heightList[0:datadec.shape[-1]]
860 862
861 863 dataOut.flagDecodeData = True # assume the data is decoded
862 864
863 865 if self.__profIndex == self.nCode-1:
864 866 self.__profIndex = 0
865 867 return dataOut
866 868
867 869 self.__profIndex += 1
868 870
869 871 return dataOut
870 872 # dataOut.flagDeflipData = True # assume the data still has the flip applied
871 873
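# Illustrative sketch (not part of the processing chain): time-domain decoding
# as in Decoder.__convolutionInTime above, i.e. correlating each profile with
# the transmitted code and keeping the last nHeis lags. The 13-baud Barker
# code and the echo position are example assumptions, not any experiment's setup.
def _demo_decode_in_time():
    code = numpy.array([1, 1, 1, 1, 1, -1, -1, 1, 1, -1, 1, -1, 1], dtype=float)
    nBaud = code.size
    nHeis = 32
    profile = numpy.zeros(nHeis, dtype=complex)
    profile[5:5 + nBaud] = code                   # a single coded echo at range gate 5
    decoded = numpy.correlate(profile, code, mode='full')[nBaud - 1:]
    return decoded                                # peak (= nBaud) appears at index 5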
872 874
873 875 class ProfileConcat(Operation):
874 876
875 877 isConfig = False
876 878 buffer = None
877 879
878 880 def __init__(self, **kwargs):
879 881
880 882 Operation.__init__(self, **kwargs)
881 883 self.profileIndex = 0
882 884
883 885 def reset(self):
884 886 self.buffer = numpy.zeros_like(self.buffer)
885 887 self.start_index = 0
886 888 self.times = 1
887 889
888 890 def setup(self, data, m, n=1):
889 891 self.buffer = numpy.zeros((data.shape[0],data.shape[1]*m),dtype=type(data[0,0]))
890 892 self.nHeights = data.shape[1]#.nHeights
891 893 self.start_index = 0
892 894 self.times = 1
893 895
894 896 def concat(self, data):
895 897
896 898 self.buffer[:,self.start_index:self.nHeights*self.times] = data.copy()
897 899 self.start_index = self.start_index + self.nHeights
898 900
899 901 def run(self, dataOut, m):
900 902 dataOut.flagNoData = True
901 903
902 904 if not self.isConfig:
903 905 self.setup(dataOut.data, m, 1)
904 906 self.isConfig = True
905 907
906 908 if dataOut.flagDataAsBlock:
907 909 raise ValueError("ProfileConcat can only be used when voltage data has been read profile by profile (getBlock = False)")
908 910
909 911 else:
910 912 self.concat(dataOut.data)
911 913 self.times += 1
912 914 if self.times > m:
913 915 dataOut.data = self.buffer
914 916 self.reset()
915 917 dataOut.flagNoData = False
916 918 # more header and dataOut attributes should be updated here, for example, the heights
917 919 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
918 920 xf = dataOut.heightList[0] + dataOut.nHeights * deltaHeight * m
919 921 dataOut.heightList = numpy.arange(dataOut.heightList[0], xf, deltaHeight)
920 922 dataOut.ippSeconds *= m
921 923 return dataOut
922 924
923 925 class ProfileSelector(Operation):
924 926
925 927 profileIndex = None
926 928 # Total number of profiles
927 929 nProfiles = None
928 930
929 931 def __init__(self, **kwargs):
930 932
931 933 Operation.__init__(self, **kwargs)
932 934 self.profileIndex = 0
933 935
934 936 def incProfileIndex(self):
935 937
936 938 self.profileIndex += 1
937 939
938 940 if self.profileIndex >= self.nProfiles:
939 941 self.profileIndex = 0
940 942
941 943 def isThisProfileInRange(self, profileIndex, minIndex, maxIndex):
942 944
943 945 if profileIndex < minIndex:
944 946 return False
945 947
946 948 if profileIndex > maxIndex:
947 949 return False
948 950
949 951 return True
950 952
951 953 def isThisProfileInList(self, profileIndex, profileList):
952 954
953 955 if profileIndex not in profileList:
954 956 return False
955 957
956 958 return True
957 959
958 960 def run(self, dataOut, profileList=None, profileRangeList=None, beam=None, byblock=False, rangeList = None, nProfiles=None):
959 961
960 962 """
961 963 ProfileSelector:
962 964
963 965 Inputs:
964 966 profileList : Index of profiles selected. Example: profileList = (0,1,2,7,8)
965 967
966 968 profileRangeList : Minimum and maximum profile indexes. Example: profileRangeList = (4, 30)
967 969
968 970 rangeList : List of profile ranges. Example: rangeList = ((4, 30), (32, 64), (128, 256))
969 971
970 972 """
971 973
972 974 if rangeList is not None:
973 975 if type(rangeList[0]) not in (tuple, list):
974 976 rangeList = [rangeList]
975 977
976 978 dataOut.flagNoData = True
977 979
978 980 if dataOut.flagDataAsBlock:
979 981 """
980 982 data dimension = [nChannels, nProfiles, nHeis]
981 983 """
982 984 if profileList != None:
983 985 dataOut.data = dataOut.data[:,profileList,:]
984 986
985 987 if profileRangeList != None:
986 988 minIndex = profileRangeList[0]
987 989 maxIndex = profileRangeList[1]
988 990 profileList = list(range(minIndex, maxIndex+1))
989 991
990 992 dataOut.data = dataOut.data[:,minIndex:maxIndex+1,:]
991 993
992 994 if rangeList != None:
993 995
994 996 profileList = []
995 997
996 998 for thisRange in rangeList:
997 999 minIndex = thisRange[0]
998 1000 maxIndex = thisRange[1]
999 1001
1000 1002 profileList.extend(list(range(minIndex, maxIndex+1)))
1001 1003
1002 1004 dataOut.data = dataOut.data[:,profileList,:]
1003 1005
1004 1006 dataOut.nProfiles = len(profileList)
1005 1007 dataOut.profileIndex = dataOut.nProfiles - 1
1006 1008 dataOut.flagNoData = False
1007 1009
1008 1010 return dataOut
1009 1011
1010 1012 """
1011 1013 data dimension = [nChannels, nHeis]
1012 1014 """
1013 1015
1014 1016 if profileList != None:
1015 1017
1016 1018 if self.isThisProfileInList(dataOut.profileIndex, profileList):
1017 1019
1018 1020 self.nProfiles = len(profileList)
1019 1021 dataOut.nProfiles = self.nProfiles
1020 1022 dataOut.profileIndex = self.profileIndex
1021 1023 dataOut.flagNoData = False
1022 1024
1023 1025 self.incProfileIndex()
1024 1026 return dataOut
1025 1027
1026 1028 if profileRangeList != None:
1027 1029
1028 1030 minIndex = profileRangeList[0]
1029 1031 maxIndex = profileRangeList[1]
1030 1032
1031 1033 if self.isThisProfileInRange(dataOut.profileIndex, minIndex, maxIndex):
1032 1034
1033 1035 self.nProfiles = maxIndex - minIndex + 1
1034 1036 dataOut.nProfiles = self.nProfiles
1035 1037 dataOut.profileIndex = self.profileIndex
1036 1038 dataOut.flagNoData = False
1037 1039
1038 1040 self.incProfileIndex()
1039 1041 return dataOut
1040 1042
1041 1043 if rangeList != None:
1042 1044
1043 1045 nProfiles = 0
1044 1046
1045 1047 for thisRange in rangeList:
1046 1048 minIndex = thisRange[0]
1047 1049 maxIndex = thisRange[1]
1048 1050
1049 1051 nProfiles += maxIndex - minIndex + 1
1050 1052
1051 1053 for thisRange in rangeList:
1052 1054
1053 1055 minIndex = thisRange[0]
1054 1056 maxIndex = thisRange[1]
1055 1057
1056 1058 if self.isThisProfileInRange(dataOut.profileIndex, minIndex, maxIndex):
1057 1059
1058 1060 self.nProfiles = nProfiles
1059 1061 dataOut.nProfiles = self.nProfiles
1060 1062 dataOut.profileIndex = self.profileIndex
1061 1063 dataOut.flagNoData = False
1062 1064
1063 1065 self.incProfileIndex()
1064 1066
1065 1067 break
1066 1068
1067 1069 return dataOut
1068 1070
1069 1071
1070 1072 if beam != None: #beam is only for AMISR data
1071 1073 if self.isThisProfileInList(dataOut.profileIndex, dataOut.beamRangeDict[beam]):
1072 1074 dataOut.flagNoData = False
1073 1075 dataOut.profileIndex = self.profileIndex
1074 1076
1075 1077 self.incProfileIndex()
1076 1078
1077 1079 return dataOut
1078 1080
1079 1081 raise ValueError("ProfileSelector needs profileList, profileRangeList or rangeList parameter")
1080 1082
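# Illustrative sketch (not part of the processing chain): how the rangeList
# parameter of ProfileSelector above expands into individual profile indexes,
# following the same loop used in run(). The ranges are example values only.
def _demo_expand_range_list(rangeList=((4, 7), (10, 12))):
    profileList = []
    for minIndex, maxIndex in rangeList:
        profileList.extend(range(minIndex, maxIndex + 1))
    return profileList                            # [4, 5, 6, 7, 10, 11, 12]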
1081 1083
1082 1084 class Reshaper(Operation):
1083 1085
1084 1086 def __init__(self, **kwargs):
1085 1087
1086 1088 Operation.__init__(self, **kwargs)
1087 1089
1088 1090 self.__buffer = None
1089 1091 self.__nitems = 0
1090 1092
1091 1093 def __appendProfile(self, dataOut, nTxs):
1092 1094
1093 1095 if self.__buffer is None:
1094 1096 shape = (dataOut.nChannels, int(dataOut.nHeights/nTxs) )
1095 1097 self.__buffer = numpy.empty(shape, dtype = dataOut.data.dtype)
1096 1098
1097 1099 ini = dataOut.nHeights * self.__nitems
1098 1100 end = ini + dataOut.nHeights
1099 1101
1100 1102 self.__buffer[:, ini:end] = dataOut.data
1101 1103
1102 1104 self.__nitems += 1
1103 1105
1104 1106 return int(self.__nitems*nTxs)
1105 1107
1106 1108 def __getBuffer(self):
1107 1109
1108 1110 if self.__nitems == int(1./self.__nTxs):
1109 1111
1110 1112 self.__nitems = 0
1111 1113
1112 1114 return self.__buffer.copy()
1113 1115
1114 1116 return None
1115 1117
1116 1118 def __checkInputs(self, dataOut, shape, nTxs):
1117 1119
1118 1120 if shape is None and nTxs is None:
1119 1121 raise ValueError("Reshaper: shape of factor should be defined")
1120 1122
1121 1123 if nTxs:
1122 1124 if nTxs < 0:
1123 1125 raise ValueError("nTxs should be greater than 0")
1124 1126
1125 1127 if nTxs < 1 and dataOut.nProfiles % (1./nTxs) != 0:
1126 1128 raise ValueError("nProfiles= %d is not divisibled by (1./nTxs) = %f" %(dataOut.nProfiles, (1./nTxs)))
1127 1129
1128 1130 shape = [dataOut.nChannels, dataOut.nProfiles*nTxs, dataOut.nHeights/nTxs]
1129 1131
1130 1132 return shape, nTxs
1131 1133
1132 1134 if len(shape) != 2 and len(shape) != 3:
1133 1135 raise ValueError("shape dimension should be equal to 2 or 3. shape = (nProfiles, nHeis) or (nChannels, nProfiles, nHeis). Actually shape = (%d, %d, %d)" %(dataOut.nChannels, dataOut.nProfiles, dataOut.nHeights))
1134 1136
1135 1137 if len(shape) == 2:
1136 1138 shape_tuple = [dataOut.nChannels]
1137 1139 shape_tuple.extend(shape)
1138 1140 else:
1139 1141 shape_tuple = list(shape)
1140 1142
1141 1143 nTxs = 1.0*shape_tuple[1]/dataOut.nProfiles
1142 1144
1143 1145 return shape_tuple, nTxs
1144 1146
1145 1147 def run(self, dataOut, shape=None, nTxs=None):
1146 1148
1147 1149 shape_tuple, self.__nTxs = self.__checkInputs(dataOut, shape, nTxs)
1148 1150
1149 1151 dataOut.flagNoData = True
1150 1152 profileIndex = None
1151 1153
1152 1154 if dataOut.flagDataAsBlock:
1153 1155
1154 1156 dataOut.data = numpy.reshape(dataOut.data, shape_tuple)
1155 1157 dataOut.flagNoData = False
1156 1158
1157 1159 profileIndex = int(dataOut.nProfiles*self.__nTxs) - 1
1158 1160
1159 1161 else:
1160 1162
1161 1163 if self.__nTxs < 1:
1162 1164
1163 1165 self.__appendProfile(dataOut, self.__nTxs)
1164 1166 new_data = self.__getBuffer()
1165 1167
1166 1168 if new_data is not None:
1167 1169 dataOut.data = new_data
1168 1170 dataOut.flagNoData = False
1169 1171
1170 1172 profileIndex = int(dataOut.profileIndex*self.__nTxs)
1171 1173
1172 1174 else:
1173 1175 raise ValueError("nTxs should be greater than 0 and lower than 1, or use VoltageReader(..., getblock=True)")
1174 1176
1175 1177 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1176 1178
1177 1179 dataOut.heightList = numpy.arange(dataOut.nHeights/self.__nTxs) * deltaHeight + dataOut.heightList[0]
1178 1180
1179 1181 dataOut.nProfiles = int(dataOut.nProfiles*self.__nTxs)
1180 1182
1181 1183 dataOut.profileIndex = profileIndex
1182 1184
1183 1185 dataOut.ippSeconds /= self.__nTxs
1184 1186
1185 1187 return dataOut
1186 1188
1187 1189 class SplitProfiles(Operation):
1188 1190
1189 1191 def __init__(self, **kwargs):
1190 1192
1191 1193 Operation.__init__(self, **kwargs)
1192 1194
1193 1195 def run(self, dataOut, n):
1194 1196
1195 1197 dataOut.flagNoData = True
1196 1198 profileIndex = None
1197 1199
1198 1200 if dataOut.flagDataAsBlock:
1199 1201
1200 1202 #nchannels, nprofiles, nsamples
1201 1203 shape = dataOut.data.shape
1202 1204
1203 1205 if shape[2] % n != 0:
1204 1206 raise ValueError("Could not split the data, n=%d has to be multiple of %d" %(n, shape[2]))
1205 1207
1206 1208 new_shape = shape[0], shape[1]*n, int(shape[2]/n)
1207 1209
1208 1210 dataOut.data = numpy.reshape(dataOut.data, new_shape)
1209 1211 dataOut.flagNoData = False
1210 1212
1211 1213 profileIndex = int(dataOut.nProfiles/n) - 1
1212 1214
1213 1215 else:
1214 1216
1215 1217 raise ValueError("Could not split the data when is read Profile by Profile. Use VoltageReader(..., getblock=True)")
1216 1218
1217 1219 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1218 1220
1219 1221 dataOut.heightList = numpy.arange(dataOut.nHeights/n) * deltaHeight + dataOut.heightList[0]
1220 1222
1221 1223 dataOut.nProfiles = int(dataOut.nProfiles*n)
1222 1224
1223 1225 dataOut.profileIndex = profileIndex
1224 1226
1225 1227 dataOut.ippSeconds /= n
1226 1228
1227 1229 return dataOut
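
# Illustrative sketch (not part of the processing chain): SplitProfiles above
# reinterprets each profile as n shorter profiles with a plain reshape, so
# (nChannels, nProfiles, nHeights) becomes (nChannels, nProfiles*n, nHeights//n)
# and ippSeconds is divided by n. The shapes are example assumptions only.
def _demo_split_profiles(n=2):
    nChannels, nProfiles, nHeights = 2, 3, 8
    data = numpy.arange(nChannels * nProfiles * nHeights).reshape(
        nChannels, nProfiles, nHeights)
    return numpy.reshape(data, (nChannels, nProfiles * n, nHeights // n))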
1228 1230
1229 1231 class CombineProfiles(Operation):
1230 1232 def __init__(self, **kwargs):
1231 1233
1232 1234 Operation.__init__(self, **kwargs)
1233 1235
1234 1236 self.__remData = None
1235 1237 self.__profileIndex = 0
1236 1238
1237 1239 def run(self, dataOut, n):
1238 1240
1239 1241 dataOut.flagNoData = True
1240 1242 profileIndex = None
1241 1243
1242 1244 if dataOut.flagDataAsBlock:
1243 1245
1244 1246 #nchannels, nprofiles, nsamples
1245 1247 shape = dataOut.data.shape
1246 1248 new_shape = shape[0], shape[1]//n, shape[2]*n
1247 1249
1248 1250 if shape[1] % n != 0:
1249 1251 raise ValueError("Could not split the data, n=%d has to be multiple of %d" %(n, shape[1]))
1250 1252
1251 1253 dataOut.data = numpy.reshape(dataOut.data, new_shape)
1252 1254 dataOut.flagNoData = False
1253 1255
1254 1256 profileIndex = int(dataOut.nProfiles*n) - 1
1255 1257
1256 1258 else:
1257 1259
1258 1260 #nchannels, nsamples
1259 1261 if self.__remData is None:
1260 1262 newData = dataOut.data
1261 1263 else:
1262 1264 newData = numpy.concatenate((self.__remData, dataOut.data), axis=1)
1263 1265
1264 1266 self.__profileIndex += 1
1265 1267
1266 1268 if self.__profileIndex < n:
1267 1269 self.__remData = newData
1268 1270 #continue
1269 1271 return
1270 1272
1271 1273 self.__profileIndex = 0
1272 1274 self.__remData = None
1273 1275
1274 1276 dataOut.data = newData
1275 1277 dataOut.flagNoData = False
1276 1278
1277 1279 profileIndex = int(dataOut.profileIndex/n)
1278 1280
1279 1281
1280 1282 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1281 1283
1282 1284 dataOut.heightList = numpy.arange(dataOut.nHeights*n) * deltaHeight + dataOut.heightList[0]
1283 1285
1284 1286 dataOut.nProfiles = int(dataOut.nProfiles/n)
1285 1287
1286 1288 dataOut.profileIndex = profileIndex
1287 1289
1288 1290 dataOut.ippSeconds *= n
1289 1291
1290 1292 return dataOut
1291 1293
1292 1294 class PulsePairVoltage(Operation):
1293 1295 '''
1294 1296 Function PulsePair(Signal Power, Velocity)
1295 1297 The real part of Lag[0] provides intensity (power) information
1296 1298 The phase of Lag[1] provides velocity information
1297 1299 
1298 1300 Configuration Parameters:
1299 1301 nPRF = Number of pulses (PRFs) to integrate
1300 1302 theta = Azimuth angle boundaries in degrees
1301 1303
1302 1304 Input:
1303 1305 self.dataOut
1304 1306 lag[N]
1305 1307 Affected:
1306 1308 self.dataOut.spc
1307 1309 '''
1308 1310 isConfig = False
1309 1311 __profIndex = 0
1310 1312 __initime = None
1311 1313 __lastdatatime = None
1312 1314 __buffer = None
1313 1315 noise = None
1314 1316 __dataReady = False
1315 1317 n = None
1316 1318 __nch = 0
1317 1319 __nHeis = 0
1318 1320 removeDC = False
1319 1321 ipp = None
1320 1322 lambda_ = 0
1321 1323
1322 1324 def __init__(self,**kwargs):
1323 1325 Operation.__init__(self,**kwargs)
1324 1326
1325 1327 def setup(self, dataOut, n = None, removeDC=False):
1326 1328 '''
1327 1329 n = Number of input PRFs (profiles)
1328 1330 '''
1329 1331 self.__initime = None
1330 1332 self.__lastdatatime = 0
1331 1333 self.__dataReady = False
1332 1334 self.__buffer = 0
1333 1335 self.__profIndex = 0
1334 1336 self.noise = None
1335 1337 self.__nch = dataOut.nChannels
1336 1338 self.__nHeis = dataOut.nHeights
1337 1339 self.removeDC = removeDC
1338 1340 self.lambda_ = 3.0e8/(9345.0e6)
1339 1341 self.ippSec = dataOut.ippSeconds
1340 1342 self.nCohInt = dataOut.nCohInt
1341 1343 print("IPPseconds",dataOut.ippSeconds)
1342 1344
1343 1345 print("ELVALOR DE n es:", n)
1344 1346 if n == None:
1345 1347 raise ValueError("n should be specified.")
1346 1348
1347 1349 if n != None:
1348 1350 if n<2:
1349 1351 raise ValueError("n should be greater than 2")
1350 1352
1351 1353 self.n = n
1352 1354 self.__nProf = n
1353 1355
1354 1356 self.__buffer = numpy.zeros((dataOut.nChannels,
1355 1357 n,
1356 1358 dataOut.nHeights),
1357 1359 dtype='complex')
1358 1360
1359 1361 def putData(self,data):
1360 1362 '''
1361 1363 Add a profile to the __buffer and increase the __profIndex by one
1362 1364 '''
1363 1365 self.__buffer[:,self.__profIndex,:]= data
1364 1366 self.__profIndex += 1
1365 1367 return
1366 1368
1367 1369 def pushData(self,dataOut):
1368 1370 '''
1369 1371 Return the pulse-pair estimates and the number of profiles used in the operation
1370 1372 Affected : self.__profileIndex
1371 1373 '''
1372 #·················· Remove DC····································
1374 #----------------- Remove DC-----------------------------------
1373 1375 if self.removeDC==True:
1374 1376 mean = numpy.mean(self.__buffer,1)
1375 1377 tmp = mean.reshape(self.__nch,1,self.__nHeis)
1376 1378 dc= numpy.tile(tmp,[1,self.__nProf,1])
1377 1379 self.__buffer = self.__buffer - dc
1378 #··················Power computation ························
1380 #------------------Power computation ------------------------
1379 1381 pair0 = self.__buffer*numpy.conj(self.__buffer)
1380 1382 pair0 = pair0.real
1381 1383 lag_0 = numpy.sum(pair0,1)
1382 #··················Noise computation per channel····················
1384 #------------------Noise computation per channel--------------------
1383 1385 self.noise = numpy.zeros(self.__nch)
1384 1386 for i in range(self.__nch):
1385 1387 daux = numpy.sort(pair0[i,:,:],axis= None)
1386 1388 self.noise[i]=hildebrand_sekhon( daux ,self.nCohInt)
1387 1389
1388 1390 self.noise = self.noise.reshape(self.__nch,1)
1389 1391 self.noise = numpy.tile(self.noise,[1,self.__nHeis])
1390 1392 noise_buffer = self.noise.reshape(self.__nch,1,self.__nHeis)
1391 1393 noise_buffer = numpy.tile(noise_buffer,[1,self.__nProf,1])
1392 #·················· Received power = P , Signal power = S , Noise = N··
1393 #·················· P = S + N , P = lag_0/N ···························
1394 #···················· Power ············································
1394 #------------------ Received power = P , Signal power = S , Noise = N--
1395 #------------------ P = S + N , P = lag_0/N ---------------------------
1396 #-------------------- Power --------------------------------------------------
1395 1397 data_power = lag_0/(self.n*self.nCohInt)
1396 #------------------ Signal ·····················································
1398 #------------------ Signal ---------------------------------------------------
1397 1399 data_intensity = pair0 - noise_buffer
1398 1400 data_intensity = numpy.sum(data_intensity,axis=1)*(self.n*self.nCohInt)#*self.nCohInt)
1399 1401 #data_intensity = (lag_0-self.noise*self.n)*(self.n*self.nCohInt)
1400 1402 for i in range(self.__nch):
1401 1403 for j in range(self.__nHeis):
1402 1404 if data_intensity[i][j] < 0:
1403 1405 data_intensity[i][j] = numpy.min(numpy.absolute(data_intensity[i][j]))
1404 1406
1405 #·················· Doppler frequency and velocity computation········
1407 #----------------- Doppler frequency and velocity computation--------
1406 1408 pair1 = self.__buffer[:,:-1,:]*numpy.conjugate(self.__buffer[:,1:,:])
1407 1409 lag_1 = numpy.sum(pair1,1)
1408 1410 data_freq = (-1/(2.0*math.pi*self.ippSec*self.nCohInt))*numpy.angle(lag_1)
1409 1411 data_velocity = (self.lambda_/2.0)*data_freq
1410 1412
1411 #·················· Estimated mean signal power···········
1413 #---------------- Estimated mean signal power-----------
1412 1414 lag_0 = lag_0/self.n
1413 1415 S = lag_0-self.noise
1414 1416
1415 #·················· Mean Doppler frequency ·····················
1417 #---------------- Mean Doppler frequency ---------------------
1416 1418 lag_1 = lag_1/(self.n-1)
1417 1419 R1 = numpy.abs(lag_1)
1418 1420
1419 #·················· SNR computation································
1421 #---------------- SNR computation----------------------------------
1420 1422 data_snrPP = S/self.noise
1421 1423 for i in range(self.__nch):
1422 1424 for j in range(self.__nHeis):
1423 1425 if data_snrPP[i][j] < 1.e-20:
1424 1426 data_snrPP[i][j] = 1.e-20
1425 1427
1426 #·················· Spectral width computation······················
1428 #----------------- Spectral width computation ----------------------
1427 1429 L = S/R1
1428 1430 L = numpy.where(L<0,1,L)
1429 1431 L = numpy.log(L)
1430 1432 tmp = numpy.sqrt(numpy.absolute(L))
1431 1433 data_specwidth = (self.lambda_/(2*math.sqrt(2)*math.pi*self.ippSec*self.nCohInt))*tmp*numpy.sign(L)
1432 1434 n = self.__profIndex
1433 1435
1434 1436 self.__buffer = numpy.zeros((self.__nch, self.__nProf,self.__nHeis), dtype='complex')
1435 1437 self.__profIndex = 0
1436 1438 return data_power,data_intensity,data_velocity,data_snrPP,data_specwidth,n
1437 1439
1438 1440
1439 1441 def pulsePairbyProfiles(self,dataOut):
1440 1442
1441 1443 self.__dataReady = False
1442 1444 data_power = None
1443 1445 data_intensity = None
1444 1446 data_velocity = None
1445 1447 data_specwidth = None
1446 1448 data_snrPP = None
1447 1449 self.putData(data=dataOut.data)
1448 1450 if self.__profIndex == self.n:
1449 1451 data_power,data_intensity, data_velocity,data_snrPP,data_specwidth, n = self.pushData(dataOut=dataOut)
1450 1452 self.__dataReady = True
1451 1453
1452 1454 return data_power, data_intensity, data_velocity, data_snrPP, data_specwidth
1453 1455
1454 1456
1455 1457 def pulsePairOp(self, dataOut, datatime= None):
1456 1458
1457 1459 if self.__initime == None:
1458 1460 self.__initime = datatime
1459 1461 data_power, data_intensity, data_velocity, data_snrPP, data_specwidth = self.pulsePairbyProfiles(dataOut)
1460 1462 self.__lastdatatime = datatime
1461 1463
1462 1464 if data_power is None:
1463 1465 return None, None, None,None,None,None
1464 1466
1465 1467 avgdatatime = self.__initime
1466 1468 deltatime = datatime - self.__lastdatatime
1467 1469 self.__initime = datatime
1468 1470
1469 1471 return data_power, data_intensity, data_velocity, data_snrPP, data_specwidth, avgdatatime
1470 1472
1471 1473 def run(self, dataOut,n = None,removeDC= False, overlapping= False,**kwargs):
1472 1474
1473 1475 if not self.isConfig:
1474 1476 self.setup(dataOut = dataOut, n = n , removeDC=removeDC , **kwargs)
1475 1477 self.isConfig = True
1476 1478 data_power, data_intensity, data_velocity,data_snrPP,data_specwidth, avgdatatime = self.pulsePairOp(dataOut, dataOut.utctime)
1477 1479 dataOut.flagNoData = True
1478 1480
1479 1481 if self.__dataReady:
1480 1482 dataOut.nCohInt *= self.n
1481 1483 dataOut.dataPP_POW = data_intensity # S
1482 1484 dataOut.dataPP_POWER = data_power # P
1483 1485 dataOut.dataPP_DOP = data_velocity
1484 1486 dataOut.dataPP_SNR = data_snrPP
1485 1487 dataOut.dataPP_WIDTH = data_specwidth
1486 1488 dataOut.PRFbyAngle = self.n # number of PRFs per rotated angle, equivalent to one time interval
1487 1489 dataOut.utctime = avgdatatime
1488 1490 dataOut.flagNoData = False
1489 1491 return dataOut
1490 1492
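# Illustrative sketch (not part of the processing chain): the two pulse-pair
# estimators used by PulsePairVoltage above on a synthetic, noise-free range
# gate. Power comes from the real lag-0 autocorrelation and radial velocity
# from the phase of lag 1. The wavelength, IPP and velocity below are example
# assumptions, not this radar's actual parameters; math and numpy are the
# module-level imports used throughout this file.
def _demo_pulse_pair(wavelength=0.032, ipp=1e-3, n=32):
    true_velocity = 5.0                            # m/s, example only
    doppler = 2.0 * true_velocity / wavelength     # Doppler shift in Hz
    t = numpy.arange(n) * ipp
    voltage = numpy.exp(2j * math.pi * doppler * t)
    lag_0 = numpy.sum(voltage * numpy.conj(voltage)).real / n
    lag_1 = numpy.sum(voltage[:-1] * numpy.conj(voltage[1:])) / (n - 1)
    est_freq = (-1.0 / (2.0 * math.pi * ipp)) * numpy.angle(lag_1)
    est_velocity = (wavelength / 2.0) * est_freq
    return lag_0, est_velocity                     # approximately (1.0, 5.0)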
1491 1493
1492 1494
1493 1495 # import collections
1494 1496 # from scipy.stats import mode
1495 1497 #
1496 1498 # class Synchronize(Operation):
1497 1499 #
1498 1500 # isConfig = False
1499 1501 # __profIndex = 0
1500 1502 #
1501 1503 # def __init__(self, **kwargs):
1502 1504 #
1503 1505 # Operation.__init__(self, **kwargs)
1504 1506 # # self.isConfig = False
1505 1507 # self.__powBuffer = None
1506 1508 # self.__startIndex = 0
1507 1509 # self.__pulseFound = False
1508 1510 #
1509 1511 # def __findTxPulse(self, dataOut, channel=0, pulse_with = None):
1510 1512 #
1511 1513 # #Read data
1512 1514 #
1513 1515 # powerdB = dataOut.getPower(channel = channel)
1514 1516 # noisedB = dataOut.getNoise(channel = channel)[0]
1515 1517 #
1516 1518 # self.__powBuffer.extend(powerdB.flatten())
1517 1519 #
1518 1520 # dataArray = numpy.array(self.__powBuffer)
1519 1521 #
1520 1522 # filteredPower = numpy.correlate(dataArray, dataArray[0:self.__nSamples], "same")
1521 1523 #
1522 1524 # maxValue = numpy.nanmax(filteredPower)
1523 1525 #
1524 1526 # if maxValue < noisedB + 10:
1525 1527 # #No transmission pulse was found
1526 1528 # return None
1527 1529 #
1528 1530 # maxValuesIndex = numpy.where(filteredPower > maxValue - 0.1*abs(maxValue))[0]
1529 1531 #
1530 1532 # if len(maxValuesIndex) < 2:
1531 1533 # #Only a single one-baud transmission pulse was found, waiting for the next TX
1532 1534 # return None
1533 1535 #
1534 1536 # phasedMaxValuesIndex = maxValuesIndex - self.__nSamples
1535 1537 #
1536 1538 # #Keep only values spaced nSamples apart
1537 1539 # pulseIndex = numpy.intersect1d(maxValuesIndex, phasedMaxValuesIndex)
1538 1540 #
1539 1541 # if len(pulseIndex) < 2:
1540 1542 # #Only one transmission pulse wider than 1 was found
1541 1543 # return None
1542 1544 #
1543 1545 # spacing = pulseIndex[1:] - pulseIndex[:-1]
1544 1546 #
1546 1548 # #remove signals spaced less than 10 units or samples apart
1547 1549 # #(there should be no IPP shorter than 10 units)
1547 1549 #
1548 1550 # realIndex = numpy.where(spacing > 10 )[0]
1549 1551 #
1550 1552 # if len(realIndex) < 2:
1551 1553 # #Only one transmission pulse wider than 1 was found
1552 1554 # return None
1553 1555 #
1554 1556 # #Remove wide pulses (keep only the difference between IPPs)
1555 1557 # realPulseIndex = pulseIndex[realIndex]
1556 1558 #
1557 1559 # period = mode(realPulseIndex[1:] - realPulseIndex[:-1])[0][0]
1558 1560 #
1559 1561 # print "IPP = %d samples" %period
1560 1562 #
1561 1563 # self.__newNSamples = dataOut.nHeights #int(period)
1562 1564 # self.__startIndex = int(realPulseIndex[0])
1563 1565 #
1564 1566 # return 1
1565 1567 #
1566 1568 #
1567 1569 # def setup(self, nSamples, nChannels, buffer_size = 4):
1568 1570 #
1569 1571 # self.__powBuffer = collections.deque(numpy.zeros( buffer_size*nSamples,dtype=numpy.float),
1570 1572 # maxlen = buffer_size*nSamples)
1571 1573 #
1572 1574 # bufferList = []
1573 1575 #
1574 1576 # for i in range(nChannels):
1575 1577 # bufferByChannel = collections.deque(numpy.zeros( buffer_size*nSamples, dtype=numpy.complex) + numpy.NAN,
1576 1578 # maxlen = buffer_size*nSamples)
1577 1579 #
1578 1580 # bufferList.append(bufferByChannel)
1579 1581 #
1580 1582 # self.__nSamples = nSamples
1581 1583 # self.__nChannels = nChannels
1582 1584 # self.__bufferList = bufferList
1583 1585 #
1584 1586 # def run(self, dataOut, channel = 0):
1585 1587 #
1586 1588 # if not self.isConfig:
1587 1589 # nSamples = dataOut.nHeights
1588 1590 # nChannels = dataOut.nChannels
1589 1591 # self.setup(nSamples, nChannels)
1590 1592 # self.isConfig = True
1591 1593 #
1592 1594 # #Append new data to internal buffer
1593 1595 # for thisChannel in range(self.__nChannels):
1594 1596 # bufferByChannel = self.__bufferList[thisChannel]
1595 1597 # bufferByChannel.extend(dataOut.data[thisChannel])
1596 1598 #
1597 1599 # if self.__pulseFound:
1598 1600 # self.__startIndex -= self.__nSamples
1599 1601 #
1600 1602 # #Finding Tx Pulse
1601 1603 # if not self.__pulseFound:
1602 1604 # indexFound = self.__findTxPulse(dataOut, channel)
1603 1605 #
1604 1606 # if indexFound == None:
1605 1607 # dataOut.flagNoData = True
1606 1608 # return
1607 1609 #
1608 1610 # self.__arrayBuffer = numpy.zeros((self.__nChannels, self.__newNSamples), dtype = numpy.complex)
1609 1611 # self.__pulseFound = True
1610 1612 # self.__startIndex = indexFound
1611 1613 #
1612 1614 # #If pulse was found ...
1613 1615 # for thisChannel in range(self.__nChannels):
1614 1616 # bufferByChannel = self.__bufferList[thisChannel]
1615 1617 # #print self.__startIndex
1616 1618 # x = numpy.array(bufferByChannel)
1617 1619 # self.__arrayBuffer[thisChannel] = x[self.__startIndex:self.__startIndex+self.__newNSamples]
1618 1620 #
1619 1621 # deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1620 1622 # dataOut.heightList = numpy.arange(self.__newNSamples)*deltaHeight
1621 1623 # # dataOut.ippSeconds = (self.__newNSamples / deltaHeight)/1e6
1622 1624 #
1623 1625 # dataOut.data = self.__arrayBuffer
1624 1626 #
1625 1627 # self.__startIndex += self.__newNSamples
1626 1628 #
1627 1629 # return