-The way local-time seconds are computed has been changed: the Timezone parameter taken from the file being read is now used....
Miguel Valdez -
r344:ff2597a84a69
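In short: local time is no longer derived from a hard-coded offset; the timeZone value read from each file's basic header is used instead. A minimal sketch of what the new JROData.getltctime() does, assuming timeZone is stored in minutes:

    def local_seconds(utctime, timeZone, useLocalTime=True):
        # utctime: epoch seconds (UTC); timeZone: offset in minutes read from the basic header.
        # With timeZone = 300 (UTC-5) this reproduces the previous fixed -18000 second shift.
        if useLocalTime:
            return utctime - timeZone * 60
        return utctime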
@@ -1,552 +1,574
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JROData.py 173 2012-11-20 15:06:21Z murco $
5 5 '''
6 6
7 7 import os, sys
8 8 import copy
9 9 import numpy
10 10 import datetime
11 11
12 12 from jroheaderIO import SystemHeader, RadarControllerHeader
13 13
14 14 def hildebrand_sekhon(data, navg):
15 15 """
16 16 This method is for the objective determination of the noise level in Doppler spectra. This
17 17 implementation technique is based on the fact that the standard deviation of the spectral
18 18 densities is equal to the mean spectral density for white Gaussian noise
19 19
20 20 Inputs:
21 21 Data : heights
22 22 navg : number of averages
23 23
24 24 Return:
25 25 -1 : any error
26 26 anoise : noise level
27 27 """
28 28
29 29 dataflat = data.copy().reshape(-1)
30 30 dataflat.sort()
31 31 npts = dataflat.size #number of data points
32 32 npts_noise = 0.2*npts
33 33
34 34 if npts < 32:
35 35 print "error in noise - requires at least 32 points"
36 36 return -1.0
37 37
38 38 dataflat2 = numpy.power(dataflat,2)
39 39
40 40 cs = numpy.cumsum(dataflat)
41 41 cs2 = numpy.cumsum(dataflat2)
42 42
43 43 # data sorted in ascending order
44 44 nmin = int((npts + 7.)/8)
45 45
46 46 for i in range(nmin, npts):
47 47 s = cs[i]
48 48 s2 = cs2[i]
49 49 p = s / float(i);
50 50 p2 = p**2;
51 51 q = s2 / float(i) - p2;
52 52 leftc = p2;
53 53 rightc = q * float(navg);
54 54 R2 = leftc/rightc
55 55
56 56 # Signal detect: R2 < 1 (R2 = leftc/rightc)
57 57 if R2 < 1:
58 58 npts_noise = i
59 59 break
60 60
61 61
62 62 anoise = numpy.average(dataflat[0:npts_noise])
63 63
64 64 return anoise;
65 65
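A small usage sketch of the estimator documented above; the spectrum below is synthetic and the navg value is an assumption, not taken from real data:

    import numpy
    # hypothetical white-noise power spectrum: 64 spectral points x 100 heights
    spec = numpy.abs(numpy.random.randn(64, 100))**2
    noise_level = hildebrand_sekhon(spec, navg=10)
    print "estimated noise level:", noise_level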
66 66 def sorting_bruce(data, navg):
67 67
68 68 data = data.copy()
69 69
70 70 sortdata = numpy.sort(data)
71 71 lenOfData = len(data)
72 72 nums_min = lenOfData/10
73 73
74 74 if (lenOfData/10) > 0:
75 75 nums_min = lenOfData/10
76 76 else:
77 77 nums_min = 0
78 78
79 79 rtest = 1.0 + 1.0/navg
80 80
81 81 sum = 0.
82 82
83 83 sumq = 0.
84 84
85 85 j = 0
86 86
87 87 cont = 1
88 88
89 89 while((cont==1)and(j<lenOfData)):
90 90
91 91 sum += sortdata[j]
92 92
93 93 sumq += sortdata[j]**2
94 94
95 95 j += 1
96 96
97 97 if j > nums_min:
98 98 if ((sumq*j) <= (rtest*sum**2)):
99 99 lnoise = sum / j
100 100 else:
101 101 j = j - 1
102 102 sum = sum - sortdata[j]
103 103 sumq = sumq - sortdata[j]**2
104 104 cont = 0
105 105
106 106 if j == nums_min:
107 107 lnoise = sum /j
108 108
109 109 return lnoise
110 110
111 111 class JROData:
112 112
113 113 # m_BasicHeader = BasicHeader()
114 114 # m_ProcessingHeader = ProcessingHeader()
115 115
116 116 systemHeaderObj = SystemHeader()
117 117
118 118 radarControllerHeaderObj = RadarControllerHeader()
119 119
120 120 # data = None
121 121
122 122 type = None
123 123
124 124 dtype = None
125 125
126 126 # nChannels = None
127 127
128 128 # nHeights = None
129 129
130 130 nProfiles = None
131 131
132 132 heightList = None
133 133
134 134 channelList = None
135 135
136 136 flagNoData = True
137 137
138 138 flagTimeBlock = False
139 139
140 useLocalTime = False
141
140 142 utctime = None
141 143
144 timeZone = None
145
146 dstFlag = None
147
148 errorCount = None
149
142 150 blocksize = None
143 151
144 152 nCode = None
145 153
146 154 nBaud = None
147 155
148 156 code = None
149 157
150 158 flagDecodeData = False #assume the data has not been decoded
151 159
152 160 flagDeflipData = False #assume the data has not been deflipped
153 161
154 162 flagShiftFFT = False
155 163
156 164 ippSeconds = None
157 165
158 166 timeInterval = None
159 167
160 168 nCohInt = None
161 169
162 170 noise = None
163 171
164 172 windowOfFilter = 1
165 173
166 174 #Speed of light
167 175 C = 3e8
168 176
169 177 frequency = 49.92e6
170 178
171 179 def __init__(self):
172 180
173 181 raise ValueError, "This class has not been implemented"
174 182
175 183 def copy(self, inputObj=None):
176 184
177 185 if inputObj == None:
178 186 return copy.deepcopy(self)
179 187
180 188 for key in inputObj.__dict__.keys():
181 189 self.__dict__[key] = inputObj.__dict__[key]
182 190
183 191 def deepcopy(self):
184 192
185 193 return copy.deepcopy(self)
186 194
187 195 def isEmpty(self):
188 196
189 197 return self.flagNoData
190 198
191 199 def getNoise(self):
192 200
193 201 raise ValueError, "Not implemented"
194 202
195 203 def getNChannels(self):
196 204
197 205 return len(self.channelList)
198 206
199 207 def getChannelIndexList(self):
200 208
201 209 return range(self.nChannels)
202 210
203 211 def getNHeights(self):
204 212
205 213 return len(self.heightList)
206 214
207 215 def getHeiRange(self, extrapoints=0):
208 216
209 217 heis = self.heightList
210 218 # deltah = self.heightList[1] - self.heightList[0]
211 219 #
212 220 # heis.append(self.heightList[-1])
213 221
214 222 return heis
215 223
224 def getltctime(self):
225
226 if self.useLocalTime:
227 return self.utctime - self.timeZone*60
228
229 return self.utctime
230
216 231 def getDatatime(self):
217 232
218 datatime = datetime.datetime.utcfromtimestamp(self.utctime)
233 datatime = datetime.datetime.utcfromtimestamp(self.ltctime)
219 234 return datatime
220 235
221 236 def getTimeRange(self):
222 237
223 238 datatime = []
224 239
225 datatime.append(self.utctime)
226 datatime.append(self.utctime + self.timeInterval)
240 datatime.append(self.ltctime)
241 datatime.append(self.ltctime + self.timeInterval)
227 242
228 243 datatime = numpy.array(datatime)
229 244
230 245 return datatime
231 246
232 247 def getFmax(self):
233 248
234 249 PRF = 1./(self.ippSeconds * self.nCohInt)
235 250
236 251 fmax = PRF/2.
237 252
238 253 return fmax
239 254
240 255 def getVmax(self):
241 256
242 257 _lambda = self.C/self.frequency
243 258
244 259 vmax = self.getFmax() * _lambda
245 260
246 261 return vmax
247 262
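As a rough worked example of the two methods above (the input values are assumed, not taken from any real header):

    # ippSeconds = 0.002 s, nCohInt = 1  ->  PRF  = 1/(0.002*1) = 500 Hz
    #                                        fmax = PRF/2       = 250 Hz
    # _lambda = C/frequency = 3e8/49.92e6 ~= 6.01 m
    # vmax = fmax * _lambda               ~= 1502 m/s (as computed by getVmax)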
248 263 nChannels = property(getNChannels, "I'm the 'nChannels' property.")
249 264 channelIndexList = property(getChannelIndexList, "I'm the 'channelIndexList' property.")
250 265 nHeights = property(getNHeights, "I'm the 'nHeights' property.")
251 266 noise = property(getNoise, "I'm the 'noise' property.")
252 267 datatime = property(getDatatime, "I'm the 'datatime' property")
268 ltctime = property(getltctime, "I'm the 'ltctime' property")
253 269
254 270 class Voltage(JROData):
255 271
256 272 #data is a 2-dimensional numpy array (channels, heights)
257 273 data = None
258 274
259 275 def __init__(self):
260 276 '''
261 277 Constructor
262 278 '''
263 279
264 280 self.radarControllerHeaderObj = RadarControllerHeader()
265 281
266 282 self.systemHeaderObj = SystemHeader()
267 283
268 284 self.type = "Voltage"
269 285
270 286 self.data = None
271 287
272 288 self.dtype = None
273 289
274 290 # self.nChannels = 0
275 291
276 292 # self.nHeights = 0
277 293
278 294 self.nProfiles = None
279 295
280 296 self.heightList = None
281 297
282 298 self.channelList = None
283 299
284 300 # self.channelIndexList = None
285 301
286 302 self.flagNoData = True
287 303
288 304 self.flagTimeBlock = False
289 305
290 306 self.utctime = None
291 307
308 self.timeZone = None
309
310 self.dstFlag = None
311
312 self.errorCount = None
313
292 314 self.nCohInt = None
293 315
294 316 self.blocksize = None
295 317
296 318 self.flagDecodeData = False #assume the data has not been decoded
297 319
298 320 self.flagDeflipData = False #assume the data has not been deflipped
299 321
300 322 self.flagShiftFFT = False
301 323
302 324
303 325 def getNoisebyHildebrand(self):
304 326 """
305 327 Determines the noise level using the Hildebrand-Sekhon method
306 328
307 329 Return:
308 330 noiselevel
309 331 """
310 332
311 333 for channel in range(self.nChannels):
312 334 daux = self.data_spc[channel,:,:]
313 335 self.noise[channel] = hildebrand_sekhon(daux, self.nCohInt)
314 336
315 337 return self.noise
316 338
317 339 def getNoise(self, type = 1):
318 340
319 341 self.noise = numpy.zeros(self.nChannels)
320 342
321 343 if type == 1:
322 344 noise = self.getNoisebyHildebrand()
323 345
324 346 return 10*numpy.log10(noise)
325 347
326 348 class Spectra(JROData):
327 349
328 350 #data_spc is a 3-dimensional numpy array (channels, profiles, heights)
329 351 data_spc = None
330 352
331 353 #data_cspc is a numpy array (channels, pairs, heights)
332 354 data_cspc = None
333 355
334 356 #data_dc is a 2-dimensional numpy array (channels, heights)
335 357 data_dc = None
336 358
337 359 nFFTPoints = None
338 360
339 361 nPairs = None
340 362
341 363 pairsList = None
342 364
343 365 nIncohInt = None
344 366
345 367 wavelength = None #Needed to compute the velocity range from the frequency
346 368
347 369 nCohInt = None #required to determine the value of timeInterval
348 370
349 371 def __init__(self):
350 372 '''
351 373 Constructor
352 374 '''
353 375
354 376 self.radarControllerHeaderObj = RadarControllerHeader()
355 377
356 378 self.systemHeaderObj = SystemHeader()
357 379
358 380 self.type = "Spectra"
359 381
360 382 # self.data = None
361 383
362 384 self.dtype = None
363 385
364 386 # self.nChannels = 0
365 387
366 388 # self.nHeights = 0
367 389
368 390 self.nProfiles = None
369 391
370 392 self.heightList = None
371 393
372 394 self.channelList = None
373 395
374 396 # self.channelIndexList = None
375 397
376 398 self.flagNoData = True
377 399
378 400 self.flagTimeBlock = False
379 401
380 402 self.utctime = None
381 403
382 404 self.nCohInt = None
383 405
384 406 self.nIncohInt = None
385 407
386 408 self.blocksize = None
387 409
388 410 self.nFFTPoints = None
389 411
390 412 self.wavelength = None
391 413
392 414 self.flagDecodeData = False #assume the data has not been decoded
393 415
394 416 self.flagDeflipData = False #assume the data has not been deflipped
395 417
396 418 self.flagShiftFFT = False
397 419
398 420 def getNoisebyHildebrand(self):
399 421 """
400 422 Determino el nivel de ruido usando el metodo Hildebrand-Sekhon
401 423
402 424 Return:
403 425 noiselevel
404 426 """
405 427
406 428 for channel in range(self.nChannels):
407 429 daux = self.data_spc[channel,:,:]
408 430 self.noise[channel] = hildebrand_sekhon(daux, self.nIncohInt)
409 431
410 432 return self.noise
411 433
412 434 def getNoisebyWindow(self, heiIndexMin=0, heiIndexMax=-1, freqIndexMin=0, freqIndexMax=-1):
413 435 """
414 436 Determines the channel noise using the window given by the coordinates
415 437 (heiIndexMin, freqIndexMin) to (heiIndexMax, freqIndexMax)
416 438 
417 439 Inputs:
418 440 heiIndexMin: lower limit of the height axis
419 441 heiIndexMax: upper limit of the height axis
420 442 freqIndexMin: lower limit of the frequency axis
421 443 freqIndexMax: upper limit of the frequency axis
422 444 """
423 445
424 446 data = self.data_spc[:, heiIndexMin:heiIndexMax, freqIndexMin:freqIndexMax]
425 447
426 448 for channel in range(self.nChannels):
427 449 daux = data[channel,:,:]
428 450 self.noise[channel] = numpy.average(daux)
429 451
430 452 return self.noise
431 453
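A hedged usage sketch of the window-based estimate described above; the index limits are arbitrary placeholders and spectraObj is assumed to be a Spectra instance with data_spc already filled:

    spectraObj.noise = numpy.zeros(spectraObj.nChannels)   # getNoise() normally allocates this
    noise = spectraObj.getNoisebyWindow(heiIndexMin=10, heiIndexMax=50,
                                        freqIndexMin=0, freqIndexMax=16)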
432 454 def getNoisebySort(self):
433 455
434 456 for channel in range(self.nChannels):
435 457 daux = self.data_spc[channel,:,:]
436 458 self.noise[channel] = sorting_bruce(daux, self.nIncohInt)
437 459
438 460 return self.noise
439 461
440 462 def getNoise(self, type = 1):
441 463
442 464 self.noise = numpy.zeros(self.nChannels)
443 465
444 466 if type == 1:
445 467 noise = self.getNoisebyHildebrand()
446 468
447 469 if type == 2:
448 470 noise = self.getNoisebySort()
449 471
450 472 if type == 3:
451 473 noise = self.getNoisebyWindow()
452 474
453 475 return noise
454 476
455 477
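For reference, a minimal sketch of how the three estimators above are selected through getNoise (spectraObj is assumed to be an already populated Spectra object):

    noise_hildebrand = spectraObj.getNoise(type=1)   # Hildebrand-Sekhon
    noise_sorting    = spectraObj.getNoise(type=2)   # sorting_bruce
    noise_window     = spectraObj.getNoise(type=3)   # window average with default limits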
456 478 def getFreqRange(self, extrapoints=0):
457 479
458 480 deltafreq = self.getFmax() / self.nFFTPoints
459 481 freqrange = deltafreq*(numpy.arange(self.nFFTPoints+extrapoints)-self.nFFTPoints/2.) - deltafreq/2
460 482
461 483 return freqrange
462 484
463 485 def getVelRange(self, extrapoints=0):
464 486
465 487 deltav = self.getVmax() / self.nFFTPoints
466 488 velrange = deltav*(numpy.arange(self.nFFTPoints+extrapoints)-self.nFFTPoints/2.) - deltav/2
467 489
468 490 return velrange
469 491
470 492 def getNPairs(self):
471 493
472 494 return len(self.pairsList)
473 495
474 496 def getPairsIndexList(self):
475 497
476 498 return range(self.nPairs)
477 499
478 500 def getNormFactor(self):
479 501 pwcode = 1
480 502 if self.flagDecodeData:
481 503 pwcode = numpy.sum(self.code[0]**2)
482 504 normFactor = min(self.nFFTPoints,self.nProfiles)*self.nIncohInt*self.nCohInt*pwcode
483 505
484 506 return normFactor
485 507
486 508 def getFlagCspc(self):
487 509
488 510 if self.data_cspc == None:
489 511 return True
490 512
491 513 return False
492 514
493 515 def getFlagDc(self):
494 516
495 517 if self.data_dc == None:
496 518 return True
497 519
498 520 return False
499 521
500 522 nPairs = property(getNPairs, "I'm the 'nPairs' property.")
501 523 pairsIndexList = property(getPairsIndexList, "I'm the 'pairsIndexList' property.")
502 524 normFactor = property(getNormFactor, "I'm the 'getNormFactor' property.")
503 525 flag_cspc = property(getFlagCspc)
504 526 flag_dc = property(getFlagDc)
505 527
506 528 class SpectraHeis(JROData):
507 529
508 530 data_spc = None
509 531
510 532 data_cspc = None
511 533
512 534 data_dc = None
513 535
514 536 nFFTPoints = None
515 537
516 538 nPairs = None
517 539
518 540 pairsList = None
519 541
520 542 nIncohInt = None
521 543
522 544 def __init__(self):
523 545
524 546 self.radarControllerHeaderObj = RadarControllerHeader()
525 547
526 548 self.systemHeaderObj = SystemHeader()
527 549
528 550 self.type = "SpectraHeis"
529 551
530 552 self.dtype = None
531 553
532 554 # self.nChannels = 0
533 555
534 556 # self.nHeights = 0
535 557
536 558 self.nProfiles = None
537 559
538 560 self.heightList = None
539 561
540 562 self.channelList = None
541 563
542 564 # self.channelIndexList = None
543 565
544 566 self.flagNoData = True
545 567
546 568 self.flagTimeBlock = False
547 569
548 570 self.nPairs = 0
549 571
550 572 self.utctime = None
551 573
552 574 self.blocksize = None
@@ -1,2729 +1,2737
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JRODataIO.py 169 2012-11-19 21:57:03Z murco $
5 5 '''
6 6
7 7 import os, sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import time, datetime
13 import pyfits
13 try:
14 import pyfits
15 except:
16 print "pyfits module has not been imported, it should be installed to save files in fits format"
14 17
15 18 from jrodata import *
16 19 from jroheaderIO import *
17 20 from jroprocessing import *
18 21
19 LOCALTIME = -18000
22 LOCALTIME = True #-18000
20 23
21 24 def isNumber(str):
22 25 """
23 26 Chequea si el conjunto de caracteres que componen un string puede ser convertidos a un numero.
24 27
25 28 Excepciones:
26 29 Si un determinado string no puede ser convertido a numero
27 30 Input:
28 31 str, string al cual se le analiza para determinar si convertible a un numero o no
29 32
30 33 Return:
31 34 True : si el string es uno numerico
32 35 False : no es un string numerico
33 36 """
34 37 try:
35 38 float( str )
36 39 return True
37 40 except:
38 41 return False
39 42
40 43 def isThisFileinRange(filename, startUTSeconds, endUTSeconds):
41 44 """
42 45 Esta funcion determina si un archivo de datos se encuentra o no dentro del rango de fecha especificado.
43 46
44 47 Inputs:
45 48 filename : nombre completo del archivo de datos en formato Jicamarca (.r)
46 49
47 50 startUTSeconds : fecha inicial del rango seleccionado. La fecha esta dada en
48 51 segundos contados desde 01/01/1970.
49 52 endUTSeconds : fecha final del rango seleccionado. La fecha esta dada en
50 53 segundos contados desde 01/01/1970.
51 54
52 55 Return:
53 56 Boolean : Retorna True si el archivo de datos contiene datos en el rango de
54 57 fecha especificado, de lo contrario retorna False.
55 58
56 59 Excepciones:
57 60 Si el archivo no existe o no puede ser abierto
58 61 Si la cabecera no puede ser leida.
59 62
60 63 """
61 64 basicHeaderObj = BasicHeader(LOCALTIME)
62 65
63 66 try:
64 67 fp = open(filename,'rb')
65 68 except:
66 69 raise IOError, "The file %s can't be opened" %(filename)
67 70
68 71 sts = basicHeaderObj.read(fp)
69 72 fp.close()
70 73
71 74 if not(sts):
72 75 print "Skipping the file %s because it has not a valid header" %(filename)
73 76 return 0
74 77
75 78 if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
76 79 return 0
77 80
78 81 return 1
79 82
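A hedged example of the range check described above; the path and dates are placeholders:

    import calendar, datetime
    startUT = calendar.timegm(datetime.datetime(2012, 11, 20).timetuple())
    endUT   = calendar.timegm(datetime.datetime(2012, 11, 21).timetuple())
    if isThisFileinRange("/data/d2012325/D2012325001.r", startUT, endUT):
        print "the file starts inside the requested UT range"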
80 83 def isFileinThisTime(filename, startTime, endTime):
81 84 """
82 85 Retorna 1 si el archivo de datos se encuentra dentro del rango de horas especificado.
83 86
84 87 Inputs:
85 88 filename : nombre completo del archivo de datos en formato Jicamarca (.r)
86 89
87 90 startTime : tiempo inicial del rango seleccionado en formato datetime.time
88 91
89 92 endTime : tiempo final del rango seleccionado en formato datetime.time
90 93
91 94 Return:
92 95 Boolean : Retorna True si el archivo de datos contiene datos en el rango de
93 96 fecha especificado, de lo contrario retorna False.
94 97
95 98 Excepciones:
96 99 Si el archivo no existe o no puede ser abierto
97 100 Si la cabecera no puede ser leida.
98 101
99 102 """
100 103
101 104
102 105 try:
103 106 fp = open(filename,'rb')
104 107 except:
105 108 raise IOError, "The file %s can't be opened" %(filename)
106 109
107 110 basicHeaderObj = BasicHeader(LOCALTIME)
108 111 sts = basicHeaderObj.read(fp)
109 112 fp.close()
110 113
111 114 thisDatetime = basicHeaderObj.datatime
112 115 thisTime = basicHeaderObj.datatime.time()
113 116
114 117 if not(sts):
115 118 print "Skipping the file %s because it has not a valid header" %(filename)
116 119 return None
117 120
118 121 if not ((startTime <= thisTime) and (endTime > thisTime)):
119 122 return None
120 123
121 124 return thisDatetime
122 125
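The time-of-day variant can be sketched the same way (again with placeholder values):

    import datetime
    thisDatetime = isFileinThisTime("/data/d2012325/D2012325001.r",
                                    datetime.time(6, 0, 0), datetime.time(18, 0, 0))
    if thisDatetime:
        print "first block recorded at", thisDatetime.ctime()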
123 126 def getlastFileFromPath(path, ext):
124 127 """
125 128 Depura el fileList dejando solo los que cumplan el formato de "PYYYYDDDSSS.ext"
126 129 al final de la depuracion devuelve el ultimo file de la lista que quedo.
127 130
128 131 Input:
129 132 fileList : lista conteniendo todos los files (sin path) que componen una determinada carpeta
130 133 ext : extension de los files contenidos en una carpeta
131 134
132 135 Return:
133 136 El ultimo file de una determinada carpeta, no se considera el path.
134 137 """
135 138 validFilelist = []
136 139 fileList = os.listdir(path)
137 140
138 141 # 0 1234 567 89A BCDE
139 142 # H YYYY DDD SSS .ext
140 143
141 144 for file in fileList:
142 145 try:
143 146 year = int(file[1:5])
144 147 doy = int(file[5:8])
145 148
146 149
147 150 except:
148 151 continue
149 152
150 153 if (os.path.splitext(file)[-1].lower() != ext.lower()):
151 154 continue
152 155
153 156 validFilelist.append(file)
154 157
155 158 if validFilelist:
156 159 validFilelist = sorted( validFilelist, key=str.lower )
157 160 return validFilelist[-1]
158 161
159 162 return None
160 163
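A short assumed usage of the helper above; the folder name is hypothetical:

    lastfile = getlastFileFromPath("/data/d2012325", ".r")
    if lastfile:
        print "most recent file in the folder:", lastfile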
161 164 def checkForRealPath(path, foldercounter, year, doy, set, ext):
162 165 """
163 166 Por ser Linux Case Sensitive entonces checkForRealPath encuentra el nombre correcto de un path,
164 167 Prueba por varias combinaciones de nombres entre mayusculas y minusculas para determinar
165 168 el path exacto de un determinado file.
166 169
167 170 Example :
168 171 nombre correcto del file es .../.../D2009307/P2009307367.ext
169 172
170 173 Entonces la funcion prueba con las siguientes combinaciones
171 174 .../.../y2009307367.ext
172 175 .../.../Y2009307367.ext
173 176 .../.../x2009307/y2009307367.ext
174 177 .../.../x2009307/Y2009307367.ext
175 178 .../.../X2009307/y2009307367.ext
176 179 .../.../X2009307/Y2009307367.ext
177 180 siendo para este caso, la ultima combinacion de letras, identica al file buscado
178 181
179 182 Return:
180 183 Si encuentra la cobinacion adecuada devuelve el path completo y el nombre del file
181 184 caso contrario devuelve None como path y el la ultima combinacion de nombre en mayusculas
182 185 para el filename
183 186 """
184 187 fullfilename = None
185 188 find_flag = False
186 189 filename = None
187 190
188 191 prefixDirList = [None,'d','D']
189 192 if ext.lower() == ".r": #voltage
190 193 prefixFileList = ['d','D']
191 194 elif ext.lower() == ".pdata": #spectra
192 195 prefixFileList = ['p','P']
193 196 else:
194 197 return None, filename
195 198
196 199 #barrido por las combinaciones posibles
197 200 for prefixDir in prefixDirList:
198 201 thispath = path
199 202 if prefixDir != None:
200 203 #formo el nombre del directorio xYYYYDDD (x=d o x=D)
201 204 if foldercounter == 0:
202 205 thispath = os.path.join(path, "%s%04d%03d" % ( prefixDir, year, doy ))
203 206 else:
204 207 thispath = os.path.join(path, "%s%04d%03d_%02d" % ( prefixDir, year, doy , foldercounter))
205 208 for prefixFile in prefixFileList: #barrido por las dos combinaciones posibles de "D"
206 209 filename = "%s%04d%03d%03d%s" % ( prefixFile, year, doy, set, ext ) #formo el nombre del file xYYYYDDDSSS.ext
207 210 fullfilename = os.path.join( thispath, filename ) #formo el path completo
208 211
209 212 if os.path.exists( fullfilename ): #verifico que exista
210 213 find_flag = True
211 214 break
212 215 if find_flag:
213 216 break
214 217
215 218 if not(find_flag):
216 219 return None, filename
217 220
218 221 return fullfilename, filename
219 222
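Following the example given in the docstring, a hedged call for the P2009307367 case might look like this (the base path is illustrative):

    fullfilename, filename = checkForRealPath("/data", foldercounter=0,
                                               year=2009, doy=307, set=367, ext=".pdata")
    if fullfilename is None:
        print "no upper/lower-case combination matched; last name tried:", filename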
220 223 def isDoyFolder(folder):
221 224 try:
222 225 year = int(folder[1:5])
223 226 except:
224 227 return 0
225 228
226 229 try:
227 230 doy = int(folder[5:8])
228 231 except:
229 232 return 0
230 233
231 234 return 1
232 235
233 236 class JRODataIO:
234 237
235 238 c = 3E8
236 239
237 240 isConfig = False
238 241
239 242 basicHeaderObj = BasicHeader(LOCALTIME)
240 243
241 244 systemHeaderObj = SystemHeader()
242 245
243 246 radarControllerHeaderObj = RadarControllerHeader()
244 247
245 248 processingHeaderObj = ProcessingHeader()
246 249
247 250 online = 0
248 251
249 252 dtype = None
250 253
251 254 pathList = []
252 255
253 256 filenameList = []
254 257
255 258 filename = None
256 259
257 260 ext = None
258 261
259 262 flagIsNewFile = 1
260 263
261 264 flagTimeBlock = 0
262 265
263 266 flagIsNewBlock = 0
264 267
265 268 fp = None
266 269
267 270 firstHeaderSize = 0
268 271
269 272 basicHeaderSize = 24
270 273
271 274 versionFile = 1103
272 275
273 276 fileSize = None
274 277
275 278 ippSeconds = None
276 279
277 280 fileSizeByHeader = None
278 281
279 282 fileIndex = None
280 283
281 284 profileIndex = None
282 285
283 286 blockIndex = None
284 287
285 288 nTotalBlocks = None
286 289
287 290 maxTimeStep = 30
288 291
289 292 lastUTTime = None
290 293
291 294 datablock = None
292 295
293 296 dataOut = None
294 297
295 298 blocksize = None
296 299
297 300 def __init__(self):
298 301
299 302 raise ValueError, "Not implemented"
300 303
301 304 def run(self):
302 305
303 306 raise ValueError, "Not implemented"
304 307
305 308 def getOutput(self):
306 309
307 310 return self.dataOut
308 311
309 312 class JRODataReader(JRODataIO, ProcessingUnit):
310 313
311 314 nReadBlocks = 0
312 315
313 316 delay = 10 #number of seconds waiting a new file
314 317
315 318 nTries = 3 #number of tries
316 319
317 320 nFiles = 3 #number of files for searching
318 321
319 322 path = None
320 323
321 324 foldercounter = 0
322 325
323 326 flagNoMoreFiles = 0
324 327
325 328 datetimeList = []
326 329
327 330 __isFirstTimeOnline = 1
328 331
329 332 __printInfo = True
330 333
334 profileIndex = None
335
331 336 def __init__(self):
332 337
333 338 """
334 339
335 340 """
336 341
337 342 raise ValueError, "This method has not been implemented"
338 343
339 344
340 345 def createObjByDefault(self):
341 346 """
342 347
343 348 """
344 349 raise ValueError, "This method has not been implemented"
345 350
346 351 def getBlockDimension(self):
347 352
348 353 raise ValueError, "No implemented"
349 354
350 355 def __searchFilesOffLine(self,
351 356 path,
352 357 startDate,
353 358 endDate,
354 359 startTime=datetime.time(0,0,0),
355 360 endTime=datetime.time(23,59,59),
356 361 set=None,
357 362 expLabel='',
358 363 ext='.r',
359 364 walk=True):
360 365
361 366 pathList = []
362 367
363 368 if not walk:
364 369 pathList.append(path)
365 370
366 371 else:
367 372 dirList = []
368 373 for thisPath in os.listdir(path):
369 374 if not os.path.isdir(os.path.join(path,thisPath)):
370 375 continue
371 376 if not isDoyFolder(thisPath):
372 377 continue
373 378
374 379 dirList.append(thisPath)
375 380
376 381 if not(dirList):
377 382 return None, None
378 383
379 384 thisDate = startDate
380 385
381 386 while(thisDate <= endDate):
382 387 year = thisDate.timetuple().tm_year
383 388 doy = thisDate.timetuple().tm_yday
384 389
385 390 matchlist = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy) + '*')
386 391 if len(matchlist) == 0:
387 392 thisDate += datetime.timedelta(1)
388 393 continue
389 394 for match in matchlist:
390 395 pathList.append(os.path.join(path,match,expLabel))
391 396
392 397 thisDate += datetime.timedelta(1)
393 398
394 399 if pathList == []:
395 400 print "Any folder was found for the date range: %s-%s" %(startDate, endDate)
396 401 return None, None
397 402
398 403 print "%d folder(s) was(were) found for the date range: %s - %s" %(len(pathList), startDate, endDate)
399 404
400 405 filenameList = []
401 406 datetimeList = []
402 407
403 408 for i in range(len(pathList)):
404 409
405 410 thisPath = pathList[i]
406 411
407 412 fileList = glob.glob1(thisPath, "*%s" %ext)
408 413 fileList.sort()
409 414
410 415 for file in fileList:
411 416
412 417 filename = os.path.join(thisPath,file)
413 418 thisDatetime = isFileinThisTime(filename, startTime, endTime)
414 419
415 420 if not(thisDatetime):
416 421 continue
417 422
418 423 filenameList.append(filename)
419 424 datetimeList.append(thisDatetime)
420 425
421 426 if not(filenameList):
422 427 print "Any file was found for the time range %s - %s" %(startTime, endTime)
423 428 return None, None
424 429
425 430 print "%d file(s) was(were) found for the time range: %s - %s" %(len(filenameList), startTime, endTime)
426 431 print
427 432
428 433 for i in range(len(filenameList)):
429 434 print "%s -> [%s]" %(filenameList[i], datetimeList[i].ctime())
430 435
431 436 self.filenameList = filenameList
432 437 self.datetimeList = datetimeList
433 438
434 439 return pathList, filenameList
435 440
436 441 def __searchFilesOnLine(self, path, expLabel = "", ext = None, walk=True):
437 442
438 443 """
439 444 Busca el ultimo archivo de la ultima carpeta (determinada o no por startDateTime) y
440 445 devuelve el archivo encontrado ademas de otros datos.
441 446
442 447 Input:
443 448 path : carpeta donde estan contenidos los files que contiene data
444 449
445 450 expLabel : Nombre del subexperimento (subfolder)
446 451
447 452 ext : extension de los files
448 453
449 454 walk : if enabled, the search descends into the day subdirectories (doypath)
450 455
451 456 Return:
452 457 directory : eL directorio donde esta el file encontrado
453 458 filename : el ultimo file de una determinada carpeta
454 459 year : el anho
455 460 doy : el numero de dia del anho
456 461 set : el set del archivo
457 462
458 463
459 464 """
460 465 dirList = []
461 466
462 467 if not walk:
463 468 fullpath = path
464 469
465 470 else:
466 471 #Filtra solo los directorios
467 472 for thisPath in os.listdir(path):
468 473 if not os.path.isdir(os.path.join(path,thisPath)):
469 474 continue
470 475 if not isDoyFolder(thisPath):
471 476 continue
472 477
473 478 dirList.append(thisPath)
474 479
475 480 if not(dirList):
476 481 return None, None, None, None, None
477 482
478 483 dirList = sorted( dirList, key=str.lower )
479 484
480 485 doypath = dirList[-1]
481 486 foldercounter = int(doypath.split('_')[1]) if len(doypath.split('_'))>1 else 0
482 487 fullpath = os.path.join(path, doypath, expLabel)
483 488
484 489
485 490 print "%s folder was found: " %(fullpath )
486 491
487 492 filename = getlastFileFromPath(fullpath, ext)
488 493
489 494 if not(filename):
490 495 return None, None, None, None, None
491 496
492 497 print "%s file was found" %(filename)
493 498
494 499 if not(self.__verifyFile(os.path.join(fullpath, filename))):
495 500 return None, None, None, None, None
496 501
497 502 year = int( filename[1:5] )
498 503 doy = int( filename[5:8] )
499 504 set = int( filename[8:11] )
500 505
501 506 return fullpath, foldercounter, filename, year, doy, set
502 507
503 508 def __setNextFileOffline(self):
504 509
505 510 idFile = self.fileIndex
506 511
507 512 while (True):
508 513 idFile += 1
509 514 if not(idFile < len(self.filenameList)):
510 515 self.flagNoMoreFiles = 1
511 516 print "No more Files"
512 517 return 0
513 518
514 519 filename = self.filenameList[idFile]
515 520
516 521 if not(self.__verifyFile(filename)):
517 522 continue
518 523
519 524 fileSize = os.path.getsize(filename)
520 525 fp = open(filename,'rb')
521 526 break
522 527
523 528 self.flagIsNewFile = 1
524 529 self.fileIndex = idFile
525 530 self.filename = filename
526 531 self.fileSize = fileSize
527 532 self.fp = fp
528 533
529 534 print "Setting the file: %s"%self.filename
530 535
531 536 return 1
532 537
533 538 def __setNextFileOnline(self):
534 539 """
535 540 Busca el siguiente file que tenga suficiente data para ser leida, dentro de un folder especifico, si
536 541 no encuentra un file valido espera un tiempo determinado y luego busca en los posibles n files
537 542 siguientes.
538 543
539 544 Affected:
540 545 self.flagIsNewFile
541 546 self.filename
542 547 self.fileSize
543 548 self.fp
544 549 self.set
545 550 self.flagNoMoreFiles
546 551
547 552 Return:
548 553 0 : si luego de una busqueda del siguiente file valido este no pudo ser encontrado
549 554 1 : si el file fue abierto con exito y esta listo a ser leido
550 555
551 556 Excepciones:
552 557 Si un determinado file no puede ser abierto
553 558 """
554 559 nFiles = 0
555 560 fileOk_flag = False
556 561 firstTime_flag = True
557 562
558 563 self.set += 1
559 564
560 565 if self.set > 999:
561 566 self.set = 0
562 567 self.foldercounter += 1
563 568
564 569 #busca el 1er file disponible
565 570 fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
566 571 if fullfilename:
567 572 if self.__verifyFile(fullfilename, False):
568 573 fileOk_flag = True
569 574
570 575 #si no encuentra un file entonces espera y vuelve a buscar
571 576 if not(fileOk_flag):
572 577 for nFiles in range(self.nFiles+1): #busco en los siguientes self.nFiles+1 files posibles
573 578
574 579 if firstTime_flag: #si es la 1era vez entonces hace el for self.nTries veces
575 580 tries = self.nTries
576 581 else:
577 582 tries = 1 #si no es la 1era vez entonces solo lo hace una vez
578 583
579 584 for nTries in range( tries ):
580 585 if firstTime_flag:
581 586 print "\tWaiting %0.2f sec for the file \"%s\" , try %03d ..." % ( self.delay, filename, nTries+1 )
582 587 time.sleep( self.delay )
583 588 else:
584 589 print "\tSearching next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext)
585 590
586 591 fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
587 592 if fullfilename:
588 593 if self.__verifyFile(fullfilename):
589 594 fileOk_flag = True
590 595 break
591 596
592 597 if fileOk_flag:
593 598 break
594 599
595 600 firstTime_flag = False
596 601
597 602 print "\tSkipping the file \"%s\" due to this file doesn't exist" % filename
598 603 self.set += 1
599 604
600 605 if nFiles == (self.nFiles-1): #si no encuentro el file buscado cambio de carpeta y busco en la siguiente carpeta
601 606 self.set = 0
602 607 self.doy += 1
603 608 self.foldercounter = 0
604 609
605 610 if fileOk_flag:
606 611 self.fileSize = os.path.getsize( fullfilename )
607 612 self.filename = fullfilename
608 613 self.flagIsNewFile = 1
609 614 if self.fp != None: self.fp.close()
610 615 self.fp = open(fullfilename, 'rb')
611 616 self.flagNoMoreFiles = 0
612 617 print 'Setting the file: %s' % fullfilename
613 618 else:
614 619 self.fileSize = 0
615 620 self.filename = None
616 621 self.flagIsNewFile = 0
617 622 self.fp = None
618 623 self.flagNoMoreFiles = 1
619 624 print 'No more Files'
620 625
621 626 return fileOk_flag
622 627
623 628
624 629 def setNextFile(self):
625 630 if self.fp != None:
626 631 self.fp.close()
627 632
628 633 if self.online:
629 634 newFile = self.__setNextFileOnline()
630 635 else:
631 636 newFile = self.__setNextFileOffline()
632 637
633 638 if not(newFile):
634 639 return 0
635 640
636 641 self.__readFirstHeader()
637 642 self.nReadBlocks = 0
638 643 return 1
639 644
640 645 def __waitNewBlock(self):
641 646 """
642 647 Returns 1 if a new data block was found, 0 otherwise.
643 648 
644 649 If the reading mode is offline it always returns 0.
645 650 """
646 651 if not self.online:
647 652 return 0
648 653
649 654 if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
650 655 return 0
651 656
652 657 currentPointer = self.fp.tell()
653 658
654 659 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
655 660
656 661 for nTries in range( self.nTries ):
657 662
658 663 self.fp.close()
659 664 self.fp = open( self.filename, 'rb' )
660 665 self.fp.seek( currentPointer )
661 666
662 667 self.fileSize = os.path.getsize( self.filename )
663 668 currentSize = self.fileSize - currentPointer
664 669
665 670 if ( currentSize >= neededSize ):
666 671 self.__rdBasicHeader()
667 672 return 1
668 673
669 674 print "\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
670 675 time.sleep( self.delay )
671 676
672 677
673 678 return 0
674 679
675 680 def __jumpToLastBlock(self):
676 681
677 682 if not(self.__isFirstTimeOnline):
678 683 return
679 684
680 685 csize = self.fileSize - self.fp.tell()
681 686
682 687 #skip the first data block
683 688 if csize > self.processingHeaderObj.blockSize:
684 689 self.fp.seek(self.fp.tell() + self.processingHeaderObj.blockSize)
685 690 else:
686 691 return
687 692
688 693 csize = self.fileSize - self.fp.tell()
689 694 neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
690 695 factor = int(csize/neededsize)
691 696 if factor > 0:
692 697 self.fp.seek(self.fp.tell() + factor*neededsize)
693 698
694 699 self.flagIsNewFile = 0
695 700 self.__isFirstTimeOnline = 0
696 701
697 702
698 703 def __setNewBlock(self):
699 704
700 705 if self.fp == None:
701 706 return 0
702 707
703 708 if self.online:
704 709 self.__jumpToLastBlock()
705 710
706 711 if self.flagIsNewFile:
707 712 return 1
708 713
709 714 self.lastUTTime = self.basicHeaderObj.utc
710 715 currentSize = self.fileSize - self.fp.tell()
711 716 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
712 717
713 718 if (currentSize >= neededSize):
714 719 self.__rdBasicHeader()
715 720 return 1
716 721
717 722 if self.__waitNewBlock():
718 723 return 1
719 724
720 725 if not(self.setNextFile()):
721 726 return 0
722 727
723 728 deltaTime = self.basicHeaderObj.utc - self.lastUTTime #
724 729
725 730 self.flagTimeBlock = 0
726 731
727 732 if deltaTime > self.maxTimeStep:
728 733 self.flagTimeBlock = 1
729 734
730 735 return 1
731 736
732 737
733 738 def readNextBlock(self):
734 739 if not(self.__setNewBlock()):
735 740 return 0
736 741
737 742 if not(self.readBlock()):
738 743 return 0
739 744
740 745 return 1
741 746
742 747 def __rdProcessingHeader(self, fp=None):
743 748 if fp == None:
744 749 fp = self.fp
745 750
746 751 self.processingHeaderObj.read(fp)
747 752
748 753 def __rdRadarControllerHeader(self, fp=None):
749 754 if fp == None:
750 755 fp = self.fp
751 756
752 757 self.radarControllerHeaderObj.read(fp)
753 758
754 759 def __rdSystemHeader(self, fp=None):
755 760 if fp == None:
756 761 fp = self.fp
757 762
758 763 self.systemHeaderObj.read(fp)
759 764
760 765 def __rdBasicHeader(self, fp=None):
761 766 if fp == None:
762 767 fp = self.fp
763 768
764 769 self.basicHeaderObj.read(fp)
765 770
766 771
767 772 def __readFirstHeader(self):
768 773 self.__rdBasicHeader()
769 774 self.__rdSystemHeader()
770 775 self.__rdRadarControllerHeader()
771 776 self.__rdProcessingHeader()
772 777
773 778 self.firstHeaderSize = self.basicHeaderObj.size
774 779
775 780 datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
776 781 if datatype == 0:
777 782 datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
778 783 elif datatype == 1:
779 784 datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')])
780 785 elif datatype == 2:
781 786 datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')])
782 787 elif datatype == 3:
783 788 datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')])
784 789 elif datatype == 4:
785 790 datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')])
786 791 elif datatype == 5:
787 792 datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')])
788 793 else:
789 794 raise ValueError, 'Data type was not defined'
790 795
791 796 self.dtype = datatype_str
792 797 self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
793 798 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1)
794 799 # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
795 800 # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
796 801 self.getBlockDimension()
797 802
798 803
799 804 def __verifyFile(self, filename, msgFlag=True):
800 805 msg = None
801 806 try:
802 807 fp = open(filename, 'rb')
803 808 currentPosition = fp.tell()
804 809 except:
805 810 if msgFlag:
806 811 print "The file %s can't be opened" % (filename)
807 812 return False
808 813
809 814 neededSize = self.processingHeaderObj.blockSize + self.firstHeaderSize
810 815
811 816 if neededSize == 0:
812 817 basicHeaderObj = BasicHeader(LOCALTIME)
813 818 systemHeaderObj = SystemHeader()
814 819 radarControllerHeaderObj = RadarControllerHeader()
815 820 processingHeaderObj = ProcessingHeader()
816 821
817 822 try:
818 823 if not( basicHeaderObj.read(fp) ): raise IOError
819 824 if not( systemHeaderObj.read(fp) ): raise IOError
820 825 if not( radarControllerHeaderObj.read(fp) ): raise IOError
821 826 if not( processingHeaderObj.read(fp) ): raise IOError
822 827 data_type = int(numpy.log2((processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
823 828
824 829 neededSize = processingHeaderObj.blockSize + basicHeaderObj.size
825 830
826 831 except:
827 832 if msgFlag:
828 833 print "\tThe file %s is empty or it hasn't enough data" % filename
829 834
830 835 fp.close()
831 836 return False
832 837 else:
833 838 msg = "\tSkipping the file %s due to it hasn't enough data" %filename
834 839
835 840 fp.close()
836 841 fileSize = os.path.getsize(filename)
837 842 currentSize = fileSize - currentPosition
838 843 if currentSize < neededSize:
839 844 if msgFlag and (msg != None):
840 845 print msg #print"\tSkipping the file %s due to it hasn't enough data" %filename
841 846 return False
842 847
843 848 return True
844 849
845 850 def setup(self,
846 851 path=None,
847 852 startDate=None,
848 853 endDate=None,
849 854 startTime=datetime.time(0,0,0),
850 855 endTime=datetime.time(23,59,59),
851 856 set=0,
852 857 expLabel = "",
853 858 ext = None,
854 859 online = False,
855 860 delay = 60,
856 861 walk = True):
857 862
858 863 if path == None:
859 864 raise ValueError, "The path is not valid"
860 865
861 866 if ext == None:
862 867 ext = self.ext
863 868
864 869 if online:
865 870 print "Searching files in online mode..."
866 871
867 872 for nTries in range( self.nTries ):
868 873 fullpath, foldercounter, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext, walk=walk)
869 874
870 875 if fullpath:
871 876 break
872 877
873 878 print '\tWaiting %0.2f sec for a valid file in %s: try %02d ...' % (self.delay, path, nTries+1)
874 879 time.sleep( self.delay )
875 880
876 881 if not(fullpath):
877 882 print "There 'isn't valied files in %s" % path
878 883 return None
879 884
880 885 self.year = year
881 886 self.doy = doy
882 887 self.set = set - 1
883 888 self.path = path
884 889 self.foldercounter = foldercounter
885 890
886 891 else:
887 892 print "Searching files in offline mode ..."
888 893 pathList, filenameList = self.__searchFilesOffLine(path, startDate=startDate, endDate=endDate,
889 894 startTime=startTime, endTime=endTime,
890 895 set=set, expLabel=expLabel, ext=ext,
891 896 walk=walk)
892 897
893 898 if not(pathList):
894 899 print "No *%s files into the folder %s \nfor the range: %s - %s"%(ext, path,
895 900 datetime.datetime.combine(startDate,startTime).ctime(),
896 901 datetime.datetime.combine(endDate,endTime).ctime())
897 902
898 903 sys.exit(-1)
899 904
900 905
901 906 self.fileIndex = -1
902 907 self.pathList = pathList
903 908 self.filenameList = filenameList
904 909
905 910 self.online = online
906 911 self.delay = delay
907 912 ext = ext.lower()
908 913 self.ext = ext
909 914
910 915 if not(self.setNextFile()):
911 916 if (startDate!=None) and (endDate!=None):
912 917 print "No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
913 918 elif startDate != None:
914 919 print "No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
915 920 else:
916 921 print "No files"
917 922
918 923 sys.exit(-1)
919 924
920 925 # self.updateDataHeader()
921 926
922 927 return self.dataOut
923 928
929 def getBasicHeader(self):
930
931 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000. + self.profileIndex * self.ippSeconds
932
933 self.dataOut.flagTimeBlock = self.flagTimeBlock
934
935 self.dataOut.timeZone = self.basicHeaderObj.timeZone
936
937 self.dataOut.dstFlag = self.basicHeaderObj.dstFlag
938
939 self.dataOut.errorCount = self.basicHeaderObj.errorCount
940
941 self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime
942
943 def getFirstHeader(self):
944
945 raise ValueError, "This method has not been implemented"
946
924 947 def getData(self):
925 948
926 949 raise ValueError, "This method has not been implemented"
927 950
928 951 def hasNotDataInBuffer(self):
929 952
930 953 raise ValueError, "This method has not been implemented"
931 954
932 955 def readBlock(self):
933 956
934 957 raise ValueError, "This method has not been implemented"
935 958
936 959 def isEndProcess(self):
937 960
938 961 return self.flagNoMoreFiles
939 962
940 963 def printReadBlocks(self):
941 964
942 965 print "Number of read blocks per file %04d" %self.nReadBlocks
943 966
944 967 def printTotalBlocks(self):
945 968
946 969 print "Number of read blocks %04d" %self.nTotalBlocks
947 970
948 971 def printNumberOfBlock(self):
949 972
950 973 if self.flagIsNewBlock:
951 974 print "Block No. %04d, Total blocks %04d -> %s" %(self.basicHeaderObj.dataBlock, self.nTotalBlocks, self.dataOut.datatime.ctime())
952 975
953 976 def printInfo(self):
954 977
955 978 if self.__printInfo == False:
956 979 return
957 980
958 981 self.basicHeaderObj.printInfo()
959 982 self.systemHeaderObj.printInfo()
960 983 self.radarControllerHeaderObj.printInfo()
961 984 self.processingHeaderObj.printInfo()
962 985
963 986 self.__printInfo = False
964 987
965 988
966 989 def run(self, **kwargs):
967 990
968 991 if not(self.isConfig):
969 992
970 993 # self.dataOut = dataOut
971 994 self.setup(**kwargs)
972 995 self.isConfig = True
973 996
974 997 self.getData()
975 998
976 999 class JRODataWriter(JRODataIO, Operation):
977 1000
978 1001 """
979 1002 Esta clase permite escribir datos a archivos procesados (.r o ,pdata). La escritura
980 1003 de los datos siempre se realiza por bloques.
981 1004 """
982 1005
983 1006 blockIndex = 0
984 1007
985 1008 path = None
986 1009
987 1010 setFile = None
988 1011
989 1012 profilesPerBlock = None
990 1013
991 1014 blocksPerFile = None
992 1015
993 1016 nWriteBlocks = 0
994 1017
995 1018 def __init__(self, dataOut=None):
996 1019 raise ValueError, "Not implemented"
997 1020
998 1021
999 1022 def hasAllDataInBuffer(self):
1000 1023 raise ValueError, "Not implemented"
1001 1024
1002 1025
1003 1026 def setBlockDimension(self):
1004 1027 raise ValueError, "Not implemented"
1005 1028
1006 1029
1007 1030 def writeBlock(self):
1008 1031 raise ValueError, "No implemented"
1009 1032
1010 1033
1011 1034 def putData(self):
1012 1035 raise ValueError, "No implemented"
1013 1036
1014 def getDataHeader(self):
1037
1038 def setBasicHeader(self):
1039
1040 self.basicHeaderObj.size = self.basicHeaderSize #bytes
1041 self.basicHeaderObj.version = self.versionFile
1042 self.basicHeaderObj.dataBlock = self.nTotalBlocks
1043
1044 utc = numpy.floor(self.dataOut.utctime)
1045 milisecond = (self.dataOut.utctime - utc)* 1000.0
1046
1047 self.basicHeaderObj.utc = utc
1048 self.basicHeaderObj.miliSecond = milisecond
1049 self.basicHeaderObj.timeZone = self.dataOut.timeZone
1050 self.basicHeaderObj.dstFlag = self.dataOut.dstFlag
1051 self.basicHeaderObj.errorCount = self.dataOut.errorCount
1052
1053 def setFirstHeader(self):
1015 1054 """
1016 1055 Gets a copy of the First Header
1017 1056
1018 1057 Affected:
1019 1058
1020 1059 self.basicHeaderObj
1021 1060 self.systemHeaderObj
1022 1061 self.radarControllerHeaderObj
1023 1062 self.processingHeaderObj
1024 1063
1025 1064 Return:
1026 1065 None
1027 1066 """
1028 1067
1029 1068 raise ValueError, "No implemented"
1030
1031 def getBasicHeader(self):
1032
1033 self.basicHeaderObj.size = self.basicHeaderSize #bytes
1034 self.basicHeaderObj.version = self.versionFile
1035 self.basicHeaderObj.dataBlock = self.nTotalBlocks
1036
1037 utc = numpy.floor(self.dataOut.utctime)
1038 milisecond = (self.dataOut.utctime - utc)* 1000.0
1039
1040 self.basicHeaderObj.utc = utc
1041 self.basicHeaderObj.miliSecond = milisecond
1042 self.basicHeaderObj.timeZone = 0
1043 self.basicHeaderObj.dstFlag = 0
1044 self.basicHeaderObj.errorCount = 0
1045
1069
1046 1070 def __writeFirstHeader(self):
1047 1071 """
1048 1072 Escribe el primer header del file es decir el Basic header y el Long header (SystemHeader, RadarControllerHeader, ProcessingHeader)
1049 1073
1050 1074 Affected:
1051 1075 __dataType
1052 1076
1053 1077 Return:
1054 1078 None
1055 1079 """
1056 1080
1057 1081 # CALCULAR PARAMETROS
1058 1082
1059 1083 sizeLongHeader = self.systemHeaderObj.size + self.radarControllerHeaderObj.size + self.processingHeaderObj.size
1060 1084 self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader
1061 1085
1062 1086 self.basicHeaderObj.write(self.fp)
1063 1087 self.systemHeaderObj.write(self.fp)
1064 1088 self.radarControllerHeaderObj.write(self.fp)
1065 1089 self.processingHeaderObj.write(self.fp)
1066 1090
1067 1091 self.dtype = self.dataOut.dtype
1068 1092
1069 1093 def __setNewBlock(self):
1070 1094 """
1071 1095 If this is a new file the First Header is written, otherwise only the Basic Header is written
1072 1096 
1073 1097 Return:
1074 1098 0 : if nothing could be written
1075 1099 1 : if the Basic Header or the First Header was written
1076 1100 """
1077 1101 if self.fp == None:
1078 1102 self.setNextFile()
1079 1103
1080 1104 if self.flagIsNewFile:
1081 1105 return 1
1082 1106
1083 1107 if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
1084 1108 self.basicHeaderObj.write(self.fp)
1085 1109 return 1
1086 1110
1087 1111 if not( self.setNextFile() ):
1088 1112 return 0
1089 1113
1090 1114 return 1
1091 1115
1092 1116
1093 1117 def writeNextBlock(self):
1094 1118 """
1095 1119 Selects the next data block and writes it to a file
1096 1120 
1097 1121 Return:
1098 1122 0 : if the data block could not be written
1099 1123 1 : if the data block was written successfully
1100 1124 """
1101 1125 if not( self.__setNewBlock() ):
1102 1126 return 0
1103 1127
1104 1128 self.writeBlock()
1105 1129
1106 1130 return 1
1107 1131
1108 1132 def setNextFile(self):
1109 1133 """
1110 1134 Determina el siguiente file que sera escrito
1111 1135
1112 1136 Affected:
1113 1137 self.filename
1114 1138 self.subfolder
1115 1139 self.fp
1116 1140 self.setFile
1117 1141 self.flagIsNewFile
1118 1142
1119 1143 Return:
1120 1144 0 : Si el archivo no puede ser escrito
1121 1145 1 : Si el archivo esta listo para ser escrito
1122 1146 """
1123 1147 ext = self.ext
1124 1148 path = self.path
1125 1149
1126 1150 if self.fp != None:
1127 1151 self.fp.close()
1128 1152
1129 1153 timeTuple = time.localtime( self.dataOut.utctime)
1130 1154 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
1131 1155
1132 1156 fullpath = os.path.join( path, subfolder )
1133 1157 if not( os.path.exists(fullpath) ):
1134 1158 os.mkdir(fullpath)
1135 1159 self.setFile = -1 #inicializo mi contador de seteo
1136 1160 else:
1137 1161 filesList = os.listdir( fullpath )
1138 1162 if len( filesList ) > 0:
1139 1163 filesList = sorted( filesList, key=str.lower )
1140 1164 filen = filesList[-1]
1141 1165 # el filename debera tener el siguiente formato
1142 1166 # 0 1234 567 89A BCDE (hex)
1143 1167 # x YYYY DDD SSS .ext
1144 1168 if isNumber( filen[8:11] ):
1145 1169 self.setFile = int( filen[8:11] ) #inicializo mi contador de seteo al seteo del ultimo file
1146 1170 else:
1147 1171 self.setFile = -1
1148 1172 else:
1149 1173 self.setFile = -1 #inicializo mi contador de seteo
1150 1174
1151 1175 setFile = self.setFile
1152 1176 setFile += 1
1153 1177
1154 1178 file = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
1155 1179 timeTuple.tm_year,
1156 1180 timeTuple.tm_yday,
1157 1181 setFile,
1158 1182 ext )
1159 1183
1160 1184 filename = os.path.join( path, subfolder, file )
1161 1185
1162 1186 fp = open( filename,'wb' )
1163 1187
1164 1188 self.blockIndex = 0
1165 1189
1166 1190 #guardando atributos
1167 1191 self.filename = filename
1168 1192 self.subfolder = subfolder
1169 1193 self.fp = fp
1170 1194 self.setFile = setFile
1171 1195 self.flagIsNewFile = 1
1172 1196
1173 self.getDataHeader()
1197 self.setFirstHeader()
1174 1198
1175 1199 print 'Writing the file: %s'%self.filename
1176 1200
1177 1201 self.__writeFirstHeader()
1178 1202
1179 1203 return 1
1180 1204
1181 1205 def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=None, set=0, ext=None):
1182 1206 """
1183 1207 Setea el tipo de formato en la cual sera guardada la data y escribe el First Header
1184 1208
1185 1209 Inputs:
1186 1210 path : el path destino en el cual se escribiran los files a crear
1187 1211 format : formato en el cual sera salvado un file
1188 1212 set : el setebo del file
1189 1213
1190 1214 Return:
1191 1215 0 : Si no realizo un buen seteo
1192 1216 1 : Si realizo un buen seteo
1193 1217 """
1194 1218
1195 1219 if ext == None:
1196 1220 ext = self.ext
1197 1221
1198 1222 ext = ext.lower()
1199 1223
1200 1224 self.ext = ext
1201 1225
1202 1226 self.path = path
1203 1227
1204 1228 self.setFile = set - 1
1205 1229
1206 1230 self.blocksPerFile = blocksPerFile
1207 1231
1208 1232 self.profilesPerBlock = profilesPerBlock
1209 1233
1210 1234 self.dataOut = dataOut
1211 1235
1212 1236 if not(self.setNextFile()):
1213 1237 print "There isn't a next file"
1214 1238 return 0
1215 1239
1216 1240 self.setBlockDimension()
1217 1241
1218 1242 return 1
1219 1243
1220 1244 def run(self, dataOut, **kwargs):
1221 1245
1222 1246 if not(self.isConfig):
1223 1247
1224 1248 self.setup(dataOut, **kwargs)
1225 1249 self.isConfig = True
1226 1250
1227 1251 self.putData()
1228 1252
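The writer is driven through run(), which configures itself on the first call; a minimal hedged sketch, assuming a concrete JRODataWriter subclass and a populated dataOut object (names and arguments below are illustrative only):

    # writerObj: instance of a concrete subclass implementing writeBlock()/putData()
    # dataOut:   JROData object produced by a matching reader
    writerObj.run(dataOut, path="/output", blocksPerFile=100)   # setup() runs on the first call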
1229 1253 class VoltageReader(JRODataReader):
1230 1254 """
1231 1255 Esta clase permite leer datos de voltage desde archivos en formato rawdata (.r). La lectura
1232 1256 de los datos siempre se realiza por bloques. Los datos leidos (array de 3 dimensiones:
1233 1257 perfiles*alturas*canales) son almacenados en la variable "buffer".
1234 1258
1235 1259 perfiles * alturas * canales
1236 1260
1237 1261 Esta clase contiene instancias (objetos) de las clases BasicHeader, SystemHeader,
1238 1262 RadarControllerHeader y Voltage. Los tres primeros se usan para almacenar informacion de la
1239 1263 cabecera de datos (metadata), y el cuarto (Voltage) para obtener y almacenar un perfil de
1240 1264 datos desde el "buffer" cada vez que se ejecute el metodo "getData".
1241 1265
1242 1266 Example:
1243 1267
1244 1268 dpath = "/home/myuser/data"
1245 1269
1246 1270 startTime = datetime.datetime(2010,1,20,0,0,0)
1247 1271 
1248 1272 endTime = datetime.datetime(2010,1,21,23,59,59)
1249 1273
1250 1274 readerObj = VoltageReader()
1251 1275
1252 1276 readerObj.setup(dpath, startTime, endTime)
1253 1277
1254 1278 while(True):
1255 1279
1256 1280 #to get one profile
1257 1281 profile = readerObj.getData()
1258 1282
1259 1283 #print the profile
1260 1284 print profile
1261 1285
1262 1286 #If you want to see all datablock
1263 1287 print readerObj.datablock
1264 1288
1265 1289 if readerObj.flagNoMoreFiles:
1266 1290 break
1267 1291
1268 1292 """
1269 1293
1270 1294 ext = ".r"
1271 1295
1272 1296 optchar = "D"
1273 1297 dataOut = None
1274 1298
1275 1299
1276 1300 def __init__(self):
1277 1301 """
1278 1302 Inicializador de la clase VoltageReader para la lectura de datos de voltage.
1279 1303
1280 1304 Input:
1281 1305 dataOut : Objeto de la clase Voltage. Este objeto sera utilizado para
1282 1306 almacenar un perfil de datos cada vez que se haga un requerimiento
1283 1307 (getData). El perfil sera obtenido a partir del buffer de datos,
1284 1308 si el buffer esta vacio se hara un nuevo proceso de lectura de un
1285 1309 bloque de datos.
1286 1310 Si este parametro no es pasado se creara uno internamente.
1287 1311
1288 1312 Variables afectadas:
1289 1313 self.dataOut
1290 1314
1291 1315 Return:
1292 1316 None
1293 1317 """
1294 1318
1295 1319 self.isConfig = False
1296 1320
1297 1321 self.datablock = None
1298 1322
1299 1323 self.utc = 0
1300 1324
1301 1325 self.ext = ".r"
1302 1326
1303 1327 self.optchar = "D"
1304 1328
1305 1329 self.basicHeaderObj = BasicHeader(LOCALTIME)
1306 1330
1307 1331 self.systemHeaderObj = SystemHeader()
1308 1332
1309 1333 self.radarControllerHeaderObj = RadarControllerHeader()
1310 1334
1311 1335 self.processingHeaderObj = ProcessingHeader()
1312 1336
1313 1337 self.online = 0
1314 1338
1315 1339 self.fp = None
1316 1340
1317 1341 self.idFile = None
1318 1342
1319 1343 self.dtype = None
1320 1344
1321 1345 self.fileSizeByHeader = None
1322 1346
1323 1347 self.filenameList = []
1324 1348
1325 1349 self.filename = None
1326 1350
1327 1351 self.fileSize = None
1328 1352
1329 1353 self.firstHeaderSize = 0
1330 1354
1331 1355 self.basicHeaderSize = 24
1332 1356
1333 1357 self.pathList = []
1334 1358
1335 1359 self.filenameList = []
1336 1360
1337 1361 self.lastUTTime = 0
1338 1362
1339 1363 self.maxTimeStep = 30
1340 1364
1341 1365 self.flagNoMoreFiles = 0
1342 1366
1343 1367 self.set = 0
1344 1368
1345 1369 self.path = None
1346 1370
1347 1371 self.profileIndex = 2**32-1
1348 1372
1349 1373 self.delay = 3 #seconds
1350 1374
1351 1375 self.nTries = 3 #number of tries
1352 1376
1353 1377 self.nFiles = 3 #number of files for searching
1354 1378
1355 1379 self.nReadBlocks = 0
1356 1380
1357 1381 self.flagIsNewFile = 1
1358 1382
1359 1383 self.__isFirstTimeOnline = 1
1360 1384
1361 1385 self.ippSeconds = 0
1362 1386
1363 1387 self.flagTimeBlock = 0
1364 1388
1365 1389 self.flagIsNewBlock = 0
1366 1390
1367 1391 self.nTotalBlocks = 0
1368 1392
1369 1393 self.blocksize = 0
1370 1394
1371 1395 self.dataOut = self.createObjByDefault()
1372 1396
1373 1397 def createObjByDefault(self):
1374 1398
1375 1399 dataObj = Voltage()
1376 1400
1377 1401 return dataObj
1378 1402
1379 1403 def __hasNotDataInBuffer(self):
1380 1404 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
1381 1405 return 1
1382 1406 return 0
1383 1407
1384 1408
1385 1409 def getBlockDimension(self):
1386 1410 """
1387 1411 Obtiene la cantidad de puntos a leer por cada bloque de datos
1388 1412
1389 1413 Affected:
1390 1414 self.blocksize
1391 1415
1392 1416 Return:
1393 1417 None
1394 1418 """
1395 1419 pts2read = self.processingHeaderObj.profilesPerBlock * self.processingHeaderObj.nHeights * self.systemHeaderObj.nChannels
1396 1420 self.blocksize = pts2read
1397 1421
1398 1422
1399 1423 def readBlock(self):
1400 1424 """
1401 1425 readBlock lee el bloque de datos desde la posicion actual del puntero del archivo
1402 1426 (self.fp) y actualiza todos los parametros relacionados al bloque de datos
1403 1427 (metadata + data). La data leida es almacenada en el buffer y el contador del buffer
1404 1428 es seteado a 0
1405 1429
1406 1430 Inputs:
1407 1431 None
1408 1432
1409 1433 Return:
1410 1434 None
1411 1435
1412 1436 Affected:
1413 1437 self.profileIndex
1414 1438 self.datablock
1415 1439 self.flagIsNewFile
1416 1440 self.flagIsNewBlock
1417 1441 self.nTotalBlocks
1418 1442
1419 1443 Exceptions:
1420 1444 Raised when a read block is not a valid block
1421 1445 """
1422 1446
1423 1447 junk = numpy.fromfile( self.fp, self.dtype, self.blocksize )
1424 1448
1425 1449 try:
1426 1450 junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
1427 1451 except:
1428 1452 print "The read block (%3d) does not have enough data" %self.nReadBlocks
1429 1453 return 0
1430 1454
1431 1455 junk = numpy.transpose(junk, (2,0,1))
1432 1456 self.datablock = junk['real'] + junk['imag']*1j
1433 1457
1434 1458 self.profileIndex = 0
1435 1459
1436 1460 self.flagIsNewFile = 0
1437 1461 self.flagIsNewBlock = 1
1438 1462
1439 1463 self.nTotalBlocks += 1
1440 1464 self.nReadBlocks += 1
1441 1465
1442 1466 return 1
1443 1467
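# Editor's sketch of the same read logic as readBlock, outside the class and with
# assumed values: a structured dtype with 'real'/'imag' fields and hypothetical
# block dimensions. Not the class implementation itself.
import numpy

block_dtype = numpy.dtype([('real', '<i2'), ('imag', '<i2')])   # assumed 16-bit samples
profiles, heights, channels = 128, 200, 4                        # hypothetical dimensions

def read_one_voltage_block(fp):
    raw = numpy.fromfile(fp, block_dtype, profiles * heights * channels)
    raw = raw.reshape((profiles, heights, channels))
    raw = numpy.transpose(raw, (2, 0, 1))            # -> (channels, profiles, heights)
    return raw['real'] + raw['imag'] * 1j            # complex voltage block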
1468 def getFirstHeader(self):
1469
1470 self.dataOut.dtype = self.dtype
1471
1472 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
1473
1474 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
1475
1476 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
1477
1478 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
1479
1480 self.dataOut.ippSeconds = self.ippSeconds
1481
1482 self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt
1483
1484 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
1485
1486 self.dataOut.flagShiftFFT = False
1487
1488 if self.radarControllerHeaderObj.code != None:
1489
1490 self.dataOut.nCode = self.radarControllerHeaderObj.nCode
1491
1492 self.dataOut.nBaud = self.radarControllerHeaderObj.nBaud
1493
1494 self.dataOut.code = self.radarControllerHeaderObj.code
1495
1496 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
1497
1498 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
1499
1500 self.dataOut.flagDecodeData = False #assume the data has not been decoded
1501
1502 self.dataOut.flagDeflipData = False #assume the data has not been deflipped
1503
1504 self.dataOut.flagShiftFFT = False
1444 1505
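# Editor's sketch of how getFirstHeader builds the height list, with assumed
# header values (firstHeight/deltaHeight in km, nHeights in samples):
import numpy

firstHeight, deltaHeight, nHeights = 80.0, 0.15, 200      # hypothetical values
xf = firstHeight + nHeights * deltaHeight
heightList = numpy.arange(firstHeight, xf, deltaHeight)   # 200 heights: 80.00, 80.15, ...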
1445 1506 def getData(self):
1446 1507 """
1447 1508 getData takes one data unit from the read buffer and copies it into the "Voltage" class
1448 1509 together with all its associated parameters (metadata). When the read buffer is empty,
1449 1510 a new data block must be read using "readNextBlock"
1450 1511
1451 1512 It also increments the buffer counter by 1.
1452 1513
1453 1514 Return:
1454 1515 data : returns one voltage profile (heights * channels) copied from the
1455 1516 buffer. If there are no more files to read it returns None.
1456 1517
1457 1518 Affected variables:
1458 1519 self.dataOut
1459 1520 self.profileIndex
1460 1521
1461 1522 Affected:
1462 1523 self.dataOut
1463 1524 self.profileIndex
1464 1525 self.flagTimeBlock
1465 1526 self.flagIsNewBlock
1466 1527 """
1467 1528
1468 1529 if self.flagNoMoreFiles:
1469 1530 self.dataOut.flagNoData = True
1470 1531 print 'Process finished'
1471 1532 return 0
1472 1533
1473 1534 self.flagTimeBlock = 0
1474 1535 self.flagIsNewBlock = 0
1475 1536
1476 1537 if self.__hasNotDataInBuffer():
1477 1538
1478 1539 if not( self.readNextBlock() ):
1479 1540 return 0
1480 1541
1481 self.dataOut.dtype = self.dtype
1482
1483 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
1484
1485 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
1486
1487 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
1488
1489 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
1490
1491 self.dataOut.flagTimeBlock = self.flagTimeBlock
1492
1493 self.dataOut.ippSeconds = self.ippSeconds
1494
1495 self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt
1496
1497 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
1498
1499 self.dataOut.flagShiftFFT = False
1500
1501 if self.radarControllerHeaderObj.code != None:
1502
1503 self.dataOut.nCode = self.radarControllerHeaderObj.nCode
1504
1505 self.dataOut.nBaud = self.radarControllerHeaderObj.nBaud
1506
1507 self.dataOut.code = self.radarControllerHeaderObj.code
1508
1509 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
1510
1511 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
1512
1513 self.dataOut.flagDecodeData = False #assume the data has not been decoded
1514
1515 self.dataOut.flagDeflipData = False #assume the data has not been deflipped
1516
1517 self.dataOut.flagShiftFFT = False
1518
1519
1520 # self.updateDataHeader()
1521
1522 #data is a 3-dimensional numpy array (profiles, heights, channels)
1542 self.getFirstHeader()
1523 1543
1524 1544 if self.datablock == None:
1525 1545 self.dataOut.flagNoData = True
1526 1546 return 0
1527 1547
1528 1548 self.dataOut.data = self.datablock[:,self.profileIndex,:]
1529 1549
1530 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000. + self.profileIndex * self.ippSeconds
1531
1532 self.profileIndex += 1
1533
1534 1550 self.dataOut.flagNoData = False
1535 1551
1536 # print self.profileIndex, self.dataOut.utctime
1537 # if self.profileIndex == 800:
1538 # a=1
1552 self.getBasicHeader()
1539 1553
1554 self.profileIndex += 1
1540 1555
1541 1556 return self.dataOut.data
1542 1557
1543 1558
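# Editor's usage sketch for VoltageReader, assuming it exposes the same
# setup()/getData() interface illustrated in the SpectraReader docstring further
# below; the path and dates are placeholders.
import datetime

readerObj = VoltageReader()
readerObj.setup("/home/myuser/data",
                datetime.datetime(2010, 1, 20),
                datetime.datetime(2010, 1, 21))

while True:
    readerObj.getData()                     # fills readerObj.dataOut with one profile
    if readerObj.flagNoMoreFiles:
        break
    print readerObj.dataOut.data.shape      # (channels, heights)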
1544 1559 class VoltageWriter(JRODataWriter):
1545 1560 """
1546 1561 This class writes voltage data to processed files (.r). Data are always
1547 1562 written in blocks.
1548 1563 """
1549 1564
1550 1565 ext = ".r"
1551 1566
1552 1567 optchar = "D"
1553 1568
1554 1569 shapeBuffer = None
1555 1570
1556 1571
1557 1572 def __init__(self):
1558 1573 """
1559 1574 Constructor of the VoltageWriter class for writing voltage data.
1560 1575
1561 1576 Affected:
1562 1577 self.dataOut
1563 1578
1564 1579 Return: None
1565 1580 """
1566 1581
1567 1582 self.nTotalBlocks = 0
1568 1583
1569 1584 self.profileIndex = 0
1570 1585
1571 1586 self.isConfig = False
1572 1587
1573 1588 self.fp = None
1574 1589
1575 1590 self.flagIsNewFile = 1
1576 1591
1577 1592 self.nTotalBlocks = 0
1578 1593
1579 1594 self.flagIsNewBlock = 0
1580 1595
1581 1596 self.setFile = None
1582 1597
1583 1598 self.dtype = None
1584 1599
1585 1600 self.path = None
1586 1601
1587 1602 self.filename = None
1588 1603
1589 1604 self.basicHeaderObj = BasicHeader(LOCALTIME)
1590 1605
1591 1606 self.systemHeaderObj = SystemHeader()
1592 1607
1593 1608 self.radarControllerHeaderObj = RadarControllerHeader()
1594 1609
1595 1610 self.processingHeaderObj = ProcessingHeader()
1596 1611
1597 1612 def hasAllDataInBuffer(self):
1598 1613 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
1599 1614 return 1
1600 1615 return 0
1601 1616
1602 1617
1603 1618 def setBlockDimension(self):
1604 1619 """
1605 1620 Computes the dimensional shapes of the data sub-blocks that make up a block
1606 1621
1607 1622 Affected:
1608 1623 self.shape_spc_Buffer
1609 1624 self.shape_cspc_Buffer
1610 1625 self.shape_dc_Buffer
1611 1626
1612 1627 Return: None
1613 1628 """
1614 1629 self.shapeBuffer = (self.processingHeaderObj.profilesPerBlock,
1615 1630 self.processingHeaderObj.nHeights,
1616 1631 self.systemHeaderObj.nChannels)
1617 1632
1618 1633 self.datablock = numpy.zeros((self.systemHeaderObj.nChannels,
1619 1634 self.processingHeaderObj.profilesPerBlock,
1620 1635 self.processingHeaderObj.nHeights),
1621 1636 dtype=numpy.dtype('complex64'))
1622 1637
1623 1638
1624 1639 def writeBlock(self):
1625 1640 """
1626 1641 Writes the buffer to the designated file
1627 1642
1628 1643 Affected:
1629 1644 self.profileIndex
1630 1645 self.flagIsNewFile
1631 1646 self.flagIsNewBlock
1632 1647 self.nTotalBlocks
1633 1648 self.blockIndex
1634 1649
1635 1650 Return: None
1636 1651 """
1637 1652 data = numpy.zeros( self.shapeBuffer, self.dtype )
1638 1653
1639 1654 junk = numpy.transpose(self.datablock, (1,2,0))
1640 1655
1641 1656 data['real'] = junk.real
1642 1657 data['imag'] = junk.imag
1643 1658
1644 1659 data = data.reshape( (-1) )
1645 1660
1646 1661 data.tofile( self.fp )
1647 1662
1648 1663 self.datablock.fill(0)
1649 1664
1650 1665 self.profileIndex = 0
1651 1666 self.flagIsNewFile = 0
1652 1667 self.flagIsNewBlock = 1
1653 1668
1654 1669 self.blockIndex += 1
1655 1670 self.nTotalBlocks += 1
1656 1671
1657 1672 def putData(self):
1658 1673 """
1659 1674 Fills a data block and then writes it to a file
1660 1675
1661 1676 Affected:
1662 1677 self.flagIsNewBlock
1663 1678 self.profileIndex
1664 1679
1665 1680 Return:
1666 1681 0 : If there is no data or no more files can be written
1667 1682 1 : If one block of data was written to a file
1668 1683 """
1669 1684 if self.dataOut.flagNoData:
1670 1685 return 0
1671 1686
1672 1687 self.flagIsNewBlock = 0
1673 1688
1674 1689 if self.dataOut.flagTimeBlock:
1675 1690
1676 1691 self.datablock.fill(0)
1677 1692 self.profileIndex = 0
1678 1693 self.setNextFile()
1679 1694
1680 1695 if self.profileIndex == 0:
1681 self.getBasicHeader()
1696 self.setBasicHeader()
1682 1697
1683 1698 self.datablock[:,self.profileIndex,:] = self.dataOut.data
1684 1699
1685 1700 self.profileIndex += 1
1686 1701
1687 1702 if self.hasAllDataInBuffer():
1688 1703 #if self.flagIsNewFile:
1689 1704 self.writeNextBlock()
1690 # self.getDataHeader()
1705 # self.setFirstHeader()
1691 1706
1692 1707 return 1
1693 1708
1694 1709 def __getProcessFlags(self):
1695 1710
1696 1711 processFlags = 0
1697 1712
1698 1713 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
1699 1714 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
1700 1715 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
1701 1716 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
1702 1717 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
1703 1718 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
1704 1719
1705 1720 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
1706 1721
1707 1722
1708 1723
1709 1724 datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
1710 1725 PROCFLAG.DATATYPE_SHORT,
1711 1726 PROCFLAG.DATATYPE_LONG,
1712 1727 PROCFLAG.DATATYPE_INT64,
1713 1728 PROCFLAG.DATATYPE_FLOAT,
1714 1729 PROCFLAG.DATATYPE_DOUBLE]
1715 1730
1716 1731
1717 1732 for index in range(len(dtypeList)):
1718 1733 if self.dataOut.dtype == dtypeList[index]:
1719 1734 dtypeValue = datatypeValueList[index]
1720 1735 break
1721 1736
1722 1737 processFlags += dtypeValue
1723 1738
1724 1739 if self.dataOut.flagDecodeData:
1725 1740 processFlags += PROCFLAG.DECODE_DATA
1726 1741
1727 1742 if self.dataOut.flagDeflipData:
1728 1743 processFlags += PROCFLAG.DEFLIP_DATA
1729 1744
1730 1745 if self.dataOut.code != None:
1731 1746 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
1732 1747
1733 1748 if self.dataOut.nCohInt > 1:
1734 1749 processFlags += PROCFLAG.COHERENT_INTEGRATION
1735 1750
1736 1751 return processFlags
1737 1752
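# Editor's sketch of how the process-flags word is assembled by __getProcessFlags:
# one bit per option, added together. The mask values below are assumed for
# illustration only; the real ones come from the PROCFLAG class defined elsewhere
# in this module.
DATATYPE_SHORT = 0x00000002          # hypothetical bit mask
DECODE_DATA = 0x00000040             # hypothetical bit mask
COHERENT_INTEGRATION = 0x00000100    # hypothetical bit mask

processFlags = 0
processFlags += DATATYPE_SHORT            # dtype of the stored samples
processFlags += DECODE_DATA               # data were decoded before writing
processFlags += COHERENT_INTEGRATION      # written data have nCohInt > 1
print hex(processFlags)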
1738 1753
1739 1754 def __getBlockSize(self):
1740 1755 '''
1741 1756 This method determines the number of bytes in a Voltage-type data block
1742 1757 '''
1743 1758
1744 1759 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
1745 1760 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
1746 1761 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
1747 1762 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
1748 1763 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
1749 1764 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
1750 1765
1751 1766 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
1752 1767 datatypeValueList = [1,2,4,8,4,8]
1753 1768 for index in range(len(dtypeList)):
1754 1769 if self.dataOut.dtype == dtypeList[index]:
1755 1770 datatypeValue = datatypeValueList[index]
1756 1771 break
1757 1772
1758 1773 blocksize = int(self.dataOut.nHeights * self.dataOut.nChannels * self.dataOut.nProfiles * datatypeValue * 2)
1759 1774
1760 1775 return blocksize
1761 1776
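# Editor's size check matching __getBlockSize, assuming 16-bit samples (2 bytes)
# and hypothetical dimensions; the final factor 2 accounts for real + imaginary parts.
nHeights, nChannels, nProfiles, datatypeValue = 200, 4, 128, 2
blocksize = int(nHeights * nChannels * nProfiles * datatypeValue * 2)
print blocksize   # 409600 bytes per voltage block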
1762 def getDataHeader(self):
1777 def setFirstHeader(self):
1763 1778
1764 1779 """
1765 1780 Gets a copy of the First Header
1766 1781
1767 1782 Affected:
1768 1783 self.systemHeaderObj
1769 1784 self.radarControllerHeaderObj
1770 1785 self.dtype
1771 1786
1772 1787 Return:
1773 1788 None
1774 1789 """
1775 1790
1776 1791 self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
1777 1792 self.systemHeaderObj.nChannels = self.dataOut.nChannels
1778 1793 self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()
1779 1794
1780 self.getBasicHeader()
1795 self.setBasicHeader()
1781 1796
1782 1797 processingHeaderSize = 40 # bytes
1783 1798 self.processingHeaderObj.dtype = 0 # Voltage
1784 1799 self.processingHeaderObj.blockSize = self.__getBlockSize()
1785 1800 self.processingHeaderObj.profilesPerBlock = self.profilesPerBlock
1786 1801 self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
1787 1802 self.processingHeaderObj.nWindows = 1 #could be 1 or self.dataOut.processingHeaderObj.nWindows
1788 1803 self.processingHeaderObj.processFlags = self.__getProcessFlags()
1789 1804 self.processingHeaderObj.nCohInt = self.dataOut.nCohInt
1790 1805 self.processingHeaderObj.nIncohInt = 1 # When the source data is of Voltage type
1791 1806 self.processingHeaderObj.totalSpectra = 0 # When the source data is of Voltage type
1792 1807
1793 1808 if self.dataOut.code != None:
1794 1809 self.processingHeaderObj.code = self.dataOut.code
1795 1810 self.processingHeaderObj.nCode = self.dataOut.nCode
1796 1811 self.processingHeaderObj.nBaud = self.dataOut.nBaud
1797 1812 codesize = int(8 + 4 * self.dataOut.nCode * self.dataOut.nBaud)
1798 1813 processingHeaderSize += codesize
1799 1814
1800 1815 if self.processingHeaderObj.nWindows != 0:
1801 1816 self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
1802 1817 self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
1803 1818 self.processingHeaderObj.nHeights = self.dataOut.nHeights
1804 1819 self.processingHeaderObj.samplesWin = self.dataOut.nHeights
1805 1820 processingHeaderSize += 12
1806 1821
1807 1822 self.processingHeaderObj.size = processingHeaderSize
1808 1823
1809 1824 class SpectraReader(JRODataReader):
1810 1825 """
1811 1826 This class reads spectra data from processed files (.pdata). Data are always
1812 1827 read in blocks. The data read (3-dimensional arrays) are stored in three
1813 1828 buffers, one each for the Self Spectra, the Cross Spectra and the DC Channel.
1814 1829
1815 1830 equalChannelPairs * heights * profiles (Self Spectra)
1816 1831 differentChannelPairs * heights * profiles (Cross Spectra)
1817 1832 channels * heights (DC Channels)
1818 1833
1819 1834 This class holds instances (objects) of the BasicHeader, SystemHeader,
1820 1835 RadarControllerHeader and Spectra classes. The first three store the data
1821 1836 header information (metadata), and the fourth (Spectra) receives and stores a block of
1822 1837 data from the "buffer" every time the "getData" method is called.
1823 1838
1824 1839 Example:
1825 1840 dpath = "/home/myuser/data"
1826 1841
1827 1842 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
1828 1843
1829 1844 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
1830 1845
1831 1846 readerObj = SpectraReader()
1832 1847
1833 1848 readerObj.setup(dpath, startTime, endTime)
1834 1849
1835 1850 while(True):
1836 1851
1837 1852 readerObj.getData()
1838 1853
1839 1854 print readerObj.data_spc
1840 1855
1841 1856 print readerObj.data_cspc
1842 1857
1843 1858 print readerObj.data_dc
1844 1859
1845 1860 if readerObj.flagNoMoreFiles:
1846 1861 break
1847 1862
1848 1863 """
1849 1864
1850 1865 pts2read_SelfSpectra = 0
1851 1866
1852 1867 pts2read_CrossSpectra = 0
1853 1868
1854 1869 pts2read_DCchannels = 0
1855 1870
1856 1871 ext = ".pdata"
1857 1872
1858 1873 optchar = "P"
1859 1874
1860 1875 dataOut = None
1861 1876
1862 1877 nRdChannels = None
1863 1878
1864 1879 nRdPairs = None
1865 1880
1866 1881 rdPairList = []
1867
1868 1882
1869 1883 def __init__(self):
1870 1884 """
1871 1885 Constructor of the SpectraReader class for reading spectra data.
1872 1886
1873 1887 Inputs:
1874 1888 dataOut : Object of the Spectra class. This object is used to
1875 1889 store one data profile every time data is requested
1876 1890 (getData). The profile is taken from the data buffer;
1877 1891 if the buffer is empty, a new data block is read from
1878 1892 the file.
1879 1893 If this parameter is not given, one is created internally.
1880 1894
1881 1895 Affected:
1882 1896 self.dataOut
1883 1897
1884 1898 Return : None
1885 1899 """
1886 1900
1887 1901 self.isConfig = False
1888 1902
1889 1903 self.pts2read_SelfSpectra = 0
1890 1904
1891 1905 self.pts2read_CrossSpectra = 0
1892 1906
1893 1907 self.pts2read_DCchannels = 0
1894 1908
1895 1909 self.datablock = None
1896 1910
1897 1911 self.utc = None
1898 1912
1899 1913 self.ext = ".pdata"
1900 1914
1901 1915 self.optchar = "P"
1902 1916
1903 1917 self.basicHeaderObj = BasicHeader(LOCALTIME)
1904 1918
1905 1919 self.systemHeaderObj = SystemHeader()
1906 1920
1907 1921 self.radarControllerHeaderObj = RadarControllerHeader()
1908 1922
1909 1923 self.processingHeaderObj = ProcessingHeader()
1910 1924
1911 1925 self.online = 0
1912 1926
1913 1927 self.fp = None
1914 1928
1915 1929 self.idFile = None
1916 1930
1917 1931 self.dtype = None
1918 1932
1919 1933 self.fileSizeByHeader = None
1920 1934
1921 1935 self.filenameList = []
1922 1936
1923 1937 self.filename = None
1924 1938
1925 1939 self.fileSize = None
1926 1940
1927 1941 self.firstHeaderSize = 0
1928 1942
1929 1943 self.basicHeaderSize = 24
1930 1944
1931 1945 self.pathList = []
1932 1946
1933 1947 self.lastUTTime = 0
1934 1948
1935 1949 self.maxTimeStep = 30
1936 1950
1937 1951 self.flagNoMoreFiles = 0
1938 1952
1939 1953 self.set = 0
1940 1954
1941 1955 self.path = None
1942 1956
1943 1957 self.delay = 60 #seconds
1944 1958
1945 1959 self.nTries = 3 #number of retries
1946 1960
1947 1961 self.nFiles = 3 #number of files for searching
1948 1962
1949 1963 self.nReadBlocks = 0
1950 1964
1951 1965 self.flagIsNewFile = 1
1952 1966
1953 1967 self.__isFirstTimeOnline = 1
1954 1968
1955 1969 self.ippSeconds = 0
1956 1970
1957 1971 self.flagTimeBlock = 0
1958 1972
1959 1973 self.flagIsNewBlock = 0
1960 1974
1961 1975 self.nTotalBlocks = 0
1962 1976
1963 1977 self.blocksize = 0
1964 1978
1965 1979 self.dataOut = self.createObjByDefault()
1980
1981 self.profileIndex = 1 #Always
1966 1982
1967 1983
1968 1984 def createObjByDefault(self):
1969 1985
1970 1986 dataObj = Spectra()
1971 1987
1972 1988 return dataObj
1973 1989
1974 1990 def __hasNotDataInBuffer(self):
1975 1991 return 1
1976 1992
1977 1993
1978 1994 def getBlockDimension(self):
1979 1995 """
1980 1996 Computes the number of points to read per data block
1981 1997
1982 1998 Affected:
1983 1999 self.nRdChannels
1984 2000 self.nRdPairs
1985 2001 self.pts2read_SelfSpectra
1986 2002 self.pts2read_CrossSpectra
1987 2003 self.pts2read_DCchannels
1988 2004 self.blocksize
1989 2005 self.dataOut.nChannels
1990 2006 self.dataOut.nPairs
1991 2007
1992 2008 Return:
1993 2009 None
1994 2010 """
1995 2011 self.nRdChannels = 0
1996 2012 self.nRdPairs = 0
1997 2013 self.rdPairList = []
1998 2014
1999 2015 for i in range(0, self.processingHeaderObj.totalSpectra*2, 2):
2000 2016 if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i+1]:
2001 2017 self.nRdChannels = self.nRdChannels + 1 #pair of equal channels
2002 2018 else:
2003 2019 self.nRdPairs = self.nRdPairs + 1 #pair of different channels
2004 2020 self.rdPairList.append((self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1]))
2005 2021
2006 2022 pts2read = self.processingHeaderObj.nHeights * self.processingHeaderObj.profilesPerBlock
2007 2023
2008 2024 self.pts2read_SelfSpectra = int(self.nRdChannels * pts2read)
2009 2025 self.blocksize = self.pts2read_SelfSpectra
2010 2026
2011 2027 if self.processingHeaderObj.flag_cspc:
2012 2028 self.pts2read_CrossSpectra = int(self.nRdPairs * pts2read)
2013 2029 self.blocksize += self.pts2read_CrossSpectra
2014 2030
2015 2031 if self.processingHeaderObj.flag_dc:
2016 2032 self.pts2read_DCchannels = int(self.systemHeaderObj.nChannels * self.processingHeaderObj.nHeights)
2017 2033 self.blocksize += self.pts2read_DCchannels
2018 2034
2019 2035 # self.blocksize = self.pts2read_SelfSpectra + self.pts2read_CrossSpectra + self.pts2read_DCchannels
2020 2036
2021 2037
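# Editor's sketch of how getBlockDimension splits spectraComb into self-spectra
# channels and cross-spectra pairs; the channel indices below are a hypothetical
# example, the real ones come from processingHeaderObj.spectraComb.
spectraComb = [0, 0, 1, 1, 0, 1]      # (0,0) and (1,1) are self, (0,1) is a cross pair

nRdChannels, nRdPairs, rdPairList = 0, 0, []
for i in range(0, len(spectraComb), 2):
    if spectraComb[i] == spectraComb[i + 1]:
        nRdChannels += 1
    else:
        nRdPairs += 1
        rdPairList.append((spectraComb[i], spectraComb[i + 1]))

print nRdChannels, nRdPairs, rdPairList   # 2 1 [(0, 1)]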
2022 2038 def readBlock(self):
2023 2039 """
2024 2040 Reads the data block from the current file pointer position
2025 2041 (self.fp) and updates every parameter related to that block
2026 2042 (metadata + data). The data read is stored in the buffer and the buffer
2027 2043 counter is reset to 0
2028 2044
2029 2045 Return: None
2030 2046
2031 2047 Affected variables:
2032 2048
2033 2049 self.flagIsNewFile
2034 2050 self.flagIsNewBlock
2035 2051 self.nTotalBlocks
2036 2052 self.data_spc
2037 2053 self.data_cspc
2038 2054 self.data_dc
2039 2055
2040 2056 Exceptions:
2041 2057 Raised when a read block is not a valid block
2042 2058 """
2043 2059 blockOk_flag = False
2044 2060 fpointer = self.fp.tell()
2045 2061
2046 2062 spc = numpy.fromfile( self.fp, self.dtype[0], self.pts2read_SelfSpectra )
2047 2063 spc = spc.reshape( (self.nRdChannels, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #reshape into a 3D array
2048 2064
2049 2065 if self.processingHeaderObj.flag_cspc:
2050 2066 cspc = numpy.fromfile( self.fp, self.dtype, self.pts2read_CrossSpectra )
2051 2067 cspc = cspc.reshape( (self.nRdPairs, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #reshape into a 3D array
2052 2068
2053 2069 if self.processingHeaderObj.flag_dc:
2054 2070 dc = numpy.fromfile( self.fp, self.dtype, self.pts2read_DCchannels ) #int(self.processingHeaderObj.nHeights*self.systemHeaderObj.nChannels) )
2055 2071 dc = dc.reshape( (self.systemHeaderObj.nChannels, self.processingHeaderObj.nHeights) ) #reshape into a 2D array
2056 2072
2057 2073
2058 2074 if not(self.processingHeaderObj.shif_fft):
2059 2075 #shift right along axis 2 by a fixed number of positions
2060 2076 shift = int(self.processingHeaderObj.profilesPerBlock/2)
2061 2077 spc = numpy.roll( spc, shift , axis=2 )
2062 2078
2063 2079 if self.processingHeaderObj.flag_cspc:
2064 2080 #shift right along axis 2 by a fixed number of positions
2065 2081 cspc = numpy.roll( cspc, shift, axis=2 )
2066 2082
2067 2083 # self.processingHeaderObj.shif_fft = True
2068 2084
2069 2085 spc = numpy.transpose( spc, (0,2,1) )
2070 2086 self.data_spc = spc
2071 2087
2072 2088 if self.processingHeaderObj.flag_cspc:
2073 2089 cspc = numpy.transpose( cspc, (0,2,1) )
2074 2090 self.data_cspc = cspc['real'] + cspc['imag']*1j
2075 2091 else:
2076 2092 self.data_cspc = None
2077 2093
2078 2094 if self.processingHeaderObj.flag_dc:
2079 2095 self.data_dc = dc['real'] + dc['imag']*1j
2080 2096 else:
2081 2097 self.data_dc = None
2082 2098
2083 2099 self.flagIsNewFile = 0
2084 2100 self.flagIsNewBlock = 1
2085 2101
2086 2102 self.nTotalBlocks += 1
2087 2103 self.nReadBlocks += 1
2088 2104
2089 2105 return 1
2090 2106
2107 def getFirstHeader(self):
2108
2109 self.dataOut.dtype = self.dtype
2110
2111 self.dataOut.nPairs = self.nRdPairs
2112
2113 self.dataOut.pairsList = self.rdPairList
2114
2115 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
2116
2117 self.dataOut.nFFTPoints = self.processingHeaderObj.profilesPerBlock
2118
2119 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
2120
2121 self.dataOut.nIncohInt = self.processingHeaderObj.nIncohInt
2122
2123 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
2091 2124
2125 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
2126
2127 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
2128
2129 self.dataOut.ippSeconds = self.ippSeconds
2130
2131 self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt * self.processingHeaderObj.nIncohInt * self.dataOut.nFFTPoints
2132
2133 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
2134
2135 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
2136
2137 self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft
2138
2139 self.dataOut.flagDecodeData = False #assume the data has not been decoded
2140
2141 self.dataOut.flagDeflipData = True #assume the data has already been deflipped
2142
2143 if self.processingHeaderObj.code != None:
2144
2145 self.dataOut.nCode = self.processingHeaderObj.nCode
2146
2147 self.dataOut.nBaud = self.processingHeaderObj.nBaud
2148
2149 self.dataOut.code = self.processingHeaderObj.code
2150
2151 self.dataOut.flagDecodeData = True
2152
2092 2153 def getData(self):
2093 2154 """
2094 2155 Copies the read buffer into the "Spectra" class,
2095 2156 together with all its associated parameters (metadata). When the read buffer is empty,
2096 2157 a new data block must be read using "readNextBlock"
2097 2158
2098 2159 Return:
2099 2160 0 : If there are no more files available
2100 2161 1 : If the buffer was copied successfully
2101 2162
2102 2163 Affected:
2103 2164 self.dataOut
2104 2165
2105 2166 self.flagTimeBlock
2106 2167 self.flagIsNewBlock
2107 2168 """
2108 2169
2109 2170 if self.flagNoMoreFiles:
2110 2171 self.dataOut.flagNoData = True
2111 2172 print 'Process finished'
2112 2173 return 0
2113 2174
2114 2175 self.flagTimeBlock = 0
2115 2176 self.flagIsNewBlock = 0
2116 2177
2117 2178 if self.__hasNotDataInBuffer():
2118 2179
2119 2180 if not( self.readNextBlock() ):
2120 2181 self.dataOut.flagNoData = True
2121 return 0
2122
2123 # self.updateDataHeader()
2182 return 0
2124 2183
2125 2184 #data is a 3-dimensional numpy array (profiles, heights, channels)
2126 2185
2127 2186 if self.data_dc == None:
2128 2187 self.dataOut.flagNoData = True
2129 2188 return 0
2189
2190 self.getBasicHeader()
2191
2192 self.getFirstHeader()
2130 2193
2131 2194 self.dataOut.data_spc = self.data_spc
2132 2195
2133 2196 self.dataOut.data_cspc = self.data_cspc
2134 2197
2135 2198 self.dataOut.data_dc = self.data_dc
2136
2137 self.dataOut.flagTimeBlock = self.flagTimeBlock
2138 2199
2139 2200 self.dataOut.flagNoData = False
2140
2141 self.dataOut.dtype = self.dtype
2142
2143 # self.dataOut.nChannels = self.nRdChannels
2144
2145 self.dataOut.nPairs = self.nRdPairs
2146
2147 self.dataOut.pairsList = self.rdPairList
2148
2149 # self.dataOut.nHeights = self.processingHeaderObj.nHeights
2150
2151 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
2152
2153 self.dataOut.nFFTPoints = self.processingHeaderObj.profilesPerBlock
2154
2155 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
2156
2157 self.dataOut.nIncohInt = self.processingHeaderObj.nIncohInt
2158
2159 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
2160
2161 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
2162
2163 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
2164 2201
2165 # self.dataOut.channelIndexList = range(self.systemHeaderObj.nChannels)
2166
2167 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000.#+ self.profileIndex * self.ippSeconds
2168
2169 self.dataOut.ippSeconds = self.ippSeconds
2170
2171 self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt * self.processingHeaderObj.nIncohInt * self.dataOut.nFFTPoints
2172
2173 # self.profileIndex += 1
2174
2175 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
2176
2177 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
2178
2179 self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft
2180
2181 self.dataOut.flagDecodeData = False #assume the data has not been decoded
2182
2183 self.dataOut.flagDeflipData = True #assume the data has already been deflipped
2184
2185 if self.processingHeaderObj.code != None:
2186
2187 self.dataOut.nCode = self.processingHeaderObj.nCode
2188
2189 self.dataOut.nBaud = self.processingHeaderObj.nBaud
2190
2191 self.dataOut.code = self.processingHeaderObj.code
2192
2193 self.dataOut.flagDecodeData = True
2194
2195 2202 return self.dataOut.data_spc
2196 2203
2197 2204
2198 2205 class SpectraWriter(JRODataWriter):
2199 2206
2200 2207 """
2201 2208 This class writes spectra data to processed files (.pdata). Data are always
2202 2209 written in blocks.
2203 2210 """
2204 2211
2205 2212 ext = ".pdata"
2206 2213
2207 2214 optchar = "P"
2208 2215
2209 2216 shape_spc_Buffer = None
2210 2217
2211 2218 shape_cspc_Buffer = None
2212 2219
2213 2220 shape_dc_Buffer = None
2214 2221
2215 2222 data_spc = None
2216 2223
2217 2224 data_cspc = None
2218 2225
2219 2226 data_dc = None
2220 2227
2221 2228 # dataOut = None
2222 2229
2223 2230 def __init__(self):
2224 2231 """
2225 2232 Constructor of the SpectraWriter class for writing spectra data.
2226 2233
2227 2234 Affected:
2228 2235 self.dataOut
2229 2236 self.basicHeaderObj
2230 2237 self.systemHeaderObj
2231 2238 self.radarControllerHeaderObj
2232 2239 self.processingHeaderObj
2233 2240
2234 2241 Return: None
2235 2242 """
2236 2243
2237 2244 self.isConfig = False
2238 2245
2239 2246 self.nTotalBlocks = 0
2240 2247
2241 2248 self.data_spc = None
2242 2249
2243 2250 self.data_cspc = None
2244 2251
2245 2252 self.data_dc = None
2246 2253
2247 2254 self.fp = None
2248 2255
2249 2256 self.flagIsNewFile = 1
2250 2257
2251 2258 self.nTotalBlocks = 0
2252 2259
2253 2260 self.flagIsNewBlock = 0
2254 2261
2255 2262 self.setFile = None
2256 2263
2257 2264 self.dtype = None
2258 2265
2259 2266 self.path = None
2260 2267
2261 2268 self.noMoreFiles = 0
2262 2269
2263 2270 self.filename = None
2264 2271
2265 2272 self.basicHeaderObj = BasicHeader(LOCALTIME)
2266 2273
2267 2274 self.systemHeaderObj = SystemHeader()
2268 2275
2269 2276 self.radarControllerHeaderObj = RadarControllerHeader()
2270 2277
2271 2278 self.processingHeaderObj = ProcessingHeader()
2272 2279
2273 2280
2274 2281 def hasAllDataInBuffer(self):
2275 2282 return 1
2276 2283
2277 2284
2278 2285 def setBlockDimension(self):
2279 2286 """
2280 2287 Obtiene las formas dimensionales del los subbloques de datos que componen un bloque
2281 2288
2282 2289 Affected:
2283 2290 self.shape_spc_Buffer
2284 2291 self.shape_cspc_Buffer
2285 2292 self.shape_dc_Buffer
2286 2293
2287 2294 Return: None
2288 2295 """
2289 2296 self.shape_spc_Buffer = (self.dataOut.nChannels,
2290 2297 self.processingHeaderObj.nHeights,
2291 2298 self.processingHeaderObj.profilesPerBlock)
2292 2299
2293 2300 self.shape_cspc_Buffer = (self.dataOut.nPairs,
2294 2301 self.processingHeaderObj.nHeights,
2295 2302 self.processingHeaderObj.profilesPerBlock)
2296 2303
2297 2304 self.shape_dc_Buffer = (self.dataOut.nChannels,
2298 2305 self.processingHeaderObj.nHeights)
2299 2306
2300 2307
2301 2308 def writeBlock(self):
2302 2309 """
2303 2310 Writes the buffer to the designated file
2304 2311
2305 2312 Affected:
2306 2313 self.data_spc
2307 2314 self.data_cspc
2308 2315 self.data_dc
2309 2316 self.flagIsNewFile
2310 2317 self.flagIsNewBlock
2311 2318 self.nTotalBlocks
2312 2319 self.nWriteBlocks
2313 2320
2314 2321 Return: None
2315 2322 """
2316 2323
2317 2324 spc = numpy.transpose( self.data_spc, (0,2,1) )
2318 2325 if not( self.processingHeaderObj.shif_fft ):
2319 2326 spc = numpy.roll( spc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #shift right along axis 2 by a fixed number of positions
2320 2327 data = spc.reshape((-1))
2321 2328 data = data.astype(self.dtype[0])
2322 2329 data.tofile(self.fp)
2323 2330
2324 2331 if self.data_cspc != None:
2325 2332 data = numpy.zeros( self.shape_cspc_Buffer, self.dtype )
2326 2333 cspc = numpy.transpose( self.data_cspc, (0,2,1) )
2327 2334 if not( self.processingHeaderObj.shif_fft ):
2328 2335 cspc = numpy.roll( cspc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #shift right along axis 2 by a fixed number of positions
2329 2336 data['real'] = cspc.real
2330 2337 data['imag'] = cspc.imag
2331 2338 data = data.reshape((-1))
2332 2339 data.tofile(self.fp)
2333 2340
2334 2341 if self.data_dc != None:
2335 2342 data = numpy.zeros( self.shape_dc_Buffer, self.dtype )
2336 2343 dc = self.data_dc
2337 2344 data['real'] = dc.real
2338 2345 data['imag'] = dc.imag
2339 2346 data = data.reshape((-1))
2340 2347 data.tofile(self.fp)
2341 2348
2342 2349 self.data_spc.fill(0)
2343 2350 self.data_dc.fill(0)
2344 2351 if self.data_cspc != None:
2345 2352 self.data_cspc.fill(0)
2346 2353
2347 2354 self.flagIsNewFile = 0
2348 2355 self.flagIsNewBlock = 1
2349 2356 self.nTotalBlocks += 1
2350 2357 self.nWriteBlocks += 1
2351 2358 self.blockIndex += 1
2352 2359
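# Editor's sketch of the half-block roll applied in writeBlock when shif_fft is
# not set; it is equivalent to an fftshift along the profile axis (toy sizes below).
import numpy

profilesPerBlock = 8
spc = numpy.arange(profilesPerBlock).reshape(1, 1, profilesPerBlock)
shifted = numpy.roll(spc, profilesPerBlock / 2, axis=2)
print shifted.ravel()   # [4 5 6 7 0 1 2 3]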
2353 2360
2354 2361 def putData(self):
2355 2362 """
2356 2363 Fills a data block and then writes it to a file
2357 2364
2358 2365 Affected:
2359 2366 self.data_spc
2360 2367 self.data_cspc
2361 2368 self.data_dc
2362 2369
2363 2370 Return:
2364 2371 0 : If there is no data or no more files can be written
2365 2372 1 : If one block of data was written to a file
2366 2373 """
2367 2374
2368 2375 if self.dataOut.flagNoData:
2369 2376 return 0
2370 2377
2371 2378 self.flagIsNewBlock = 0
2372 2379
2373 2380 if self.dataOut.flagTimeBlock:
2374 2381 self.data_spc.fill(0)
2375 2382 self.data_cspc.fill(0)
2376 2383 self.data_dc.fill(0)
2377 2384 self.setNextFile()
2378 2385
2379 2386 if self.flagIsNewFile == 0:
2380 self.getBasicHeader()
2387 self.setBasicHeader()
2381 2388
2382 2389 self.data_spc = self.dataOut.data_spc.copy()
2383 2390 self.data_cspc = self.dataOut.data_cspc.copy()
2384 2391 self.data_dc = self.dataOut.data_dc.copy()
2385 2392
2386 2393 # #self.processingHeaderObj.dataBlocksPerFile)
2387 2394 if self.hasAllDataInBuffer():
2388 # self.getDataHeader()
2395 # self.setFirstHeader()
2389 2396 self.writeNextBlock()
2390 2397
2391 2398 return 1
2392 2399
2393 2400
2394 2401 def __getProcessFlags(self):
2395 2402
2396 2403 processFlags = 0
2397 2404
2398 2405 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
2399 2406 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
2400 2407 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
2401 2408 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
2402 2409 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
2403 2410 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
2404 2411
2405 2412 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
2406 2413
2407 2414
2408 2415
2409 2416 datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
2410 2417 PROCFLAG.DATATYPE_SHORT,
2411 2418 PROCFLAG.DATATYPE_LONG,
2412 2419 PROCFLAG.DATATYPE_INT64,
2413 2420 PROCFLAG.DATATYPE_FLOAT,
2414 2421 PROCFLAG.DATATYPE_DOUBLE]
2415 2422
2416 2423
2417 2424 for index in range(len(dtypeList)):
2418 2425 if self.dataOut.dtype == dtypeList[index]:
2419 2426 dtypeValue = datatypeValueList[index]
2420 2427 break
2421 2428
2422 2429 processFlags += dtypeValue
2423 2430
2424 2431 if self.dataOut.flagDecodeData:
2425 2432 processFlags += PROCFLAG.DECODE_DATA
2426 2433
2427 2434 if self.dataOut.flagDeflipData:
2428 2435 processFlags += PROCFLAG.DEFLIP_DATA
2429 2436
2430 2437 if self.dataOut.code != None:
2431 2438 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
2432 2439
2433 2440 if self.dataOut.nIncohInt > 1:
2434 2441 processFlags += PROCFLAG.INCOHERENT_INTEGRATION
2435 2442
2436 2443 if self.dataOut.data_dc != None:
2437 2444 processFlags += PROCFLAG.SAVE_CHANNELS_DC
2438 2445
2439 2446 return processFlags
2440 2447
2441 2448
2442 2449 def __getBlockSize(self):
2443 2450 '''
2444 2451 This method determines the number of bytes in a Spectra-type data block
2445 2452 '''
2446 2453
2447 2454 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
2448 2455 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
2449 2456 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
2450 2457 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
2451 2458 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
2452 2459 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
2453 2460
2454 2461 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
2455 2462 datatypeValueList = [1,2,4,8,4,8]
2456 2463 for index in range(len(dtypeList)):
2457 2464 if self.dataOut.dtype == dtypeList[index]:
2458 2465 datatypeValue = datatypeValueList[index]
2459 2466 break
2460 2467
2461 2468
2462 2469 pts2write = self.dataOut.nHeights * self.dataOut.nFFTPoints
2463 2470
2464 2471 pts2write_SelfSpectra = int(self.dataOut.nChannels * pts2write)
2465 2472 blocksize = (pts2write_SelfSpectra*datatypeValue)
2466 2473
2467 2474 if self.dataOut.data_cspc != None:
2468 2475 pts2write_CrossSpectra = int(self.dataOut.nPairs * pts2write)
2469 2476 blocksize += (pts2write_CrossSpectra*datatypeValue*2)
2470 2477
2471 2478 if self.dataOut.data_dc != None:
2472 2479 pts2write_DCchannels = int(self.dataOut.nChannels * self.dataOut.nHeights)
2473 2480 blocksize += (pts2write_DCchannels*datatypeValue*2)
2474 2481
2475 2482 blocksize = blocksize #* datatypeValue * 2 #FIX THIS
2476 2483
2477 2484 return blocksize
2478 2485
2479 def getDataHeader(self):
2486 def setFirstHeader(self):
2480 2487
2481 2488 """
2482 2489 Gets a copy of the First Header
2483 2490
2484 2491 Affected:
2485 2492 self.systemHeaderObj
2486 2493 self.radarControllerHeaderObj
2487 2494 self.dtype
2488 2495
2489 2496 Return:
2490 2497 None
2491 2498 """
2492 2499
2493 2500 self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
2494 2501 self.systemHeaderObj.nChannels = self.dataOut.nChannels
2495 2502 self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()
2496 2503
2497 self.getBasicHeader()
2504 self.setBasicHeader()
2498 2505
2499 2506 processingHeaderSize = 40 # bytes
2500 2507 self.processingHeaderObj.dtype = 1 # Spectra
2501 2508 self.processingHeaderObj.blockSize = self.__getBlockSize()
2502 2509 self.processingHeaderObj.profilesPerBlock = self.dataOut.nFFTPoints
2503 2510 self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
2504 2511 self.processingHeaderObj.nWindows = 1 #could be 1 or self.dataOut.processingHeaderObj.nWindows
2505 2512 self.processingHeaderObj.processFlags = self.__getProcessFlags()
2506 2513 self.processingHeaderObj.nCohInt = self.dataOut.nCohInt# Required to determine the timeInterval value
2507 2514 self.processingHeaderObj.nIncohInt = self.dataOut.nIncohInt
2508 2515 self.processingHeaderObj.totalSpectra = self.dataOut.nPairs + self.dataOut.nChannels
2516 self.processingHeaderObj.shif_fft = self.dataOut.flagShiftFFT
2509 2517
2510 2518 if self.processingHeaderObj.totalSpectra > 0:
2511 2519 channelList = []
2512 2520 for channel in range(self.dataOut.nChannels):
2513 2521 channelList.append(channel)
2514 2522 channelList.append(channel)
2515 2523
2516 2524 pairsList = []
2517 2525 for pair in self.dataOut.pairsList:
2518 2526 pairsList.append(pair[0])
2519 2527 pairsList.append(pair[1])
2520 2528 spectraComb = channelList + pairsList
2521 2529 spectraComb = numpy.array(spectraComb,dtype="u1")
2522 2530 self.processingHeaderObj.spectraComb = spectraComb
2523 2531 sizeOfSpcComb = len(spectraComb)
2524 2532 processingHeaderSize += sizeOfSpcComb
2525 2533
2526 2534 # The processing header should not have information about code
2527 2535 # if self.dataOut.code != None:
2528 2536 # self.processingHeaderObj.code = self.dataOut.code
2529 2537 # self.processingHeaderObj.nCode = self.dataOut.nCode
2530 2538 # self.processingHeaderObj.nBaud = self.dataOut.nBaud
2531 2539 # nCodeSize = 4 # bytes
2532 2540 # nBaudSize = 4 # bytes
2533 2541 # codeSize = 4 # bytes
2534 2542 # sizeOfCode = int(nCodeSize + nBaudSize + codeSize * self.dataOut.nCode * self.dataOut.nBaud)
2535 2543 # processingHeaderSize += sizeOfCode
2536 2544
2537 2545 if self.processingHeaderObj.nWindows != 0:
2538 2546 self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
2539 2547 self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
2540 2548 self.processingHeaderObj.nHeights = self.dataOut.nHeights
2541 2549 self.processingHeaderObj.samplesWin = self.dataOut.nHeights
2542 2550 sizeOfFirstHeight = 4
2543 2551 sizeOfdeltaHeight = 4
2544 2552 sizeOfnHeights = 4
2545 2553 sizeOfWindows = (sizeOfFirstHeight + sizeOfdeltaHeight + sizeOfnHeights)*self.processingHeaderObj.nWindows
2546 2554 processingHeaderSize += sizeOfWindows
2547 2555
2548 2556 self.processingHeaderObj.size = processingHeaderSize
2549 2557
2550 2558 class SpectraHeisWriter(Operation):
2551 2559 # set = None
2552 2560 setFile = None
2553 2561 idblock = None
2554 2562 doypath = None
2555 2563 subfolder = None
2556 2564
2557 2565 def __init__(self):
2558 2566 self.wrObj = FITS()
2559 2567 # self.dataOut = dataOut
2560 2568 self.nTotalBlocks=0
2561 2569 # self.set = None
2562 2570 self.setFile = None
2563 2571 self.idblock = 0
2564 2572 self.wrpath = None
2565 2573 self.doypath = None
2566 2574 self.subfolder = None
2567 2575 self.isConfig = False
2568 2576
2569 2577 def isNumber(str):
2570 2578 """
2571 2579 Checks whether the characters of a string can be converted to a number.
2572 2580
2573 2581 Exceptions:
2574 2582 If a given string cannot be converted to a number
2575 2583 Input:
2576 2584 str, string to be analyzed to decide whether it can be converted to a number or not
2577 2585
2578 2586 Return:
2579 2587 True : if the string is numeric
2580 2588 False : if it is not a numeric string
2581 2589 """
2582 2590 try:
2583 2591 float( str )
2584 2592 return True
2585 2593 except:
2586 2594 return False
2587 2595
2588 2596 def setup(self, dataOut, wrpath):
2589 2597
2590 2598 if not(os.path.exists(wrpath)):
2591 2599 os.mkdir(wrpath)
2592 2600
2593 2601 self.wrpath = wrpath
2594 2602 # self.setFile = 0
2595 2603 self.dataOut = dataOut
2596 2604
2597 2605 def putData(self):
2598 2606 name= time.localtime( self.dataOut.utctime)
2599 2607 ext=".fits"
2600 2608
2601 2609 if self.doypath == None:
2602 2610 self.subfolder = 'F%4.4d%3.3d_%d' % (name.tm_year,name.tm_yday,time.mktime(datetime.datetime.now().timetuple()))
2603 2611 self.doypath = os.path.join( self.wrpath, self.subfolder )
2604 2612 os.mkdir(self.doypath)
2605 2613
2606 2614 if self.setFile == None:
2607 2615 # self.set = self.dataOut.set
2608 2616 self.setFile = 0
2609 2617 # if self.set != self.dataOut.set:
2610 2618 ## self.set = self.dataOut.set
2611 2619 # self.setFile = 0
2612 2620
2613 2621 #make the filename
2614 2622 file = 'D%4.4d%3.3d_%3.3d%s' % (name.tm_year,name.tm_yday,self.setFile,ext)
2615 2623
2616 2624 filename = os.path.join(self.wrpath,self.subfolder, file)
2617 2625
2618 2626 idblock = numpy.array([self.idblock],dtype="int64")
2619 2627 header=self.wrObj.cFImage(idblock=idblock,
2620 2628 year=time.gmtime(self.dataOut.utctime).tm_year,
2621 2629 month=time.gmtime(self.dataOut.utctime).tm_mon,
2622 2630 day=time.gmtime(self.dataOut.utctime).tm_mday,
2623 2631 hour=time.gmtime(self.dataOut.utctime).tm_hour,
2624 2632 minute=time.gmtime(self.dataOut.utctime).tm_min,
2625 2633 second=time.gmtime(self.dataOut.utctime).tm_sec)
2626 2634
2627 2635 c=3E8
2628 2636 deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
2629 2637 freq=numpy.arange(-1*self.dataOut.nHeights/2.,self.dataOut.nHeights/2.)*(c/(2*deltaHeight*1000))
2630 2638
2631 2639 colList = []
2632 2640
2633 2641 colFreq=self.wrObj.setColF(name="freq", format=str(self.dataOut.nFFTPoints)+'E', array=freq)
2634 2642
2635 2643 colList.append(colFreq)
2636 2644
2637 2645 nchannel=self.dataOut.nChannels
2638 2646
2639 2647 for i in range(nchannel):
2640 2648 col = self.wrObj.writeData(name="PCh"+str(i+1),
2641 2649 format=str(self.dataOut.nFFTPoints)+'E',
2642 2650 data=10*numpy.log10(self.dataOut.data_spc[i,:]))
2643 2651
2644 2652 colList.append(col)
2645 2653
2646 2654 data=self.wrObj.Ctable(colList=colList)
2647 2655
2648 2656 self.wrObj.CFile(header,data)
2649 2657
2650 2658 self.wrObj.wFile(filename)
2651 2659
2652 2660 #update the setFile
2653 2661 self.setFile += 1
2654 2662 self.idblock += 1
2655 2663
2656 2664 return 1
2657 2665
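# Editor's sketch of the frequency axis built for the FITS columns in putData,
# using assumed sampling values; deltaHeight is the range spacing in km.
import numpy

c = 3e8                      # m/s
deltaHeight = 0.15           # hypothetical km between range samples
nHeights = 200
freq = numpy.arange(-1 * nHeights / 2., nHeights / 2.) * (c / (2 * deltaHeight * 1000))
# 1 MHz spacing here, spanning roughly -100 MHz to +99 MHz around zero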
2658 2666 def run(self, dataOut, **kwargs):
2659 2667
2660 2668 if not(self.isConfig):
2661 2669
2662 2670 self.setup(dataOut, **kwargs)
2663 2671 self.isConfig = True
2664 2672
2665 2673 self.putData()
2666 2674
2667 2675
2668 2676 class FITS:
2669 2677 name=None
2670 2678 format=None
2671 2679 array =None
2672 2680 data =None
2673 2681 thdulist=None
2674 2682 prihdr=None
2675 2683 hdu=None
2676 2684
2677 2685 def __init__(self):
2678 2686
2679 2687 pass
2680 2688
2681 2689 def setColF(self,name,format,array):
2682 2690 self.name=name
2683 2691 self.format=format
2684 2692 self.array=array
2685 2693 a1=numpy.array([self.array],dtype=numpy.float32)
2686 2694 self.col1 = pyfits.Column(name=self.name, format=self.format, array=a1)
2687 2695 return self.col1
2688 2696
2689 2697 # def setColP(self,name,format,data):
2690 2698 # self.name=name
2691 2699 # self.format=format
2692 2700 # self.data=data
2693 2701 # a2=numpy.array([self.data],dtype=numpy.float32)
2694 2702 # self.col2 = pyfits.Column(name=self.name, format=self.format, array=a2)
2695 2703 # return self.col2
2696 2704
2697 2705
2698 2706 def writeData(self,name,format,data):
2699 2707 self.name=name
2700 2708 self.format=format
2701 2709 self.data=data
2702 2710 a2=numpy.array([self.data],dtype=numpy.float32)
2703 2711 self.col2 = pyfits.Column(name=self.name, format=self.format, array=a2)
2704 2712 return self.col2
2705 2713
2706 2714 def cFImage(self,idblock,year,month,day,hour,minute,second):
2707 2715 self.hdu= pyfits.PrimaryHDU(idblock)
2708 2716 self.hdu.header.set("Year",year)
2709 2717 self.hdu.header.set("Month",month)
2710 2718 self.hdu.header.set("Day",day)
2711 2719 self.hdu.header.set("Hour",hour)
2712 2720 self.hdu.header.set("Minute",minute)
2713 2721 self.hdu.header.set("Second",second)
2714 2722 return self.hdu
2715 2723
2716 2724
2717 2725 def Ctable(self,colList):
2718 2726 self.cols=pyfits.ColDefs(colList)
2719 2727 self.tbhdu = pyfits.new_table(self.cols)
2720 2728 return self.tbhdu
2721 2729
2722 2730
2723 2731 def CFile(self,hdu,tbhdu):
2724 2732 self.thdulist=pyfits.HDUList([hdu,tbhdu])
2725 2733
2726 2734 def wFile(self,filename):
2727 2735 if os.path.isfile(filename):
2728 2736 os.remove(filename)
2729 2737 self.thdulist.writeto(filename)
@@ -1,530 +1,534
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JROHeaderIO.py 151 2012-10-31 19:00:51Z murco $
5 5 '''
6 6 import sys
7 7 import numpy
8 8 import copy
9 9 import datetime
10 10
11 11 class Header:
12 12
13 13 def __init__(self):
14 14 raise
15 15
16 16 def copy(self):
17 17 return copy.deepcopy(self)
18 18
19 19 def read():
20 20 pass
21 21
22 22 def write():
23 23 pass
24 24
25 25 def printInfo(self):
26 26
27 27 print "#"*100
28 28 print self.__class__.__name__.upper()
29 29 print "#"*100
30 30 for key in self.__dict__.keys():
31 31 print "%s = %s" %(key, self.__dict__[key])
32 32
33 33 class BasicHeader(Header):
34 34
35 35 size = None
36 36 version = None
37 37 dataBlock = None
38 38 utc = None
39 ltc = None
39 40 miliSecond = None
40 41 timeZone = None
41 42 dstFlag = None
42 43 errorCount = None
43 44 struct = None
44 45 datatime = None
45 46
46 47 __LOCALTIME = None
47 48
48 def __init__(self, localtime=0):
49 def __init__(self, useLocalTime=True):
49 50
50 51 self.size = 0
51 52 self.version = 0
52 53 self.dataBlock = 0
53 54 self.utc = 0
54 55 self.miliSecond = 0
55 56 self.timeZone = 0
56 57 self.dstFlag = 0
57 58 self.errorCount = 0
58 59 self.struct = numpy.dtype([
59 60 ('nSize','<u4'),
60 61 ('nVersion','<u2'),
61 62 ('nDataBlockId','<u4'),
62 63 ('nUtime','<u4'),
63 64 ('nMilsec','<u2'),
64 65 ('nTimezone','<i2'),
65 66 ('nDstflag','<i2'),
66 67 ('nErrorCount','<u4')
67 68 ])
68 69
69 self.__LOCALTIME = localtime
70 self.useLocalTime = useLocalTime
70 71
71 72 def read(self, fp):
72 73 try:
73 74 header = numpy.fromfile(fp, self.struct,1)
74 75 self.size = int(header['nSize'][0])
75 76 self.version = int(header['nVersion'][0])
76 77 self.dataBlock = int(header['nDataBlockId'][0])
77 78 self.utc = int(header['nUtime'][0])
78 79 self.miliSecond = int(header['nMilsec'][0])
79 80 self.timeZone = int(header['nTimezone'][0])
80 81 self.dstFlag = int(header['nDstflag'][0])
81 82 self.errorCount = int(header['nErrorCount'][0])
82 83
83 self.utc += self.__LOCALTIME
84 self.ltc = self.utc
84 85
85 self.datatime = datetime.datetime.utcfromtimestamp(self.utc)
86 if self.useLocalTime:
87 self.ltc -= self.timeZone*60
88
89 self.datatime = datetime.datetime.utcfromtimestamp(self.ltc)
86 90
87 91 except Exception, e:
88 92 print "BasicHeader: "
89 93 print e
90 94 return 0
91 95
92 96 return 1
93 97
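# Editor's sketch of the local-time handling now done in read(): the nTimezone
# field is taken as minutes, so local seconds = utc - timeZone*60 (the values
# below are hypothetical).
import datetime

utc = 1355313600               # seconds since epoch, as read from nUtime
timeZone = 300                 # e.g. UTC-5 stored as +300 minutes
ltc = utc - timeZone * 60
print datetime.datetime.utcfromtimestamp(ltc)   # block timestamp in local time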
94 98 def write(self, fp):
95 self.utc -= self.__LOCALTIME
99
96 100 headerTuple = (self.size,self.version,self.dataBlock,self.utc,self.miliSecond,self.timeZone,self.dstFlag,self.errorCount)
97 101 header = numpy.array(headerTuple,self.struct)
98 102 header.tofile(fp)
99 103
100 104 return 1
101 105
102 106 class SystemHeader(Header):
103 107
104 108 size = None
105 109 nSamples = None
106 110 nProfiles = None
107 111 nChannels = None
108 112 adcResolution = None
109 113 pciDioBusWidth = None
110 114 struct = None
111 115
112 116 def __init__(self):
113 117 self.size = 0
114 118 self.nSamples = 0
115 119 self.nProfiles = 0
116 120 self.nChannels = 0
117 121 self.adcResolution = 0
118 122 self.pciDioBusWidth = 0
119 123 self.struct = numpy.dtype([
120 124 ('nSize','<u4'),
121 125 ('nNumSamples','<u4'),
122 126 ('nNumProfiles','<u4'),
123 127 ('nNumChannels','<u4'),
124 128 ('nADCResolution','<u4'),
125 129 ('nPCDIOBusWidth','<u4'),
126 130 ])
127 131
128 132
129 133 def read(self, fp):
130 134 try:
131 135 header = numpy.fromfile(fp,self.struct,1)
132 136 self.size = header['nSize'][0]
133 137 self.nSamples = header['nNumSamples'][0]
134 138 self.nProfiles = header['nNumProfiles'][0]
135 139 self.nChannels = header['nNumChannels'][0]
136 140 self.adcResolution = header['nADCResolution'][0]
137 141 self.pciDioBusWidth = header['nPCDIOBusWidth'][0]
138 142
139 143 except Exception, e:
140 144 print "SystemHeader: " + e
141 145 return 0
142 146
143 147 return 1
144 148
145 149 def write(self, fp):
146 150 headerTuple = (self.size,self.nSamples,self.nProfiles,self.nChannels,self.adcResolution,self.pciDioBusWidth)
147 151 header = numpy.array(headerTuple,self.struct)
148 152 header.tofile(fp)
149 153
150 154 return 1
151 155
152 156 class RadarControllerHeader(Header):
153 157
154 158 size = None
155 159 expType = None
156 160 nTx = None
157 161 ipp = None
158 162 txA = None
159 163 txB = None
160 164 nWindows = None
161 165 numTaus = None
162 166 codeType = None
163 167 line6Function = None
164 168 line5Function = None
165 169 fClock = None
166 170 prePulseBefore = None
167 171 prePulserAfter = None
168 172 rangeIpp = None
169 173 rangeTxA = None
170 174 rangeTxB = None
171 175 struct = None
172 176
173 177 def __init__(self):
174 178 self.size = 0
175 179 self.expType = 0
176 180 self.nTx = 0
177 181 self.ipp = 0
178 182 self.txA = 0
179 183 self.txB = 0
180 184 self.nWindows = 0
181 185 self.numTaus = 0
182 186 self.codeType = 0
183 187 self.line6Function = 0
184 188 self.line5Function = 0
185 189 self.fClock = 0
186 190 self.prePulseBefore = 0
187 191 self.prePulserAfter = 0
188 192 self.rangeIpp = 0
189 193 self.rangeTxA = 0
190 194 self.rangeTxB = 0
191 195 self.struct = numpy.dtype([
192 196 ('nSize','<u4'),
193 197 ('nExpType','<u4'),
194 198 ('nNTx','<u4'),
195 199 ('fIpp','<f4'),
196 200 ('fTxA','<f4'),
197 201 ('fTxB','<f4'),
198 202 ('nNumWindows','<u4'),
199 203 ('nNumTaus','<u4'),
200 204 ('nCodeType','<u4'),
201 205 ('nLine6Function','<u4'),
202 206 ('nLine5Function','<u4'),
203 207 ('fClock','<f4'),
204 208 ('nPrePulseBefore','<u4'),
205 209 ('nPrePulseAfter','<u4'),
206 210 ('sRangeIPP','<a20'),
207 211 ('sRangeTxA','<a20'),
208 212 ('sRangeTxB','<a20'),
209 213 ])
210 214
211 215 self.samplingWindowStruct = numpy.dtype([('h0','<f4'),('dh','<f4'),('nsa','<u4')])
212 216
213 217 self.samplingWindow = None
214 218 self.nHeights = None
215 219 self.firstHeight = None
216 220 self.deltaHeight = None
217 221 self.samplesWin = None
218 222
219 223 self.nCode = None
220 224 self.nBaud = None
221 225 self.code = None
222 226 self.flip1 = None
223 227 self.flip2 = None
224 228
225 229 self.dynamic = numpy.array([],numpy.dtype('byte'))
226 230
227 231
228 232 def read(self, fp):
229 233 try:
230 234 startFp = fp.tell()
231 235 header = numpy.fromfile(fp,self.struct,1)
232 236 self.size = int(header['nSize'][0])
233 237 self.expType = int(header['nExpType'][0])
234 238 self.nTx = int(header['nNTx'][0])
235 239 self.ipp = float(header['fIpp'][0])
236 240 self.txA = float(header['fTxA'][0])
237 241 self.txB = float(header['fTxB'][0])
238 242 self.nWindows = int(header['nNumWindows'][0])
239 243 self.numTaus = int(header['nNumTaus'][0])
240 244 self.codeType = int(header['nCodeType'][0])
241 245 self.line6Function = int(header['nLine6Function'][0])
242 246 self.line5Function = int(header['nLine5Function'][0])
243 247 self.fClock = float(header['fClock'][0])
244 248 self.prePulseBefore = int(header['nPrePulseBefore'][0])
245 249 self.prePulserAfter = int(header['nPrePulseAfter'][0])
246 250 self.rangeIpp = header['sRangeIPP'][0]
247 251 self.rangeTxA = header['sRangeTxA'][0]
248 252 self.rangeTxB = header['sRangeTxB'][0]
249 253 # jump Dynamic Radar Controller Header
250 254 jumpFp = self.size - 116
251 255 self.dynamic = numpy.fromfile(fp,numpy.dtype('byte'),jumpFp)
252 256 #pointer backward to dynamic header and read
253 257 backFp = fp.tell() - jumpFp
254 258 fp.seek(backFp)
255 259
256 260 self.samplingWindow = numpy.fromfile(fp,self.samplingWindowStruct,self.nWindows)
257 261 self.nHeights = int(numpy.sum(self.samplingWindow['nsa']))
258 262 self.firstHeight = self.samplingWindow['h0']
259 263 self.deltaHeight = self.samplingWindow['dh']
260 264 self.samplesWin = self.samplingWindow['nsa']
261 265
262 266 self.Taus = numpy.fromfile(fp,'<f4',self.numTaus)
263 267
264 268 if self.codeType != 0:
265 269 self.nCode = int(numpy.fromfile(fp,'<u4',1))
266 270 self.nBaud = int(numpy.fromfile(fp,'<u4',1))
267 271 self.code = numpy.empty([self.nCode,self.nBaud],dtype='u1')
268 272 tempList = []
269 273 for ic in range(self.nCode):
270 274 temp = numpy.fromfile(fp,'u1',4*int(numpy.ceil(self.nBaud/32.)))
271 275 tempList.append(temp)
272 276 self.code[ic] = numpy.unpackbits(temp[::-1])[-1*self.nBaud:]
273 277 self.code = 2.0*self.code - 1.0
274 278
275 279 if self.line5Function == RCfunction.FLIP:
276 280 self.flip1 = numpy.fromfile(fp,'<u4',1)
277 281
278 282 if self.line6Function == RCfunction.FLIP:
279 283 self.flip2 = numpy.fromfile(fp,'<u4',1)
280 284
281 285 endFp = self.size + startFp
282 286 jumpFp = endFp - fp.tell()
283 287 if jumpFp > 0:
284 288 fp.seek(jumpFp)
285 289
286 290 except Exception, e:
287 291 print "RadarControllerHeader: " + e
288 292 return 0
289 293
290 294 return 1
291 295
292 296 def write(self, fp):
293 297 headerTuple = (self.size,
294 298 self.expType,
295 299 self.nTx,
296 300 self.ipp,
297 301 self.txA,
298 302 self.txB,
299 303 self.nWindows,
300 304 self.numTaus,
301 305 self.codeType,
302 306 self.line6Function,
303 307 self.line5Function,
304 308 self.fClock,
305 309 self.prePulseBefore,
306 310 self.prePulserAfter,
307 311 self.rangeIpp,
308 312 self.rangeTxA,
309 313 self.rangeTxB)
310 314
311 315 header = numpy.array(headerTuple,self.struct)
312 316 header.tofile(fp)
313 317
314 318 dynamic = self.dynamic
315 319 dynamic.tofile(fp)
316 320
317 321 return 1
318 322
319 323
320 324
321 325 class ProcessingHeader(Header):
322 326
323 327 size = None
324 328 dtype = None
325 329 blockSize = None
326 330 profilesPerBlock = None
327 331 dataBlocksPerFile = None
328 332 nWindows = None
329 333 processFlags = None
330 334 nCohInt = None
331 335 nIncohInt = None
332 336 totalSpectra = None
333 337 struct = None
334 338 flag_dc = None
335 339 flag_cspc = None
336 340
337 341 def __init__(self):
338 342 self.size = 0
339 343 self.dtype = 0
340 344 self.blockSize = 0
341 345 self.profilesPerBlock = 0
342 346 self.dataBlocksPerFile = 0
343 347 self.nWindows = 0
344 348 self.processFlags = 0
345 349 self.nCohInt = 0
346 350 self.nIncohInt = 0
347 351 self.totalSpectra = 0
348 352 self.struct = numpy.dtype([
349 353 ('nSize','<u4'),
350 354 ('nDataType','<u4'),
351 355 ('nSizeOfDataBlock','<u4'),
352 356 ('nProfilesperBlock','<u4'),
353 357 ('nDataBlocksperFile','<u4'),
354 358 ('nNumWindows','<u4'),
355 359 ('nProcessFlags','<u4'),
356 360 ('nCoherentIntegrations','<u4'),
357 361 ('nIncoherentIntegrations','<u4'),
358 362 ('nTotalSpectra','<u4')
359 363 ])
360 364 self.samplingWindow = 0
361 365 self.structSamplingWindow = numpy.dtype([('h0','<f4'),('dh','<f4'),('nsa','<u4')])
362 366 self.nHeights = 0
363 367 self.firstHeight = 0
364 368 self.deltaHeight = 0
365 369 self.samplesWin = 0
366 370 self.spectraComb = 0
367 371 self.nCode = None
368 372 self.code = None
369 373 self.nBaud = None
370 374 self.shif_fft = False
371 375 self.flag_dc = False
372 376 self.flag_cspc = False
373 377
374 378 def read(self, fp):
375 379 try:
376 380 header = numpy.fromfile(fp,self.struct,1)
377 381 self.size = int(header['nSize'][0])
378 382 self.dtype = int(header['nDataType'][0])
379 383 self.blockSize = int(header['nSizeOfDataBlock'][0])
380 384 self.profilesPerBlock = int(header['nProfilesperBlock'][0])
381 385 self.dataBlocksPerFile = int(header['nDataBlocksperFile'][0])
382 386 self.nWindows = int(header['nNumWindows'][0])
383 387 self.processFlags = header['nProcessFlags']
384 388 self.nCohInt = int(header['nCoherentIntegrations'][0])
385 389 self.nIncohInt = int(header['nIncoherentIntegrations'][0])
386 390 self.totalSpectra = int(header['nTotalSpectra'][0])
387 391 self.samplingWindow = numpy.fromfile(fp,self.structSamplingWindow,self.nWindows)
388 392 self.nHeights = int(numpy.sum(self.samplingWindow['nsa']))
389 393 self.firstHeight = float(self.samplingWindow['h0'][0])
390 394 self.deltaHeight = float(self.samplingWindow['dh'][0])
391 395 self.samplesWin = self.samplingWindow['nsa']
392 396 self.spectraComb = numpy.fromfile(fp,'u1',2*self.totalSpectra)
393 397
394 398 if ((self.processFlags & PROCFLAG.DEFINE_PROCESS_CODE) == PROCFLAG.DEFINE_PROCESS_CODE):
395 399 self.nCode = int(numpy.fromfile(fp,'<u4',1))
396 400 self.nBaud = int(numpy.fromfile(fp,'<u4',1))
397 401 self.code = numpy.fromfile(fp,'<f4',self.nCode*self.nBaud).reshape(self.nCode,self.nBaud)
398 402
399 403 if ((self.processFlags & PROCFLAG.SHIFT_FFT_DATA) == PROCFLAG.SHIFT_FFT_DATA):
400 404 self.shif_fft = True
401 405 else:
402 406 self.shif_fft = False
403 407
404 408 if ((self.processFlags & PROCFLAG.SAVE_CHANNELS_DC) == PROCFLAG.SAVE_CHANNELS_DC):
405 409 self.flag_dc = True
406 410
407 411 nChannels = 0
408 412 nPairs = 0
409 413 pairList = []
410 414
411 415 for i in range( 0, self.totalSpectra*2, 2 ):
412 416 if self.spectraComb[i] == self.spectraComb[i+1]:
 413 417                 nChannels = nChannels + 1 #pair of equal channels (self-spectrum)
 414 418             else:
 415 419                 nPairs = nPairs + 1 #pair of different channels (cross-spectrum)
416 420 pairList.append( (self.spectraComb[i], self.spectraComb[i+1]) )
417 421
418 422 self.flag_cspc = False
419 423 if nPairs > 0:
420 424 self.flag_cspc = True
421 425
422 426 except Exception, e:
 423 427             print "ProcessingHeader: " + str(e)
424 428 return 0
425 429
426 430 return 1
427 431
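
The spectraComb array read above is a flattened list of channel pairs, two entries per spectrum: equal entries mark a self-spectrum and different entries mark a cross-spectrum pair, which is how flag_cspc ends up set. A standalone sketch of that parsing, with an assumed combination of three channels and one cross pair:

import numpy

# hypothetical spectraComb for totalSpectra = 4: (0,0), (1,1), (2,2), (0,1)
spectraComb = numpy.array([0, 0, 1, 1, 2, 2, 0, 1], dtype='u1')

nChannels = 0
nPairs = 0
pairList = []

for i in range(0, len(spectraComb), 2):
    if spectraComb[i] == spectraComb[i+1]:
        nChannels += 1                                        # self-spectrum
    else:
        nPairs += 1                                           # cross-spectrum
        pairList.append((spectraComb[i], spectraComb[i+1]))

print nChannels, nPairs, pairList                             # -> 3 1 [(0, 1)]
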
428 432 def write(self, fp):
429 433 headerTuple = (self.size,
430 434 self.dtype,
431 435 self.blockSize,
432 436 self.profilesPerBlock,
433 437 self.dataBlocksPerFile,
434 438 self.nWindows,
435 439 self.processFlags,
436 440 self.nCohInt,
437 441 self.nIncohInt,
438 442 self.totalSpectra)
439 443
440 444 header = numpy.array(headerTuple,self.struct)
441 445 header.tofile(fp)
442 446
443 447 if self.nWindows != 0:
444 448 sampleWindowTuple = (self.firstHeight,self.deltaHeight,self.samplesWin)
445 449 samplingWindow = numpy.array(sampleWindowTuple,self.structSamplingWindow)
446 450 samplingWindow.tofile(fp)
447 451
448 452
449 453 if self.totalSpectra != 0:
450 454 spectraComb = numpy.array([],numpy.dtype('u1'))
451 455 spectraComb = self.spectraComb
452 456 spectraComb.tofile(fp)
453 457
454 458 # if self.processFlags & PROCFLAG.DEFINE_PROCESS_CODE == PROCFLAG.DEFINE_PROCESS_CODE:
 455 459 #        nCode = numpy.array([self.nCode], numpy.dtype('u4')) #Test with a data file that stores a code; this case has not been tested yet
456 460 # nCode.tofile(fp)
457 461 #
458 462 # nBaud = numpy.array([self.nBaud], numpy.dtype('u4'))
459 463 # nBaud.tofile(fp)
460 464 #
461 465 # code = self.code.reshape(self.nCode*self.nBaud)
462 466 # code = code.astype(numpy.dtype('<f4'))
463 467 # code.tofile(fp)
464 468
465 469 return 1
466 470
467 471 class RCfunction:
468 472 NONE=0
469 473 FLIP=1
470 474 CODE=2
471 475 SAMPLING=3
472 476 LIN6DIV256=4
473 477 SYNCHRO=5
474 478
475 479 class nCodeType:
476 480 NONE=0
477 481 USERDEFINE=1
478 482 BARKER2=2
479 483 BARKER3=3
480 484 BARKER4=4
481 485 BARKER5=5
482 486 BARKER7=6
483 487 BARKER11=7
484 488 BARKER13=8
485 489 AC128=9
486 490 COMPLEMENTARYCODE2=10
487 491 COMPLEMENTARYCODE4=11
488 492 COMPLEMENTARYCODE8=12
489 493 COMPLEMENTARYCODE16=13
490 494 COMPLEMENTARYCODE32=14
491 495 COMPLEMENTARYCODE64=15
492 496 COMPLEMENTARYCODE128=16
493 497 CODE_BINARY28=17
494 498
495 499 class PROCFLAG:
496 500 COHERENT_INTEGRATION = numpy.uint32(0x00000001)
497 501 DECODE_DATA = numpy.uint32(0x00000002)
498 502 SPECTRA_CALC = numpy.uint32(0x00000004)
499 503 INCOHERENT_INTEGRATION = numpy.uint32(0x00000008)
500 504 POST_COHERENT_INTEGRATION = numpy.uint32(0x00000010)
501 505 SHIFT_FFT_DATA = numpy.uint32(0x00000020)
502 506
503 507 DATATYPE_CHAR = numpy.uint32(0x00000040)
504 508 DATATYPE_SHORT = numpy.uint32(0x00000080)
505 509 DATATYPE_LONG = numpy.uint32(0x00000100)
506 510 DATATYPE_INT64 = numpy.uint32(0x00000200)
507 511 DATATYPE_FLOAT = numpy.uint32(0x00000400)
508 512 DATATYPE_DOUBLE = numpy.uint32(0x00000800)
509 513
510 514 DATAARRANGE_CONTIGUOUS_CH = numpy.uint32(0x00001000)
511 515 DATAARRANGE_CONTIGUOUS_H = numpy.uint32(0x00002000)
512 516 DATAARRANGE_CONTIGUOUS_P = numpy.uint32(0x00004000)
513 517
514 518 SAVE_CHANNELS_DC = numpy.uint32(0x00008000)
515 519 DEFLIP_DATA = numpy.uint32(0x00010000)
516 520 DEFINE_PROCESS_CODE = numpy.uint32(0x00020000)
517 521
518 522 ACQ_SYS_NATALIA = numpy.uint32(0x00040000)
519 523 ACQ_SYS_ECHOTEK = numpy.uint32(0x00080000)
520 524 ACQ_SYS_ADRXD = numpy.uint32(0x000C0000)
521 525 ACQ_SYS_JULIA = numpy.uint32(0x00100000)
522 526 ACQ_SYS_XXXXXX = numpy.uint32(0x00140000)
523 527
524 528 EXP_NAME_ESP = numpy.uint32(0x00200000)
525 529 CHANNEL_NAMES_ESP = numpy.uint32(0x00400000)
526 530
527 531 OPERATION_MASK = numpy.uint32(0x0000003F)
528 532 DATATYPE_MASK = numpy.uint32(0x00000FC0)
529 533 DATAARRANGE_MASK = numpy.uint32(0x00007000)
530 534 ACQ_SYS_MASK = numpy.uint32(0x001C0000) No newline at end of file
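
ProcessingHeader.read() treats processFlags as a bit field built from these constants: single bits are tested with a bitwise AND, and the *_MASK values isolate whole groups (operations, data type, data arrangement, acquisition system). A minimal sketch of both uses, for a hypothetical flag word and using the PROCFLAG class defined above:

# hypothetical flag word: decoded data, FFT-shifted spectra, float samples
flags = PROCFLAG.DECODE_DATA | PROCFLAG.SHIFT_FFT_DATA | PROCFLAG.DATATYPE_FLOAT

if (flags & PROCFLAG.SHIFT_FFT_DATA) == PROCFLAG.SHIFT_FFT_DATA:
    print "spectra are FFT-shifted"

datatype_bits = flags & PROCFLAG.DATATYPE_MASK
print datatype_bits == PROCFLAG.DATATYPE_FLOAT               # -> True
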
@@ -1,1621 +1,1627
1 1 '''
2 2
3 3 $Author: dsuarez $
4 4 $Id: Processor.py 1 2012-11-12 18:56:07Z dsuarez $
5 5 '''
6 6 import os
7 7 import numpy
8 8 import datetime
9 9 import time
10 10
11 11 from jrodata import *
12 12 from jrodataIO import *
13 13 from jroplot import *
14 14
15 15 try:
16 16 import cfunctions
17 17 except:
18 18 pass
19 19
20 20 class ProcessingUnit:
21 21
22 22 """
 23 23     This is the base class for data processing.
 24 24
 25 25     It provides the "call" method to invoke operations. An operation can be:
 26 26         - an internal method (callMethod)
 27 27         - an Operation object (callObject). Before being called, these objects
 28 28         have to be registered with the "addOperation" method.
29 29
30 30 """
 31 31     # input data object (Voltage, Spectra or Correlation)
32 32 dataIn = None
33 33
 34 34     # output data object (Voltage, Spectra or Correlation)
35 35 dataOut = None
36 36
37 37
38 38 objectDict = None
39 39
40 40 def __init__(self):
41 41
42 42 self.objectDict = {}
43 43
44 44 def init(self):
45 45
46 46 raise ValueError, "Not implemented"
47 47
48 48 def addOperation(self, object, objId):
49 49
50 50 """
 51 51         Adds the object "object" to the dictionary "self.objectDict" and returns the
 52 52         identifier associated with this object.
 53 53
 54 54         Input:
 55 55
 56 56             object    :    object of class "Operation"
 57 57
 58 58         Return:
 59 59
 60 60             objId    :    identifier of the object, needed to execute the operation
61 61 """
62 62
63 63 self.objectDict[objId] = object
64 64
65 65 return objId
66 66
67 67 def operation(self, **kwargs):
68 68
69 69 """
 70 70         Direct operation on the data (dataOut.data). The attribute values of the
 71 71         dataOut object must be updated.
 72 72
 73 73         Input:
 74 74
 75 75             **kwargs    :    dictionary of arguments for the function to execute
76 76 """
77 77
78 78 raise ValueError, "ImplementedError"
79 79
80 80 def callMethod(self, name, **kwargs):
81 81
82 82 """
 83 83         Executes the method named "name" of this class with the arguments **kwargs.
 84 84
 85 85         Input:
 86 86             name        :    name of the method to execute
 87 87
 88 88             **kwargs    :    dictionary with the argument names and values for the function to execute.
89 89
90 90 """
91 91 if name != 'run':
92 92
93 93 if name == 'init' and self.dataIn.isEmpty():
94 94 self.dataOut.flagNoData = True
95 95 return False
96 96
97 97 if name != 'init' and self.dataOut.isEmpty():
98 98 return False
99 99
100 100 methodToCall = getattr(self, name)
101 101
102 102 methodToCall(**kwargs)
103 103
104 104 if name != 'run':
105 105 return True
106 106
107 107 if self.dataOut.isEmpty():
108 108 return False
109 109
110 110 return True
111 111
112 112 def callObject(self, objId, **kwargs):
113 113
114 114 """
 115 115         Executes the operation associated with the object identifier "objId"
 116 116
 117 117         Input:
 118 118
 119 119             objId        :    identifier of the object to execute
 120 120
 121 121             **kwargs    :    dictionary with the argument names and values for the function to execute.
122 122
123 123 Return:
124 124
125 125 None
126 126 """
127 127
128 128 if self.dataOut.isEmpty():
129 129 return False
130 130
131 131 object = self.objectDict[objId]
132 132
133 133 object.run(self.dataOut, **kwargs)
134 134
135 135 return True
136 136
137 137 def call(self, operationConf, **kwargs):
138 138
139 139 """
 140 140         Returns True if it executed the operation "operationConf.name" with the
 141 141         arguments "**kwargs"; False if the operation was not executed.
 142 142         The operation can be one of two types:
 143 143
 144 144             1. A method of this class itself:
 145 145
 146 146                 operation.type = "self"
 147 147
 148 148             2. The "run" method of an Operation object, or of a class derived from it:
 149 149                 operation.type = "other"
 150 150
 151 151                 This Operation object must have been registered beforehand with the
 152 152                 "addOperation" method and is identified by operation.id.
 153 153
 154 154
 155 155
 156 156
 157 157         Input:
 158 158
 159 159             operationConf    :    operation object with the attributes: name, type and id.
160 160
161 161 """
162 162
163 163 if operationConf.type == 'self':
164 164 sts = self.callMethod(operationConf.name, **kwargs)
165 165
166 166 if operationConf.type == 'other':
167 167 sts = self.callObject(operationConf.id, **kwargs)
168 168
169 169 return sts
170 170
171 171 def setInput(self, dataIn):
172 172
173 173 self.dataIn = dataIn
174 174
175 175 def getOutput(self):
176 176
177 177 return self.dataOut
178 178
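
Putting the pieces together, a controller wires units with setInput()/getOutput(), registers Operation objects with addOperation() and then dispatches everything through call(). The sketch below only illustrates that flow; the OperationConf class, the voltageReaderUnit object and the ids are assumptions, not part of this module:

class OperationConf:
    # hypothetical stand-in for the controller's operation configuration
    def __init__(self, name, type, id=None):
        self.name = name
        self.type = type
        self.id = id

procUnit = VoltageProc()                                     # defined further below
procUnit.setInput(voltageReaderUnit.getOutput())             # voltageReaderUnit is assumed to exist

opId = procUnit.addOperation(CohInt(), objId=1)

procUnit.call(OperationConf('init', 'self'))                 # copies dataIn into dataOut
procUnit.call(OperationConf('CohInt', 'other', opId), n=4)   # runs the registered CohInt operation
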
179 179 class Operation():
180 180
181 181 """
 182 182     Base class for the additional operations that can be added to a ProcessingUnit and
 183 183     that need to accumulate prior information about the data to be processed. Preferably
 184 184     use an accumulation buffer inside this class.
 185 185
 186 186     Example: coherent integration, which needs the previous n profiles (buffer).
187 187
188 188 """
189 189
190 190 __buffer = None
191 191 __isConfig = False
192 192
193 193 def __init__(self):
194 194
195 195 pass
196 196
197 197 def run(self, dataIn, **kwargs):
198 198
199 199 """
 200 200         Performs the required operations on dataIn.data and updates the attributes of the dataIn object.
 201 201
 202 202         Input:
 203 203
 204 204             dataIn    :    object of type JROData
 205 205
 206 206         Return:
 207 207
 208 208             None
 209 209
 210 210         Affected:
 211 211             __buffer    :    data reception buffer.
212 212
213 213 """
214 214
215 215 raise ValueError, "ImplementedError"
216 216
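
In practice a new operation only needs to subclass Operation and implement run(dataOut, **kwargs), modifying dataOut in place and clearing flagNoData when it produces output. A minimal, hypothetical example (ScaleData is not part of this module):

class ScaleData(Operation):
    # hypothetical operation: multiplies the voltage data by a constant factor

    def __init__(self):
        pass

    def run(self, dataOut, factor=1.0):
        dataOut.data = dataOut.data*factor
        dataOut.flagNoData = False
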
217 217 class VoltageProc(ProcessingUnit):
218 218
219 219
220 220 def __init__(self):
221 221
222 222 self.objectDict = {}
223 223 self.dataOut = Voltage()
224 224 self.flip = 1
225 225
226 226 def init(self):
227 227
228 228 self.dataOut.copy(self.dataIn)
 229 229         # There is no need to copy the dataIn attributes on every init();
 230 230         # the copy should be made once per new data block
231 231
232 232 def selectChannels(self, channelList):
233 233
234 234 channelIndexList = []
235 235
236 236 for channel in channelList:
237 237 index = self.dataOut.channelList.index(channel)
238 238 channelIndexList.append(index)
239 239
240 240 self.selectChannelsByIndex(channelIndexList)
241 241
242 242 def selectChannelsByIndex(self, channelIndexList):
243 243 """
 244 244         Selects a block of data by channel, according to channelIndexList
 245 245
 246 246         Input:
 247 247             channelIndexList    :    plain list of channel indexes to select, e.g. [2,3,7]
248 248
249 249 Affected:
250 250 self.dataOut.data
251 251 self.dataOut.channelIndexList
252 252 self.dataOut.nChannels
253 253 self.dataOut.m_ProcessingHeader.totalSpectra
254 254 self.dataOut.systemHeaderObj.numChannels
255 255 self.dataOut.m_ProcessingHeader.blockSize
256 256
257 257 Return:
258 258 None
259 259 """
260 260
261 261 for channelIndex in channelIndexList:
262 262 if channelIndex not in self.dataOut.channelIndexList:
263 263 print channelIndexList
264 264 raise ValueError, "The value %d in channelIndexList is not valid" %channelIndex
265 265
266 266 nChannels = len(channelIndexList)
267 267
268 268 data = self.dataOut.data[channelIndexList,:]
269 269
270 270 self.dataOut.data = data
271 271 self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
272 272 # self.dataOut.nChannels = nChannels
273 273
274 274 return 1
275 275
276 276 def selectHeights(self, minHei, maxHei):
277 277 """
 278 278         Selects a block of data by height values, within the range
 279 279         minHei <= height <= maxHei
 280 280
 281 281         Input:
 282 282             minHei    :    minimum height value to consider
 283 283             maxHei    :    maximum height value to consider
 284 284
 285 285         Affected:
 286 286             Several values are changed indirectly through the selectHeightsByIndex method
 287 287
 288 288         Return:
 289 289             1 if the method executed successfully, otherwise 0
290 290 """
291 291 if (minHei < self.dataOut.heightList[0]) or (minHei > maxHei):
292 292 raise ValueError, "some value in (%d,%d) is not valid" % (minHei, maxHei)
293 293
294 294 if (maxHei > self.dataOut.heightList[-1]):
295 295 maxHei = self.dataOut.heightList[-1]
296 296 # raise ValueError, "some value in (%d,%d) is not valid" % (minHei, maxHei)
297 297
298 298 minIndex = 0
299 299 maxIndex = 0
300 300 heights = self.dataOut.heightList
301 301
302 302 inda = numpy.where(heights >= minHei)
303 303 indb = numpy.where(heights <= maxHei)
304 304
305 305 try:
306 306 minIndex = inda[0][0]
307 307 except:
308 308 minIndex = 0
309 309
310 310 try:
311 311 maxIndex = indb[0][-1]
312 312 except:
313 313 maxIndex = len(heights)
314 314
315 315 self.selectHeightsByIndex(minIndex, maxIndex)
316 316
317 317 return 1
318 318
319 319
320 320 def selectHeightsByIndex(self, minIndex, maxIndex):
321 321 """
 322 322         Selects a block of data by height indexes, within the range
 323 323         minIndex <= index <= maxIndex
 324 324
 325 325         Input:
 326 326             minIndex    :    minimum height index to consider
 327 327             maxIndex    :    maximum height index to consider
 328 328
 329 329         Affected:
 330 330             self.dataOut.data
 331 331             self.dataOut.heightList
 332 332
 333 333         Return:
 334 334             1 if the method executed successfully, otherwise 0
335 335 """
336 336
337 337 if (minIndex < 0) or (minIndex > maxIndex):
338 338 raise ValueError, "some value in (%d,%d) is not valid" % (minIndex, maxIndex)
339 339
340 340 if (maxIndex >= self.dataOut.nHeights):
341 341 maxIndex = self.dataOut.nHeights-1
342 342 # raise ValueError, "some value in (%d,%d) is not valid" % (minIndex, maxIndex)
343 343
344 344 nHeights = maxIndex - minIndex + 1
345 345
346 346 #voltage
347 347 data = self.dataOut.data[:,minIndex:maxIndex+1]
348 348
349 349 firstHeight = self.dataOut.heightList[minIndex]
350 350
351 351 self.dataOut.data = data
352 352 self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex+1]
353 353
354 354 return 1
355 355
356 356
357 357 def filterByHeights(self, window):
358 358 deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
359 359
360 360 if window == None:
361 361 window = self.dataOut.radarControllerHeaderObj.txA / deltaHeight
362 362
363 363 newdelta = deltaHeight * window
364 364 r = self.dataOut.data.shape[1] % window
365 365 buffer = self.dataOut.data[:,0:self.dataOut.data.shape[1]-r]
366 366 buffer = buffer.reshape(self.dataOut.data.shape[0],self.dataOut.data.shape[1]/window,window)
367 367 buffer = numpy.sum(buffer,2)
368 368 self.dataOut.data = buffer
369 369 self.dataOut.heightList = numpy.arange(self.dataOut.heightList[0],newdelta*self.dataOut.nHeights/window-newdelta,newdelta)
370 370 self.dataOut.windowOfFilter = window
371 371
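
filterByHeights() decimates in range by summing groups of "window" consecutive heights: trailing samples that do not fill a whole window are discarded, the rest are reshaped to (channels, heights/window, window) and summed over the last axis. A small numeric sketch of that reshape-and-sum with assumed sizes:

import numpy

window = 4
data = numpy.arange(2*10.).reshape(2, 10)      # 2 channels, 10 heights (hypothetical)

r = data.shape[1] % window                     # 2 trailing heights are discarded
buffer = data[:, 0:data.shape[1]-r]
buffer = buffer.reshape(data.shape[0], buffer.shape[1]/window, window)
buffer = numpy.sum(buffer, 2)

print buffer.shape                             # -> (2, 2)
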
372 372 def deFlip(self):
373 373 self.dataOut.data *= self.flip
374 374 self.flip *= -1.
375 375
376 376
377 377 class CohInt(Operation):
378 378
379 379 __isConfig = False
380 380
381 381 __profIndex = 0
382 382 __withOverapping = False
383 383
384 384 __byTime = False
385 385 __initime = None
386 386 __lastdatatime = None
387 387 __integrationtime = None
388 388
389 389 __buffer = None
390 390
391 391 __dataReady = False
392 392
393 393 n = None
394 394
395 395
396 396 def __init__(self):
397 397
398 398 self.__isConfig = False
399 399
400 400 def setup(self, n=None, timeInterval=None, overlapping=False):
401 401 """
402 402 Set the parameters of the integration class.
403 403
404 404 Inputs:
405 405
406 406 n : Number of coherent integrations
 407 407             timeInterval    :    Integration time; ignored when the parameter "n" is given
408 408 overlapping :
409 409
410 410 """
411 411
412 412 self.__initime = None
413 413 self.__lastdatatime = 0
414 414 self.__buffer = None
415 415 self.__dataReady = False
416 416
417 417
418 418 if n == None and timeInterval == None:
419 419 raise ValueError, "n or timeInterval should be specified ..."
420 420
421 421 if n != None:
422 422 self.n = n
423 423 self.__byTime = False
424 424 else:
425 425 self.__integrationtime = timeInterval * 60. #if (type(timeInterval)!=integer) -> change this line
426 426 self.n = 9999
427 427 self.__byTime = True
428 428
429 429 if overlapping:
430 430 self.__withOverapping = True
431 431 self.__buffer = None
432 432 else:
433 433 self.__withOverapping = False
434 434 self.__buffer = 0
435 435
436 436 self.__profIndex = 0
437 437
438 438 def putData(self, data):
439 439
440 440 """
 441 441         Add a profile to __buffer and increase __profileIndex by one
442 442
443 443 """
444 444
445 445 if not self.__withOverapping:
446 446 self.__buffer += data.copy()
447 447 self.__profIndex += 1
448 448 return
449 449
450 450 #Overlapping data
451 451 nChannels, nHeis = data.shape
452 452 data = numpy.reshape(data, (1, nChannels, nHeis))
453 453
454 454 #If the buffer is empty then it takes the data value
455 455 if self.__buffer == None:
456 456 self.__buffer = data
457 457 self.__profIndex += 1
458 458 return
459 459
 460 460         #If the buffer length is lower than n then stack the data value
461 461 if self.__profIndex < self.n:
462 462 self.__buffer = numpy.vstack((self.__buffer, data))
463 463 self.__profIndex += 1
464 464 return
465 465
466 466 #If the buffer length is equal to n then replacing the last buffer value with the data value
467 467 self.__buffer = numpy.roll(self.__buffer, -1, axis=0)
468 468 self.__buffer[self.n-1] = data
469 469 self.__profIndex = self.n
470 470 return
471 471
472 472
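
With overlapping enabled, putData() stacks the last n profiles along a new leading axis; once the stack is full it rolls the oldest profile out and writes the newest one into the last slot, so pushData() can always sum exactly n profiles. The rolling step alone, on an assumed stack of 3 profiles:

import numpy

n = 3
buffer = numpy.array([[1., 1.], [2., 2.], [3., 3.]])   # 3 stacked profiles (hypothetical)
newdata = numpy.array([4., 4.])

buffer = numpy.roll(buffer, -1, axis=0)                # drop the oldest profile
buffer[n-1] = newdata                                  # append the newest one

print buffer
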
473 473 def pushData(self):
474 474 """
 475 475         Return the sum of the last profiles and the number of profiles used in the sum.
476 476
477 477 Affected:
478 478
479 479 self.__profileIndex
480 480
481 481 """
482 482
483 483 if not self.__withOverapping:
484 484 data = self.__buffer
485 485 n = self.__profIndex
486 486
487 487 self.__buffer = 0
488 488 self.__profIndex = 0
489 489
490 490 return data, n
491 491
492 492 #Integration with Overlapping
493 493 data = numpy.sum(self.__buffer, axis=0)
494 494 n = self.__profIndex
495 495
496 496 return data, n
497 497
498 498 def byProfiles(self, data):
499 499
500 500 self.__dataReady = False
501 501 avgdata = None
502 502 n = None
503 503
504 504 self.putData(data)
505 505
506 506 if self.__profIndex == self.n:
507 507
508 508 avgdata, n = self.pushData()
509 509 self.__dataReady = True
510 510
511 511 return avgdata
512 512
513 513 def byTime(self, data, datatime):
514 514
515 515 self.__dataReady = False
516 516 avgdata = None
517 517 n = None
518 518
519 519 self.putData(data)
520 520
521 521 if (datatime - self.__initime) >= self.__integrationtime:
522 522 avgdata, n = self.pushData()
523 523 self.n = n
524 524 self.__dataReady = True
525 525
526 526 return avgdata
527 527
528 528 def integrate(self, data, datatime=None):
529 529
530 530 if self.__initime == None:
531 531 self.__initime = datatime
532 532
533 533 if self.__byTime:
534 534 avgdata = self.byTime(data, datatime)
535 535 else:
536 536 avgdata = self.byProfiles(data)
537 537
538 538
539 539 self.__lastdatatime = datatime
540 540
541 541 if avgdata == None:
542 542 return None, None
543 543
544 544 avgdatatime = self.__initime
545 545
546 546 deltatime = datatime -self.__lastdatatime
547 547
548 548 if not self.__withOverapping:
549 549 self.__initime = datatime
550 550 else:
551 551 self.__initime += deltatime
552 552
553 553 return avgdata, avgdatatime
554 554
555 555 def run(self, dataOut, **kwargs):
556 556
557 557 if not self.__isConfig:
558 558 self.setup(**kwargs)
559 559 self.__isConfig = True
560 560
561 561 avgdata, avgdatatime = self.integrate(dataOut.data, dataOut.utctime)
562 562
563 563 # dataOut.timeInterval *= n
564 564 dataOut.flagNoData = True
565 565
566 566 if self.__dataReady:
567 567 dataOut.data = avgdata
568 568 dataOut.nCohInt *= self.n
569 569 dataOut.utctime = avgdatatime
570 570 dataOut.timeInterval = dataOut.ippSeconds * dataOut.nCohInt
571 571 dataOut.flagNoData = False
572 572
573 573
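
Without overlapping, CohInt reduces to a running sum plus a profile counter: every n profiles the sum is handed to dataOut and the accumulators are reset, while nCohInt, utctime and timeInterval are updated. The bookkeeping, stripped to a standalone sketch with assumed sizes:

import numpy

n = 4
buffer = 0
profIndex = 0

for k in range(8):
    profile = numpy.ones((1, 5), dtype='complex')      # hypothetical (channels, heights) profile
    buffer += profile
    profIndex += 1

    if profIndex == n:
        avgdata = buffer                               # coherent sum of the last n profiles
        buffer = 0
        profIndex = 0
        print k, avgdata[0, 0]                         # -> (4+0j) after profiles 3 and 7
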
574 574 class Decoder(Operation):
575 575
576 576 __isConfig = False
577 577 __profIndex = 0
578 578
579 579 code = None
580 580
581 581 nCode = None
582 582 nBaud = None
583 583
584 584 def __init__(self):
585 585
586 586 self.__isConfig = False
587 587
588 588 def setup(self, code, shape):
589 589
590 590 self.__profIndex = 0
591 591
592 592 self.code = code
593 593
594 594 self.nCode = len(code)
595 595 self.nBaud = len(code[0])
596 596
597 597 self.__nChannels, self.__nHeis = shape
598 598
599 599 __codeBuffer = numpy.zeros((self.nCode, self.__nHeis), dtype=numpy.complex)
600 600
601 601 __codeBuffer[:,0:self.nBaud] = self.code
602 602
603 603 self.fft_code = numpy.conj(numpy.fft.fft(__codeBuffer, axis=1))
604 604
605 605 self.ndatadec = self.__nHeis - self.nBaud + 1
606 606
607 607 self.datadecTime = numpy.zeros((self.__nChannels, self.ndatadec), dtype=numpy.complex)
608 608
609 609 def convolutionInFreq(self, data):
610 610
611 611 fft_code = self.fft_code[self.__profIndex].reshape(1,-1)
612 612
613 613 fft_data = numpy.fft.fft(data, axis=1)
614 614
615 615 conv = fft_data*fft_code
616 616
617 617 data = numpy.fft.ifft(conv,axis=1)
618 618
619 619 datadec = data[:,:-self.nBaud+1]
620 620
621 621 return datadec
622 622
623 623 def convolutionInFreqOpt(self, data):
624 624
625 625 fft_code = self.fft_code[self.__profIndex].reshape(1,-1)
626 626
627 627 data = cfunctions.decoder(fft_code, data)
628 628
629 629 datadec = data[:,:-self.nBaud+1]
630 630
631 631 return datadec
632 632
633 633 def convolutionInTime(self, data):
634 634
635 635 code = self.code[self.__profIndex]
636 636
637 637 for i in range(self.__nChannels):
638 638 self.datadecTime[i,:] = numpy.correlate(data[i,:], code, mode='valid')
639 639
640 640 return self.datadecTime
641 641
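
convolutionInTime() decodes each channel by correlating the received profile with the transmitted code and keeping only the nHeis - nBaud + 1 fully overlapped lags ('valid' mode); convolutionInFreq() computes the same result as a product in the frequency domain. A small check of the time-domain form with an assumed 3-baud code:

import numpy

code = numpy.array([1., 1., -1.])                      # hypothetical 3-baud code
profile = numpy.array([0., 1., 1., -1., 0., 0.])       # echo of the code starting at range gate 1

datadec = numpy.correlate(profile, code, mode='valid')
print datadec                                          # -> [ 0.  3.  0. -1.], peak at the decoded gate
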
642 642 def run(self, dataOut, code=None, nCode=None, nBaud=None, mode = 0):
643 643
644 644 if not self.__isConfig:
645 645
646 646 if code == None:
647 647 code = dataOut.code
648 648 else:
649 649 code = numpy.array(code).reshape(nCode,nBaud)
650 650 dataOut.code = code
651 651 dataOut.nCode = nCode
652 652 dataOut.nBaud = nBaud
653 653
654 654 if code == None:
655 655 return 1
656 656
657 657 self.setup(code, dataOut.data.shape)
658 658 self.__isConfig = True
659 659
660 660 if mode == 0:
661 661 datadec = self.convolutionInTime(dataOut.data)
662 662
663 663 if mode == 1:
664 664 datadec = self.convolutionInFreq(dataOut.data)
665 665
666 666 if mode == 2:
667 667 datadec = self.convolutionInFreqOpt(dataOut.data)
668 668
669 669 dataOut.data = datadec
670 670
671 671 dataOut.heightList = dataOut.heightList[0:self.ndatadec]
672 672
 673 673         dataOut.flagDecodeData = True #the data is now decoded
674 674
675 675 if self.__profIndex == self.nCode-1:
676 676 self.__profIndex = 0
677 677 return 1
678 678
679 679 self.__profIndex += 1
680 680
681 681 return 1
 682 682 #        dataOut.flagDeflipData = True #assuming the data still has the flip applied
683 683
684 684
685 685
686 686 class SpectraProc(ProcessingUnit):
687 687
688 688 def __init__(self):
689 689
690 690 self.objectDict = {}
691 691 self.buffer = None
692 692 self.firstdatatime = None
693 693 self.profIndex = 0
694 694 self.dataOut = Spectra()
695 695
696 696 def __updateObjFromInput(self):
697 697
698 self.dataOut.timeZone = self.dataIn.timeZone
699 self.dataOut.dstFlag = self.dataIn.dstFlag
700 self.dataOut.errorCount = self.dataIn.errorCount
701 self.dataOut.useLocalTime = self.dataIn.useLocalTime
702
698 703 self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()
699 704 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()
700 705 self.dataOut.channelList = self.dataIn.channelList
701 706 self.dataOut.heightList = self.dataIn.heightList
702 707 self.dataOut.dtype = numpy.dtype([('real','<f4'),('imag','<f4')])
703 708 # self.dataOut.nHeights = self.dataIn.nHeights
704 709 # self.dataOut.nChannels = self.dataIn.nChannels
705 710 self.dataOut.nBaud = self.dataIn.nBaud
706 711 self.dataOut.nCode = self.dataIn.nCode
707 712 self.dataOut.code = self.dataIn.code
708 713 self.dataOut.nProfiles = self.dataOut.nFFTPoints
709 714 # self.dataOut.channelIndexList = self.dataIn.channelIndexList
710 715 self.dataOut.flagTimeBlock = self.dataIn.flagTimeBlock
711 716 self.dataOut.utctime = self.firstdatatime
 712 717         self.dataOut.flagDecodeData = self.dataIn.flagDecodeData #assuming the data is already decoded
 713 718         self.dataOut.flagDeflipData = self.dataIn.flagDeflipData #assuming the data is already deflipped
714 719 self.dataOut.flagShiftFFT = self.dataIn.flagShiftFFT
715 720 self.dataOut.nCohInt = self.dataIn.nCohInt
716 721 self.dataOut.nIncohInt = 1
717 722 self.dataOut.ippSeconds = self.dataIn.ippSeconds
718 723 self.dataOut.windowOfFilter = self.dataIn.windowOfFilter
719 724
720 725 self.dataOut.timeInterval = self.dataIn.timeInterval*self.dataOut.nFFTPoints*self.dataOut.nIncohInt
721 726
722 727 def __getFft(self):
723 728 """
 724 729         Converts Voltage values to Spectra
725 730
726 731 Affected:
727 732 self.dataOut.data_spc
728 733 self.dataOut.data_cspc
729 734 self.dataOut.data_dc
730 735 self.dataOut.heightList
731 736 self.profIndex
732 737 self.buffer
733 738 self.dataOut.flagNoData
734 739 """
735 740 fft_volt = numpy.fft.fft(self.buffer,axis=1)
736 741 fft_volt = fft_volt.astype(numpy.dtype('complex'))
737 742 dc = fft_volt[:,0,:]
738 743
 739 744         #self-spectra computation
740 745 fft_volt = numpy.fft.fftshift(fft_volt,axes=(1,))
741 746 spc = fft_volt * numpy.conjugate(fft_volt)
742 747 spc = spc.real
743 748
744 749 blocksize = 0
745 750 blocksize += dc.size
746 751 blocksize += spc.size
747 752
748 753 cspc = None
749 754 pairIndex = 0
750 755 if self.dataOut.pairsList != None:
 751 756             #cross-spectra computation
752 757 cspc = numpy.zeros((self.dataOut.nPairs, self.dataOut.nFFTPoints, self.dataOut.nHeights), dtype='complex')
753 758 for pair in self.dataOut.pairsList:
754 759 cspc[pairIndex,:,:] = fft_volt[pair[0],:,:] * numpy.conjugate(fft_volt[pair[1],:,:])
755 760 pairIndex += 1
756 761 blocksize += cspc.size
757 762
758 763 self.dataOut.data_spc = spc
759 764 self.dataOut.data_cspc = cspc
760 765 self.dataOut.data_dc = dc
761 766 self.dataOut.blockSize = blocksize
767 self.dataOut.flagShiftFFT = True
762 768
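
__getFft() turns the buffered profiles into spectra: an FFT along the profile axis, an fftshift, the squared magnitude for the self-spectra and conjugate products between the channels of each pair for the cross-spectra. The same arithmetic on a toy buffer of 2 channels, 8 profiles and 1 height (assumed sizes):

import numpy

buffer = numpy.random.randn(2, 8, 1) + 1j*numpy.random.randn(2, 8, 1)   # (channels, profiles, heights)

fft_volt = numpy.fft.fftshift(numpy.fft.fft(buffer, axis=1), axes=(1,))

spc = (fft_volt*numpy.conjugate(fft_volt)).real                # self-spectra, one per channel
cspc = fft_volt[0, :, :]*numpy.conjugate(fft_volt[1, :, :])    # cross-spectrum of the pair (0,1)

print spc.shape, cspc.shape                                    # -> (2, 8, 1) (8, 1)
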
763 769 def init(self, nFFTPoints=None, pairsList=None):
764 770
765 771 self.dataOut.flagNoData = True
766 772
767 773 if self.dataIn.type == "Spectra":
768 774 self.dataOut.copy(self.dataIn)
769 775 return
770 776
771 777 if self.dataIn.type == "Voltage":
772 778
773 779 if nFFTPoints == None:
774 780 raise ValueError, "This SpectraProc.init() need nFFTPoints input variable"
775 781
776 782 if pairsList == None:
777 783 nPairs = 0
778 784 else:
779 785 nPairs = len(pairsList)
780 786
781 787 self.dataOut.nFFTPoints = nFFTPoints
782 788 self.dataOut.pairsList = pairsList
783 789 self.dataOut.nPairs = nPairs
784 790
785 791 if self.buffer == None:
786 792 self.buffer = numpy.zeros((self.dataIn.nChannels,
787 793 self.dataOut.nFFTPoints,
788 794 self.dataIn.nHeights),
789 795 dtype='complex')
790 796
791 797
792 798 self.buffer[:,self.profIndex,:] = self.dataIn.data.copy()
793 799 self.profIndex += 1
794 800
795 801 if self.firstdatatime == None:
796 802 self.firstdatatime = self.dataIn.utctime
797 803
798 804 if self.profIndex == self.dataOut.nFFTPoints:
799 805 self.__updateObjFromInput()
800 806 self.__getFft()
801 807
802 808 self.dataOut.flagNoData = False
803 809
804 810 self.buffer = None
805 811 self.firstdatatime = None
806 812 self.profIndex = 0
807 813
808 814 return
809 815
 810 816         raise ValueError, "The type object %s is not valid"%(self.dataIn.type)
811 817
812 818 def selectChannels(self, channelList):
813 819
814 820 channelIndexList = []
815 821
816 822 for channel in channelList:
817 823 index = self.dataOut.channelList.index(channel)
818 824 channelIndexList.append(index)
819 825
820 826 self.selectChannelsByIndex(channelIndexList)
821 827
822 828 def selectChannelsByIndex(self, channelIndexList):
823 829 """
 824 830         Selects a block of data by channel, according to channelIndexList
 825 831
 826 832         Input:
 827 833             channelIndexList    :    plain list of channel indexes to select, e.g. [2,3,7]
828 834
829 835 Affected:
830 836 self.dataOut.data_spc
831 837 self.dataOut.channelIndexList
832 838 self.dataOut.nChannels
833 839
834 840 Return:
835 841 None
836 842 """
837 843
838 844 for channelIndex in channelIndexList:
839 845 if channelIndex not in self.dataOut.channelIndexList:
840 846 print channelIndexList
841 847 raise ValueError, "The value %d in channelIndexList is not valid" %channelIndex
842 848
843 849 nChannels = len(channelIndexList)
844 850
845 851 data_spc = self.dataOut.data_spc[channelIndexList,:]
846 852
847 853 self.dataOut.data_spc = data_spc
848 854 self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
849 855 # self.dataOut.nChannels = nChannels
850 856
851 857 return 1
852 858
853 859 def selectHeights(self, minHei, maxHei):
854 860 """
 855 861         Selects a block of data by height values, within the range
 856 862         minHei <= height <= maxHei
 857 863
 858 864         Input:
 859 865             minHei    :    minimum height value to consider
 860 866             maxHei    :    maximum height value to consider
 861 867
 862 868         Affected:
 863 869             Several values are changed indirectly through the selectHeightsByIndex method
 864 870
 865 871         Return:
 866 872             1 if the method executed successfully, otherwise 0
867 873 """
868 874 if (minHei < self.dataOut.heightList[0]) or (minHei > maxHei):
869 875 raise ValueError, "some value in (%d,%d) is not valid" % (minHei, maxHei)
870 876
871 877 if (maxHei > self.dataOut.heightList[-1]):
872 878 maxHei = self.dataOut.heightList[-1]
873 879 # raise ValueError, "some value in (%d,%d) is not valid" % (minHei, maxHei)
874 880
875 881 minIndex = 0
876 882 maxIndex = 0
877 883 heights = self.dataOut.heightList
878 884
879 885 inda = numpy.where(heights >= minHei)
880 886 indb = numpy.where(heights <= maxHei)
881 887
882 888 try:
883 889 minIndex = inda[0][0]
884 890 except:
885 891 minIndex = 0
886 892
887 893 try:
888 894 maxIndex = indb[0][-1]
889 895 except:
890 896 maxIndex = len(heights)
891 897
892 898 self.selectHeightsByIndex(minIndex, maxIndex)
893 899
894 900 return 1
895 901
896 902
897 903 def selectHeightsByIndex(self, minIndex, maxIndex):
898 904 """
 899 905         Selects a block of data by height indexes, within the range
 900 906         minIndex <= index <= maxIndex
 901 907
 902 908         Input:
 903 909             minIndex    :    minimum height index to consider
 904 910             maxIndex    :    maximum height index to consider
 905 911
 906 912         Affected:
 907 913             self.dataOut.data_spc
 908 914             self.dataOut.data_cspc
 909 915             self.dataOut.data_dc
 910 916             self.dataOut.heightList
 911 917
 912 918         Return:
 913 919             1 if the method executed successfully, otherwise 0
914 920 """
915 921
916 922 if (minIndex < 0) or (minIndex > maxIndex):
917 923 raise ValueError, "some value in (%d,%d) is not valid" % (minIndex, maxIndex)
918 924
919 925 if (maxIndex >= self.dataOut.nHeights):
920 926 maxIndex = self.dataOut.nHeights-1
921 927 # raise ValueError, "some value in (%d,%d) is not valid" % (minIndex, maxIndex)
922 928
923 929 nHeights = maxIndex - minIndex + 1
924 930
925 931 #Spectra
926 932 data_spc = self.dataOut.data_spc[:,:,minIndex:maxIndex+1]
927 933
928 934 data_cspc = None
929 935 if self.dataOut.data_cspc != None:
930 936 data_cspc = self.dataOut.data_cspc[:,:,minIndex:maxIndex+1]
931 937
932 938 data_dc = None
933 939 if self.dataOut.data_dc != None:
934 940 data_dc = self.dataOut.data_dc[:,minIndex:maxIndex+1]
935 941
936 942 self.dataOut.data_spc = data_spc
937 943 self.dataOut.data_cspc = data_cspc
938 944 self.dataOut.data_dc = data_dc
939 945
940 946 self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex+1]
941 947
942 948 return 1
943 949
944 950 def removeDC(self, mode = 1):
945 951
946 952 dc_index = 0
947 953 freq_index = numpy.array([-2,-1,1,2])
948 954 data_spc = self.dataOut.data_spc
949 955 data_cspc = self.dataOut.data_cspc
950 956 data_dc = self.dataOut.data_dc
951 957
952 958 if self.dataOut.flagShiftFFT:
953 959 dc_index += self.dataOut.nFFTPoints/2
954 960 freq_index += self.dataOut.nFFTPoints/2
955 961
956 962 if mode == 1:
957 963 data_spc[dc_index] = (data_spc[:,freq_index[1],:] + data_spc[:,freq_index[2],:])/2
958 964 if data_cspc != None:
959 965 data_cspc[dc_index] = (data_cspc[:,freq_index[1],:] + data_cspc[:,freq_index[2],:])/2
960 966 return 1
961 967
962 968 if mode == 2:
963 969 pass
964 970
965 971 if mode == 3:
966 972 pass
967 973
968 974 raise ValueError, "mode parameter has to be 1, 2 or 3"
969 975
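
removeDC() with mode=1 replaces the DC bin of the spectra with the average of the two bins next to it, shifting the indexes by nFFTPoints/2 when the spectra are FFT-shifted. The interpolation itself on an assumed shifted spectrum of 8 FFT points (the example indexes the frequency axis explicitly):

import numpy

nFFTPoints = 8
spc = numpy.ones((1, nFFTPoints, 1))                   # (channels, FFT points, heights), hypothetical
spc[0, nFFTPoints/2, 0] = 50.                          # strong DC line after fftshift

dc_index = 0 + nFFTPoints/2
freq_index = numpy.array([-2, -1, 1, 2]) + nFFTPoints/2

spc[:, dc_index, :] = (spc[:, freq_index[1], :] + spc[:, freq_index[2], :])/2
print spc[0, :, 0]                                     # DC line replaced by the mean of its neighbours
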
970 976 def removeInterference(self):
971 977
972 978 pass
973 979
974 980
975 981 class IncohInt(Operation):
976 982
977 983
978 984 __profIndex = 0
979 985 __withOverapping = False
980 986
981 987 __byTime = False
982 988 __initime = None
983 989 __lastdatatime = None
984 990 __integrationtime = None
985 991
986 992 __buffer_spc = None
987 993 __buffer_cspc = None
988 994 __buffer_dc = None
989 995
990 996 __dataReady = False
991 997
992 998 __timeInterval = None
993 999
994 1000 n = None
995 1001
996 1002
997 1003
998 1004 def __init__(self):
999 1005
1000 1006 self.__isConfig = False
1001 1007
1002 1008 def setup(self, n=None, timeInterval=None, overlapping=False):
1003 1009 """
1004 1010 Set the parameters of the integration class.
1005 1011
1006 1012 Inputs:
1007 1013
 1008 1014             n    :    Number of incoherent integrations
 1009 1015             timeInterval    :    Integration time; ignored when the parameter "n" is given
1010 1016 overlapping :
1011 1017
1012 1018 """
1013 1019
1014 1020 self.__initime = None
1015 1021 self.__lastdatatime = 0
1016 1022 self.__buffer_spc = None
1017 1023 self.__buffer_cspc = None
1018 1024 self.__buffer_dc = None
1019 1025 self.__dataReady = False
1020 1026
1021 1027
1022 1028 if n == None and timeInterval == None:
1023 1029 raise ValueError, "n or timeInterval should be specified ..."
1024 1030
1025 1031 if n != None:
1026 1032 self.n = n
1027 1033 self.__byTime = False
1028 1034 else:
1029 1035 self.__integrationtime = timeInterval * 60. #if (type(timeInterval)!=integer) -> change this line
1030 1036 self.n = 9999
1031 1037 self.__byTime = True
1032 1038
1033 1039 if overlapping:
1034 1040 self.__withOverapping = True
1035 1041 else:
1036 1042 self.__withOverapping = False
1037 1043 self.__buffer_spc = 0
1038 1044 self.__buffer_cspc = 0
1039 1045 self.__buffer_dc = 0
1040 1046
1041 1047 self.__profIndex = 0
1042 1048
1043 1049 def putData(self, data_spc, data_cspc, data_dc):
1044 1050
1045 1051 """
 1046 1052         Add a profile to __buffer_spc and increase __profileIndex by one
1047 1053
1048 1054 """
1049 1055
1050 1056 if not self.__withOverapping:
1051 1057 self.__buffer_spc += data_spc
1052 1058
1053 1059 if data_cspc == None:
1054 1060 self.__buffer_cspc = None
1055 1061 else:
1056 1062 self.__buffer_cspc += data_cspc
1057 1063
1058 1064 if data_dc == None:
1059 1065 self.__buffer_dc = None
1060 1066 else:
1061 1067 self.__buffer_dc += data_dc
1062 1068
1063 1069 self.__profIndex += 1
1064 1070 return
1065 1071
1066 1072 #Overlapping data
1067 1073 nChannels, nFFTPoints, nHeis = data_spc.shape
1068 1074 data_spc = numpy.reshape(data_spc, (1, nChannels, nFFTPoints, nHeis))
1069 1075 if data_cspc != None:
1070 1076 data_cspc = numpy.reshape(data_cspc, (1, -1, nFFTPoints, nHeis))
1071 1077 if data_dc != None:
1072 1078 data_dc = numpy.reshape(data_dc, (1, -1, nHeis))
1073 1079
1074 1080 #If the buffer is empty then it takes the data value
1075 1081 if self.__buffer_spc == None:
1076 1082 self.__buffer_spc = data_spc
1077 1083
1078 1084 if data_cspc == None:
1079 1085 self.__buffer_cspc = None
1080 1086 else:
 1081 1087                 self.__buffer_cspc = data_cspc
1082 1088
1083 1089 if data_dc == None:
1084 1090 self.__buffer_dc = None
1085 1091 else:
 1086 1092                 self.__buffer_dc = data_dc
1087 1093
1088 1094 self.__profIndex += 1
1089 1095 return
1090 1096
 1091 1097         #If the buffer length is lower than n then stack the data value
1092 1098 if self.__profIndex < self.n:
1093 1099 self.__buffer_spc = numpy.vstack((self.__buffer_spc, data_spc))
1094 1100
1095 1101 if data_cspc != None:
1096 1102 self.__buffer_cspc = numpy.vstack((self.__buffer_cspc, data_cspc))
1097 1103
1098 1104 if data_dc != None:
1099 1105 self.__buffer_dc = numpy.vstack((self.__buffer_dc, data_dc))
1100 1106
1101 1107 self.__profIndex += 1
1102 1108 return
1103 1109
1104 1110 #If the buffer length is equal to n then replacing the last buffer value with the data value
1105 1111 self.__buffer_spc = numpy.roll(self.__buffer_spc, -1, axis=0)
1106 1112 self.__buffer_spc[self.n-1] = data_spc
1107 1113
1108 1114 if data_cspc != None:
1109 1115 self.__buffer_cspc = numpy.roll(self.__buffer_cspc, -1, axis=0)
1110 1116 self.__buffer_cspc[self.n-1] = data_cspc
1111 1117
1112 1118 if data_dc != None:
1113 1119 self.__buffer_dc = numpy.roll(self.__buffer_dc, -1, axis=0)
1114 1120 self.__buffer_dc[self.n-1] = data_dc
1115 1121
1116 1122 self.__profIndex = self.n
1117 1123 return
1118 1124
1119 1125
1120 1126 def pushData(self):
1121 1127 """
 1122 1128         Return the sum of the last profiles and the number of profiles used in the sum.
1123 1129
1124 1130 Affected:
1125 1131
1126 1132 self.__profileIndex
1127 1133
1128 1134 """
1129 1135 data_spc = None
1130 1136 data_cspc = None
1131 1137 data_dc = None
1132 1138
1133 1139 if not self.__withOverapping:
1134 1140 data_spc = self.__buffer_spc
1135 1141 data_cspc = self.__buffer_cspc
1136 1142 data_dc = self.__buffer_dc
1137 1143
1138 1144 n = self.__profIndex
1139 1145
1140 1146 self.__buffer_spc = 0
1141 1147 self.__buffer_cspc = 0
1142 1148 self.__buffer_dc = 0
1143 1149 self.__profIndex = 0
1144 1150
1145 1151 return data_spc, data_cspc, data_dc, n
1146 1152
1147 1153 #Integration with Overlapping
1148 1154 data_spc = numpy.sum(self.__buffer_spc, axis=0)
1149 1155
1150 1156 if self.__buffer_cspc != None:
1151 1157 data_cspc = numpy.sum(self.__buffer_cspc, axis=0)
1152 1158
1153 1159 if self.__buffer_dc != None:
1154 1160 data_dc = numpy.sum(self.__buffer_dc, axis=0)
1155 1161
1156 1162 n = self.__profIndex
1157 1163
1158 1164 return data_spc, data_cspc, data_dc, n
1159 1165
1160 1166 def byProfiles(self, *args):
1161 1167
1162 1168 self.__dataReady = False
1163 1169 avgdata_spc = None
1164 1170 avgdata_cspc = None
1165 1171 avgdata_dc = None
1166 1172 n = None
1167 1173
1168 1174 self.putData(*args)
1169 1175
1170 1176 if self.__profIndex == self.n:
1171 1177
1172 1178 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
1173 1179 self.__dataReady = True
1174 1180
1175 1181 return avgdata_spc, avgdata_cspc, avgdata_dc
1176 1182
1177 1183 def byTime(self, datatime, *args):
1178 1184
1179 1185 self.__dataReady = False
1180 1186 avgdata_spc = None
1181 1187 avgdata_cspc = None
1182 1188 avgdata_dc = None
1183 1189 n = None
1184 1190
1185 1191 self.putData(*args)
1186 1192
1187 1193 if (datatime - self.__initime) >= self.__integrationtime:
1188 1194 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
1189 1195 self.n = n
1190 1196 self.__dataReady = True
1191 1197
1192 1198 return avgdata_spc, avgdata_cspc, avgdata_dc
1193 1199
1194 1200 def integrate(self, datatime, *args):
1195 1201
1196 1202 if self.__initime == None:
1197 1203 self.__initime = datatime
1198 1204
1199 1205 if self.__byTime:
1200 1206 avgdata_spc, avgdata_cspc, avgdata_dc = self.byTime(datatime, *args)
1201 1207 else:
1202 1208 avgdata_spc, avgdata_cspc, avgdata_dc = self.byProfiles(*args)
1203 1209
1204 1210 self.__lastdatatime = datatime
1205 1211
1206 1212 if avgdata_spc == None:
1207 1213 return None, None, None, None
1208 1214
1209 1215 avgdatatime = self.__initime
1210 1216 self.__timeInterval = (self.__lastdatatime - self.__initime)/(self.n - 1)
1211 1217
1212 1218 deltatime = datatime -self.__lastdatatime
1213 1219
1214 1220 if not self.__withOverapping:
1215 1221 self.__initime = datatime
1216 1222 else:
1217 1223 self.__initime += deltatime
1218 1224
1219 1225 return avgdatatime, avgdata_spc, avgdata_cspc, avgdata_dc
1220 1226
1221 1227 def run(self, dataOut, n=None, timeInterval=None, overlapping=False):
1222 1228
1223 1229 if n==1:
1224 1230 dataOut.flagNoData = False
1225 1231 return
1226 1232
1227 1233 if not self.__isConfig:
1228 1234 self.setup(n, timeInterval, overlapping)
1229 1235 self.__isConfig = True
1230 1236
1231 1237 avgdatatime, avgdata_spc, avgdata_cspc, avgdata_dc = self.integrate(dataOut.utctime,
1232 1238 dataOut.data_spc,
1233 1239 dataOut.data_cspc,
1234 1240 dataOut.data_dc)
1235 1241
1236 1242 # dataOut.timeInterval *= n
1237 1243 dataOut.flagNoData = True
1238 1244
1239 1245 if self.__dataReady:
1240 1246
1241 1247 dataOut.data_spc = avgdata_spc
1242 1248 dataOut.data_cspc = avgdata_cspc
1243 1249 dataOut.data_dc = avgdata_dc
1244 1250
1245 1251 dataOut.nIncohInt *= self.n
1246 1252 dataOut.utctime = avgdatatime
1247 1253 #dataOut.timeInterval = dataOut.ippSeconds * dataOut.nCohInt * dataOut.nIncohInt * dataOut.nFFTPoints
1248 1254 dataOut.timeInterval = self.__timeInterval*self.n
1249 1255 dataOut.flagNoData = False
1250 1256
1251 1257 class ProfileSelector(Operation):
1252 1258
1253 1259 profileIndex = None
 1254 1260     # Total number of profiles
1255 1261 nProfiles = None
1256 1262
1257 1263 def __init__(self):
1258 1264
1259 1265 self.profileIndex = 0
1260 1266
1261 1267 def incIndex(self):
1262 1268 self.profileIndex += 1
1263 1269
1264 1270 if self.profileIndex >= self.nProfiles:
1265 1271 self.profileIndex = 0
1266 1272
1267 1273 def isProfileInRange(self, minIndex, maxIndex):
1268 1274
1269 1275 if self.profileIndex < minIndex:
1270 1276 return False
1271 1277
1272 1278 if self.profileIndex > maxIndex:
1273 1279 return False
1274 1280
1275 1281 return True
1276 1282
1277 1283 def isProfileInList(self, profileList):
1278 1284
1279 1285 if self.profileIndex not in profileList:
1280 1286 return False
1281 1287
1282 1288 return True
1283 1289
1284 1290 def run(self, dataOut, profileList=None, profileRangeList=None):
1285 1291
1286 1292 dataOut.flagNoData = True
1287 1293 self.nProfiles = dataOut.nProfiles
1288 1294
1289 1295 if profileList != None:
1290 1296 if self.isProfileInList(profileList):
1291 1297 dataOut.flagNoData = False
1292 1298
1293 1299 self.incIndex()
1294 1300 return 1
1295 1301
1296 1302
1297 1303 elif profileRangeList != None:
1298 1304 minIndex = profileRangeList[0]
1299 1305 maxIndex = profileRangeList[1]
1300 1306 if self.isProfileInRange(minIndex, maxIndex):
1301 1307 dataOut.flagNoData = False
1302 1308
1303 1309 self.incIndex()
1304 1310 return 1
1305 1311
1306 1312 else:
1307 1313 raise ValueError, "ProfileSelector needs profileList or profileRangeList"
1308 1314
1309 1315 return 0
1310 1316
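
ProfileSelector keeps its own profile counter and only raises or clears dataOut.flagNoData, so downstream operations see nothing but the selected profiles; it is configured either with an explicit profileList or with a [minIndex, maxIndex] profileRangeList. A minimal sketch of registering it on a voltage unit, reusing the hypothetical procUnit and OperationConf names from the sketch after the ProcessingUnit class:

selector = ProfileSelector()
opId = procUnit.addOperation(selector, objId=2)

# keep only profiles 0..63 of every sweep of nProfiles
procUnit.call(OperationConf('ProfileSelector', 'other', opId),
              profileRangeList=[0, 63])
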
1311 1317 class SpectraHeisProc(ProcessingUnit):
1312 1318 def __init__(self):
1313 1319 self.objectDict = {}
1314 1320 # self.buffer = None
1315 1321 # self.firstdatatime = None
1316 1322 # self.profIndex = 0
1317 1323 self.dataOut = SpectraHeis()
1318 1324
1319 1325 def __updateObjFromInput(self):
1320 1326 self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()#
1321 1327 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()#
1322 1328 self.dataOut.channelList = self.dataIn.channelList
1323 1329 self.dataOut.heightList = self.dataIn.heightList
1324 1330 # self.dataOut.dtype = self.dataIn.dtype
1325 1331 self.dataOut.dtype = numpy.dtype([('real','<f4'),('imag','<f4')])
1326 1332 # self.dataOut.nHeights = self.dataIn.nHeights
1327 1333 # self.dataOut.nChannels = self.dataIn.nChannels
1328 1334 self.dataOut.nBaud = self.dataIn.nBaud
1329 1335 self.dataOut.nCode = self.dataIn.nCode
1330 1336 self.dataOut.code = self.dataIn.code
1331 1337 # self.dataOut.nProfiles = 1
1332 1338 # self.dataOut.nProfiles = self.dataOut.nFFTPoints
1333 1339 self.dataOut.nFFTPoints = self.dataIn.nHeights
1334 1340 # self.dataOut.channelIndexList = self.dataIn.channelIndexList
1335 1341 # self.dataOut.flagNoData = self.dataIn.flagNoData
1336 1342 self.dataOut.flagTimeBlock = self.dataIn.flagTimeBlock
1337 1343 self.dataOut.utctime = self.dataIn.utctime
1338 1344 # self.dataOut.utctime = self.firstdatatime
 1339 1345         self.dataOut.flagDecodeData = self.dataIn.flagDecodeData #assuming the data is already decoded
 1340 1346         self.dataOut.flagDeflipData = self.dataIn.flagDeflipData #assuming the data is already deflipped
1341 1347 self.dataOut.flagShiftFFT = self.dataIn.flagShiftFFT
1342 1348 self.dataOut.nCohInt = self.dataIn.nCohInt
1343 1349 self.dataOut.nIncohInt = 1
1344 1350 self.dataOut.ippSeconds= self.dataIn.ippSeconds
1345 1351 self.dataOut.windowOfFilter = self.dataIn.windowOfFilter
1346 1352
1347 1353 self.dataOut.timeInterval = self.dataIn.timeInterval*self.dataOut.nIncohInt
1348 1354 # self.dataOut.set=self.dataIn.set
1349 1355 # self.dataOut.deltaHeight=self.dataIn.deltaHeight
1350 1356
1351 1357
1352 1358 def __getFft(self):
1353 1359
1354 1360 fft_volt = numpy.fft.fft(self.dataIn.data, axis=1)
1355 1361 fft_volt = numpy.fft.fftshift(fft_volt,axes=(1,))
1356 1362 spc = numpy.abs(fft_volt * numpy.conjugate(fft_volt))/(self.dataOut.nFFTPoints)
1357 1363 self.dataOut.data_spc = spc
1358 1364
1359 1365 def init(self):
1360 1366
1361 1367 self.dataOut.flagNoData = True
1362 1368
1363 1369 if self.dataIn.type == "SpectraHeis":
1364 1370 self.dataOut.copy(self.dataIn)
1365 1371 return
1366 1372
1367 1373 if self.dataIn.type == "Voltage":
1368 1374 self.__updateObjFromInput()
1369 1375 self.__getFft()
1370 1376 self.dataOut.flagNoData = False
1371 1377
1372 1378 return
1373 1379
 1374 1380         raise ValueError, "The type object %s is not valid"%(self.dataIn.type)
1375 1381
1376 1382
1377 1383 def selectChannels(self, channelList):
1378 1384
1379 1385 channelIndexList = []
1380 1386
1381 1387 for channel in channelList:
1382 1388 index = self.dataOut.channelList.index(channel)
1383 1389 channelIndexList.append(index)
1384 1390
1385 1391 self.selectChannelsByIndex(channelIndexList)
1386 1392
1387 1393 def selectChannelsByIndex(self, channelIndexList):
1388 1394 """
 1389 1395         Selects a block of data by channel, according to channelIndexList
 1390 1396
 1391 1397         Input:
 1392 1398             channelIndexList    :    plain list of channel indexes to select, e.g. [2,3,7]
1393 1399
1394 1400 Affected:
1395 1401 self.dataOut.data
1396 1402 self.dataOut.channelIndexList
1397 1403 self.dataOut.nChannels
1398 1404 self.dataOut.m_ProcessingHeader.totalSpectra
1399 1405 self.dataOut.systemHeaderObj.numChannels
1400 1406 self.dataOut.m_ProcessingHeader.blockSize
1401 1407
1402 1408 Return:
1403 1409 None
1404 1410 """
1405 1411
1406 1412 for channelIndex in channelIndexList:
1407 1413 if channelIndex not in self.dataOut.channelIndexList:
1408 1414 print channelIndexList
1409 1415 raise ValueError, "The value %d in channelIndexList is not valid" %channelIndex
1410 1416
1411 1417 nChannels = len(channelIndexList)
1412 1418
1413 1419 data_spc = self.dataOut.data_spc[channelIndexList,:]
1414 1420
1415 1421 self.dataOut.data_spc = data_spc
1416 1422 self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
1417 1423
1418 1424 return 1
1419 1425
1420 1426 class IncohInt4SpectraHeis(Operation):
1421 1427
1422 1428 __isConfig = False
1423 1429
1424 1430 __profIndex = 0
1425 1431 __withOverapping = False
1426 1432
1427 1433 __byTime = False
1428 1434 __initime = None
1429 1435 __lastdatatime = None
1430 1436 __integrationtime = None
1431 1437
1432 1438 __buffer = None
1433 1439
1434 1440 __dataReady = False
1435 1441
1436 1442 n = None
1437 1443
1438 1444
1439 1445 def __init__(self):
1440 1446
1441 1447 self.__isConfig = False
1442 1448
1443 1449 def setup(self, n=None, timeInterval=None, overlapping=False):
1444 1450 """
1445 1451 Set the parameters of the integration class.
1446 1452
1447 1453 Inputs:
1448 1454
 1449 1455             n    :    Number of incoherent integrations
 1450 1456             timeInterval    :    Integration time; ignored when the parameter "n" is given
1451 1457 overlapping :
1452 1458
1453 1459 """
1454 1460
1455 1461 self.__initime = None
1456 1462 self.__lastdatatime = 0
1457 1463 self.__buffer = None
1458 1464 self.__dataReady = False
1459 1465
1460 1466
1461 1467 if n == None and timeInterval == None:
1462 1468 raise ValueError, "n or timeInterval should be specified ..."
1463 1469
1464 1470 if n != None:
1465 1471 self.n = n
1466 1472 self.__byTime = False
1467 1473 else:
1468 1474 self.__integrationtime = timeInterval #* 60. #if (type(timeInterval)!=integer) -> change this line
1469 1475 self.n = 9999
1470 1476 self.__byTime = True
1471 1477
1472 1478 if overlapping:
1473 1479 self.__withOverapping = True
1474 1480 self.__buffer = None
1475 1481 else:
1476 1482 self.__withOverapping = False
1477 1483 self.__buffer = 0
1478 1484
1479 1485 self.__profIndex = 0
1480 1486
1481 1487 def putData(self, data):
1482 1488
1483 1489 """
 1484 1490         Add a profile to __buffer and increase __profileIndex by one
1485 1491
1486 1492 """
1487 1493
1488 1494 if not self.__withOverapping:
1489 1495 self.__buffer += data.copy()
1490 1496 self.__profIndex += 1
1491 1497 return
1492 1498
1493 1499 #Overlapping data
1494 1500 nChannels, nHeis = data.shape
1495 1501 data = numpy.reshape(data, (1, nChannels, nHeis))
1496 1502
1497 1503 #If the buffer is empty then it takes the data value
1498 1504 if self.__buffer == None:
1499 1505 self.__buffer = data
1500 1506 self.__profIndex += 1
1501 1507 return
1502 1508
 1503 1509         #If the buffer length is lower than n then stack the data value
1504 1510 if self.__profIndex < self.n:
1505 1511 self.__buffer = numpy.vstack((self.__buffer, data))
1506 1512 self.__profIndex += 1
1507 1513 return
1508 1514
1509 1515 #If the buffer length is equal to n then replacing the last buffer value with the data value
1510 1516 self.__buffer = numpy.roll(self.__buffer, -1, axis=0)
1511 1517 self.__buffer[self.n-1] = data
1512 1518 self.__profIndex = self.n
1513 1519 return
1514 1520
1515 1521
1516 1522 def pushData(self):
1517 1523 """
 1518 1524         Return the sum of the last profiles and the number of profiles used in the sum.
1519 1525
1520 1526 Affected:
1521 1527
1522 1528 self.__profileIndex
1523 1529
1524 1530 """
1525 1531
1526 1532 if not self.__withOverapping:
1527 1533 data = self.__buffer
1528 1534 n = self.__profIndex
1529 1535
1530 1536 self.__buffer = 0
1531 1537 self.__profIndex = 0
1532 1538
1533 1539 return data, n
1534 1540
1535 1541 #Integration with Overlapping
1536 1542 data = numpy.sum(self.__buffer, axis=0)
1537 1543 n = self.__profIndex
1538 1544
1539 1545 return data, n
1540 1546
1541 1547 def byProfiles(self, data):
1542 1548
1543 1549 self.__dataReady = False
1544 1550 avgdata = None
1545 1551 n = None
1546 1552
1547 1553 self.putData(data)
1548 1554
1549 1555 if self.__profIndex == self.n:
1550 1556
1551 1557 avgdata, n = self.pushData()
1552 1558 self.__dataReady = True
1553 1559
1554 1560 return avgdata
1555 1561
1556 1562 def byTime(self, data, datatime):
1557 1563
1558 1564 self.__dataReady = False
1559 1565 avgdata = None
1560 1566 n = None
1561 1567
1562 1568 self.putData(data)
1563 1569
1564 1570 if (datatime - self.__initime) >= self.__integrationtime:
1565 1571 avgdata, n = self.pushData()
1566 1572 self.n = n
1567 1573 self.__dataReady = True
1568 1574
1569 1575 return avgdata
1570 1576
1571 1577 def integrate(self, data, datatime=None):
1572 1578
1573 1579 if self.__initime == None:
1574 1580 self.__initime = datatime
1575 1581
1576 1582 if self.__byTime:
1577 1583 avgdata = self.byTime(data, datatime)
1578 1584 else:
1579 1585 avgdata = self.byProfiles(data)
1580 1586
1581 1587
1582 1588 self.__lastdatatime = datatime
1583 1589
1584 1590 if avgdata == None:
1585 1591 return None, None
1586 1592
1587 1593 avgdatatime = self.__initime
1588 1594
1589 1595 deltatime = datatime -self.__lastdatatime
1590 1596
1591 1597 if not self.__withOverapping:
1592 1598 self.__initime = datatime
1593 1599 else:
1594 1600 self.__initime += deltatime
1595 1601
1596 1602 return avgdata, avgdatatime
1597 1603
1598 1604 def run(self, dataOut, **kwargs):
1599 1605
1600 1606 if not self.__isConfig:
1601 1607 self.setup(**kwargs)
1602 1608 self.__isConfig = True
1603 1609
1604 1610 avgdata, avgdatatime = self.integrate(dataOut.data_spc, dataOut.utctime)
1605 1611
1606 1612 # dataOut.timeInterval *= n
1607 1613 dataOut.flagNoData = True
1608 1614
1609 1615 if self.__dataReady:
1610 1616 dataOut.data_spc = avgdata
1611 1617 dataOut.nIncohInt *= self.n
1612 1618 # dataOut.nCohInt *= self.n
1613 1619 dataOut.utctime = avgdatatime
1614 1620 dataOut.timeInterval = dataOut.ippSeconds * dataOut.nIncohInt
1615 1621 # dataOut.timeInterval = self.__timeInterval*self.n
1616 1622 dataOut.flagNoData = False
1617 1623
1618 1624
1619 1625
1620 1626
1621 1627 No newline at end of file