Added the flag_cspc and flag_dc properties to the Spectra class...
Miguel Valdez -
r266:ab6d630dba95
@@ -1,536 +1,552
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JROData.py 173 2012-11-20 15:06:21Z murco $
5 5 '''
6 6
7 7 import os, sys
8 8 import copy
9 9 import numpy
10 10 import datetime
11 11
12 12 from jroheaderIO import SystemHeader, RadarControllerHeader
13 13
14 14 def hildebrand_sekhon(data, navg):
15 15 """
16 16 This method is for the objective determination of the noise level in Doppler spectra. This
17 17 implementation technique is based on the fact that the standard deviation of the spectral
18 18 densities is equal to the mean spectral density for white Gaussian noise
19 19
20 20 Inputs:
21 21 data : array of spectral densities
22 22 navg : number of averages
23 23
24 24 Return:
25 25 -1 : any error
26 26 anoise : noise level
27 27 """
28 28
29 29 dataflat = data.copy().reshape(-1)
30 30 dataflat.sort()
31 31 npts = dataflat.size #number of points in the data
32 32 npts_noise = 0.2*npts
33 33
34 34 if npts < 32:
35 35 print "error in noise - requires at least 32 points"
36 36 return -1.0
37 37
38 38 dataflat2 = numpy.power(dataflat,2)
39 39
40 40 cs = numpy.cumsum(dataflat)
41 41 cs2 = numpy.cumsum(dataflat2)
42 42
43 43 # data sorted in ascending order
44 44 nmin = int((npts + 7.)/8)
45 45
46 46 for i in range(nmin, npts):
47 47 s = cs[i]
48 48 s2 = cs2[i]
49 49 p = s / float(i);
50 50 p2 = p**2;
51 51 q = s2 / float(i) - p2;
52 52 leftc = p2;
53 53 rightc = q * float(navg);
54 54 R2 = leftc/rightc
55 55
56 56 # Signal detect: R2 < 1 (R2 = leftc/rightc)
57 57 if R2 < 1:
58 58 npts_noise = i
59 59 break
60 60
61 61
62 62 anoise = numpy.average(dataflat[0:npts_noise])
63 63
64 64 return anoise;
65 65
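A minimal usage sketch of hildebrand_sekhon (hedged: the module name "jrodata" follows the import used in JRODataIO.py, and the synthetic spectrum is an assumption; only the call signature comes from the function above):

    import numpy
    from jrodata import hildebrand_sekhon   # assumed module name for this file

    navg = 10                                # hypothetical number of incoherent averages
    # synthetic incoherently averaged white-noise spectrum (profiles x heights), mean ~ 1.0
    spc = numpy.random.chisquare(2*navg, (64, 100)) / (2.0*navg)
    noiselevel = hildebrand_sekhon(spc, navg)
    print(noiselevel)                        # noise level estimate, same linear units as spc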
66 66 def sorting_bruce(data, navg):
67 67
68 68 data = data.copy()
69 69
70 70 sortdata = numpy.sort(data)
71 71 lenOfData = len(data)
72 72 nums_min = lenOfData/10
73 73
74 74 if (lenOfData/10) > 0:
75 75 nums_min = lenOfData/10
76 76 else:
77 77 nums_min = 0
78 78
79 79 rtest = 1.0 + 1.0/navg
80 80
81 81 sum = 0.
82 82
83 83 sumq = 0.
84 84
85 85 j = 0
86 86
87 87 cont = 1
88 88
89 89 while((cont==1)and(j<lenOfData)):
90 90
91 91 sum += sortdata[j]
92 92
93 93 sumq += sortdata[j]**2
94 94
95 95 j += 1
96 96
97 97 if j > nums_min:
98 98 if ((sumq*j) <= (rtest*sum**2)):
99 99 lnoise = sum / j
100 100 else:
101 101 j = j - 1
102 102 sum = sum - sortdata[j]
103 103 sumq = sumq - sortdata[j]**2
104 104 cont = 0
105 105
106 106 if j == nums_min:
107 107 lnoise = sum /j
108 108
109 109 return lnoise
110 110
111 111 class JROData:
112 112
113 113 # m_BasicHeader = BasicHeader()
114 114 # m_ProcessingHeader = ProcessingHeader()
115 115
116 116 systemHeaderObj = SystemHeader()
117 117
118 118 radarControllerHeaderObj = RadarControllerHeader()
119 119
120 120 # data = None
121 121
122 122 type = None
123 123
124 124 dtype = None
125 125
126 126 # nChannels = None
127 127
128 128 # nHeights = None
129 129
130 130 nProfiles = None
131 131
132 132 heightList = None
133 133
134 134 channelList = None
135 135
136 136 flagNoData = True
137 137
138 138 flagTimeBlock = False
139 139
140 140 utctime = None
141 141
142 142 blocksize = None
143 143
144 144 nCode = None
145 145
146 146 nBaud = None
147 147
148 148 code = None
149 149
150 150 flagDecodeData = False #assume the data is not decoded
151 151
152 152 flagDeflipData = False #assume the data has not been deflipped
153 153
154 154 flagShiftFFT = False
155 155
156 156 ippSeconds = None
157 157
158 158 timeInterval = None
159 159
160 160 nCohInt = None
161 161
162 162 noise = None
163 163
164 164 windowOfFilter = 1
165 165
166 166 #Speed of light
167 167 C = 3e8
168 168
169 169 frequency = 49.92e6
170 170
171 171 def __init__(self):
172 172
173 173 raise ValueError, "This class has not been implemented"
174 174
175 175 def copy(self, inputObj=None):
176 176
177 177 if inputObj == None:
178 178 return copy.deepcopy(self)
179 179
180 180 for key in inputObj.__dict__.keys():
181 181 self.__dict__[key] = inputObj.__dict__[key]
182 182
183 183 def deepcopy(self):
184 184
185 185 return copy.deepcopy(self)
186 186
187 187 def isEmpty(self):
188 188
189 189 return self.flagNoData
190 190
191 191 def getNoise(self):
192 192
193 193 raise ValueError, "Not implemented"
194 194
195 195 def getNChannels(self):
196 196
197 197 return len(self.channelList)
198 198
199 199 def getChannelIndexList(self):
200 200
201 201 return range(self.nChannels)
202 202
203 203 def getNHeights(self):
204 204
205 205 return len(self.heightList)
206 206
207 207 def getHeiRange(self, extrapoints=0):
208 208
209 209 heis = self.heightList
210 210 # deltah = self.heightList[1] - self.heightList[0]
211 211 #
212 212 # heis.append(self.heightList[-1])
213 213
214 214 return heis
215 215
216 216 def getDatatime(self):
217 217
218 218 datatime = datetime.datetime.utcfromtimestamp(self.utctime)
219 219 return datatime
220 220
221 221 def getTimeRange(self):
222 222
223 223 datatime = []
224 224
225 225 datatime.append(self.utctime)
226 226 datatime.append(self.utctime + self.timeInterval)
227 227
228 228 datatime = numpy.array(datatime)
229 229
230 230 return datatime
231 231
232 232 def getFmax(self):
233 233
234 234 PRF = 1./(self.ippSeconds * self.nCohInt)
235 235
236 236 fmax = PRF/2.
237 237
238 238 return fmax
239 239
240 240 def getVmax(self):
241 241
242 242 _lambda = self.C/self.frequency
243 243
244 244 vmax = self.getFmax() * _lambda
245 245
246 246 return vmax
247 247
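A worked example of the two formulas above, under hypothetical settings (ippSeconds = 0.01 s, nCohInt = 1) together with the class defaults C = 3e8 m/s and frequency = 49.92e6 Hz:

    PRF     = 1. / (0.01 * 1)        # 100 Hz
    fmax    = PRF / 2.               # 50 Hz
    _lambda = 3e8 / 49.92e6          # ~6.01 m
    vmax    = fmax * _lambda         # ~300 m/s, the value getVmax() would return here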
248 248 nChannels = property(getNChannels, "I'm the 'nChannel' property.")
249 249 channelIndexList = property(getChannelIndexList, "I'm the 'channelIndexList' property.")
250 250 nHeights = property(getNHeights, "I'm the 'nHeights' property.")
251 251 noise = property(getNoise, "I'm the 'noise' property.")
252 252 datatime = property(getDatatime, "I'm the 'datatime' property")
253 253
254 254 class Voltage(JROData):
255 255
256 256 #data is a 2-dimensional numpy array (channels, heights)
257 257 data = None
258 258
259 259 def __init__(self):
260 260 '''
261 261 Constructor
262 262 '''
263 263
264 264 self.radarControllerHeaderObj = RadarControllerHeader()
265 265
266 266 self.systemHeaderObj = SystemHeader()
267 267
268 268 self.type = "Voltage"
269 269
270 270 self.data = None
271 271
272 272 self.dtype = None
273 273
274 274 # self.nChannels = 0
275 275
276 276 # self.nHeights = 0
277 277
278 278 self.nProfiles = None
279 279
280 280 self.heightList = None
281 281
282 282 self.channelList = None
283 283
284 284 # self.channelIndexList = None
285 285
286 286 self.flagNoData = True
287 287
288 288 self.flagTimeBlock = False
289 289
290 290 self.utctime = None
291 291
292 292 self.nCohInt = None
293 293
294 294 self.blocksize = None
295 295
296 296 self.flagDecodeData = False #assume the data is not decoded
297 297
298 298 self.flagDeflipData = False #assume the data has not been deflipped
299 299
300 300 self.flagShiftFFT = False
301 301
302 302
303 303 def getNoisebyHildebrand(self):
304 304 """
305 305 Determine the noise level using the Hildebrand-Sekhon method
306 306
307 307 Return:
308 308 noiselevel
309 309 """
310 310
311 311 for channel in range(self.nChannels):
312 312 daux = self.data_spc[channel,:,:]
313 313 self.noise[channel] = hildebrand_sekhon(daux, self.nCohInt)
314 314
315 315 return self.noise
316 316
317 317 def getNoise(self, type = 1):
318 318
319 319 self.noise = numpy.zeros(self.nChannels)
320 320
321 321 if type == 1:
322 322 noise = self.getNoisebyHildebrand()
323 323
324 324 return 10*numpy.log10(noise)
325 325
326 326 class Spectra(JROData):
327 327
328 328 #data_spc is a 3-dimensional numpy array (channels, profiles, heights)
329 329 data_spc = None
330 330
331 331 #data_cspc is a 3-dimensional numpy array (channels, pairs, heights)
332 332 data_cspc = None
333 333
334 334 #data_dc is a 2-dimensional numpy array (channels, heights)
335 335 data_dc = None
336 336
337 337 nFFTPoints = None
338 338
339 339 nPairs = None
340 340
341 341 pairsList = None
342 342
343 343 nIncohInt = None
344 344
345 345 wavelength = None #Needed to compute the velocity range from the frequency
346 346
347 347 nCohInt = None #required to determine the timeInterval value
348 348
349 349 def __init__(self):
350 350 '''
351 351 Constructor
352 352 '''
353 353
354 354 self.radarControllerHeaderObj = RadarControllerHeader()
355 355
356 356 self.systemHeaderObj = SystemHeader()
357 357
358 358 self.type = "Spectra"
359 359
360 360 # self.data = None
361 361
362 362 self.dtype = None
363 363
364 364 # self.nChannels = 0
365 365
366 366 # self.nHeights = 0
367 367
368 368 self.nProfiles = None
369 369
370 370 self.heightList = None
371 371
372 372 self.channelList = None
373 373
374 374 # self.channelIndexList = None
375 375
376 376 self.flagNoData = True
377 377
378 378 self.flagTimeBlock = False
379 379
380 380 self.utctime = None
381 381
382 382 self.nCohInt = None
383 383
384 384 self.nIncohInt = None
385 385
386 386 self.blocksize = None
387 387
388 388 self.nFFTPoints = None
389 389
390 390 self.wavelength = None
391 391
392 392 self.flagDecodeData = False #assume the data is not decoded
393 393
394 394 self.flagDeflipData = False #assume the data has not been deflipped
395 395
396 396 self.flagShiftFFT = False
397 397
398 398 def getNoisebyHildebrand(self):
399 399 """
400 400 Determine the noise level using the Hildebrand-Sekhon method
401 401
402 402 Return:
403 403 noiselevel
404 404 """
405 405
406 406 for channel in range(self.nChannels):
407 407 daux = self.data_spc[channel,:,:]
408 408 self.noise[channel] = hildebrand_sekhon(daux, self.nIncohInt)
409 409
410 410 return self.noise
411 411
412 412 def getNoisebyWindow(self, heiIndexMin=0, heiIndexMax=-1, freqIndexMin=0, freqIndexMax=-1):
413 413 """
414 414 Determine the channel noise using the window given by the coordinates:
415 415 (heiIndexMin, freqIndexMin) to (heiIndexMax, freqIndexMax)
416 416
417 417 Inputs:
418 418 heiIndexMin: lower limit of the height axis
419 419 heiIndexMax: upper limit of the height axis
420 420 freqIndexMin: lower limit of the frequency axis
421 421 freqIndexMax: upper limit of the frequency axis
422 422 """
423 423
424 424 data = self.data_spc[:, heiIndexMin:heiIndexMax, freqIndexMin:freqIndexMax]
425 425
426 426 for channel in range(self.nChannels):
427 427 daux = data[channel,:,:]
428 428 self.noise[channel] = numpy.average(daux)
429 429
430 430 return self.noise
431 431
432 432 def getNoisebySort(self):
433 433
434 434 for channel in range(self.nChannels):
435 435 daux = self.data_spc[channel,:,:]
436 436 self.noise[channel] = sorting_bruce(daux, self.nIncohInt)
437 437
438 438 return self.noise
439 439
440 440 def getNoise(self, type = 1):
441 441
442 442 self.noise = numpy.zeros(self.nChannels)
443 443
444 444 if type == 1:
445 445 noise = self.getNoisebyHildebrand()
446 446
447 447 if type == 2:
448 448 noise = self.getNoisebySort()
449 449
450 450 if type == 3:
451 451 noise = self.getNoisebyWindow()
452 452
453 453 return noise
454 454
455 455
456 456 def getFreqRange(self, extrapoints=0):
457 457
458 458 deltafreq = self.getFmax() / self.nFFTPoints
459 459 freqrange = deltafreq*(numpy.arange(self.nFFTPoints+extrapoints)-self.nFFTPoints/2.) - deltafreq/2
460 460
461 461 return freqrange
462 462
463 463 def getVelRange(self, extrapoints=0):
464 464
465 465 deltav = self.getVmax() / self.nFFTPoints
466 466 velrange = deltav*(numpy.arange(self.nFFTPoints+extrapoints)-self.nFFTPoints/2.) - deltav/2
467 467
468 468 return velrange
469 469
470 470 def getNPairs(self):
471 471
472 472 return len(self.pairsList)
473 473
474 474 def getPairsIndexList(self):
475 475
476 476 return range(self.nPairs)
477 477
478 478 def getNormFactor(self):
479 479 pwcode = 1
480 480 if self.flagDecodeData:
481 481 pwcode = numpy.sum(self.code[0]**2)
482 482 normFactor = min(self.nFFTPoints,self.nProfiles)*self.nIncohInt*self.nCohInt*self.windowOfFilter*pwcode
483 483
484 484 return normFactor
485 485
486 def getFlagCspc(self):
487
488 if self.data_cspc is None:
489 return True
490
491 return False
492
493 def getFlagDc(self):
494
495 if self.data_dc is None:
496 return True
497
498 return False
499
486 500 nPairs = property(getNPairs, "I'm the 'nPairs' property.")
487 501 pairsIndexList = property(getPairsIndexList, "I'm the 'pairsIndexList' property.")
488 502 normFactor = property(getNormFactor, "I'm the 'getNormFactor' property.")
503 flag_cspc = property(getFlagCspc)
504 flag_dc = property(getFlagDc)
489 505
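A short usage sketch of the two properties added in this commit (hedged: the module name and the array shape are assumptions; the properties only report whether data_cspc / data_dc have been filled in):

    import numpy
    from jrodata import Spectra            # assumed module name for this file

    spc = Spectra()
    print(spc.flag_cspc)                   # True  -> data_cspc is still None
    print(spc.flag_dc)                     # True  -> data_dc is still None

    spc.data_cspc = numpy.zeros((1, 16, 100), dtype=complex)   # hypothetical cross-spectra
    print(spc.flag_cspc)                   # False -> cross-spectra are now available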
490 506 class SpectraHeis(JROData):
491 507
492 508 data_spc = None
493 509
494 510 data_cspc = None
495 511
496 512 data_dc = None
497 513
498 514 nFFTPoints = None
499 515
500 516 nPairs = None
501 517
502 518 pairsList = None
503 519
504 520 nIncohInt = None
505 521
506 522 def __init__(self):
507 523
508 524 self.radarControllerHeaderObj = RadarControllerHeader()
509 525
510 526 self.systemHeaderObj = SystemHeader()
511 527
512 528 self.type = "SpectraHeis"
513 529
514 530 self.dtype = None
515 531
516 532 # self.nChannels = 0
517 533
518 534 # self.nHeights = 0
519 535
520 536 self.nProfiles = None
521 537
522 538 self.heightList = None
523 539
524 540 self.channelList = None
525 541
526 542 # self.channelIndexList = None
527 543
528 544 self.flagNoData = True
529 545
530 546 self.flagTimeBlock = False
531 547
532 548 self.nPairs = 0
533 549
534 550 self.utctime = None
535 551
536 552 self.blocksize = None
@@ -1,2573 +1,2574
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JRODataIO.py 169 2012-11-19 21:57:03Z murco $
5 5 '''
6 6
7 7 import os, sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import time, datetime
13 13
14 14 from jrodata import *
15 15 from jroheaderIO import *
16 16 from jroprocessing import *
17 17
18 18 LOCALTIME = -18000
19 19
20 20 def isNumber(str):
21 21 """
22 22 Checks whether the characters that make up a string can be converted to a number.
23 23
24 24 Exceptions:
25 25 If a given string cannot be converted to a number
26 26 Input:
27 27 str, string to be analyzed to determine whether or not it can be converted to a number
28 28
29 29 Return:
30 30 True : if the string is numeric
31 31 False : if the string is not numeric
32 32 """
33 33 try:
34 34 float( str )
35 35 return True
36 36 except:
37 37 return False
38 38
39 39 def isThisFileinRange(filename, startUTSeconds, endUTSeconds):
40 40 """
41 41 This function determines whether or not a data file falls within the specified date range.
42 42
43 43 Inputs:
44 44 filename : full name of the data file in Jicamarca format (.r)
45 45
46 46 startUTSeconds : start date of the selected range, given in
47 47 seconds counted from 01/01/1970.
48 48 endUTSeconds : end date of the selected range, given in
49 49 seconds counted from 01/01/1970.
50 50
51 51 Return:
52 52 Boolean : returns True if the data file contains data within the specified
53 53 date range, otherwise returns False.
54 54
55 55 Exceptions:
56 56 If the file does not exist or cannot be opened
57 57 If the header cannot be read.
58 58
59 59 """
60 60 basicHeaderObj = BasicHeader(LOCALTIME)
61 61
62 62 try:
63 63 fp = open(filename,'rb')
64 64 except:
65 65 raise IOError, "The file %s can't be opened" %(filename)
66 66
67 67 sts = basicHeaderObj.read(fp)
68 68 fp.close()
69 69
70 70 if not(sts):
71 71 print "Skipping the file %s because it has not a valid header" %(filename)
72 72 return 0
73 73
74 74 if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
75 75 return 0
76 76
77 77 return 1
78 78
79 79 def isFileinThisTime(filename, startTime, endTime):
80 80 """
81 81 Returns 1 if the data file falls within the specified time range.
82 82
83 83 Inputs:
84 84 filename : full name of the data file in Jicamarca format (.r)
85 85
86 86 startTime : start time of the selected range, as a datetime.time object
87 87
88 88 endTime : end time of the selected range, as a datetime.time object
89 89
90 90 Return:
91 91 Boolean : returns True if the data file contains data within the specified
92 92 time range, otherwise returns False.
93 93
94 94 Exceptions:
95 95 If the file does not exist or cannot be opened
96 96 If the header cannot be read.
97 97
98 98 """
99 99
100 100
101 101 try:
102 102 fp = open(filename,'rb')
103 103 except:
104 104 raise IOError, "The file %s can't be opened" %(filename)
105 105
106 106 basicHeaderObj = BasicHeader(LOCALTIME)
107 107 sts = basicHeaderObj.read(fp)
108 108 fp.close()
109 109
110 110 thisTime = basicHeaderObj.datatime.time()
111 111
112 112 if not(sts):
113 113 print "Skipping the file %s because it has not a valid header" %(filename)
114 114 return 0
115 115
116 116 if not ((startTime <= thisTime) and (endTime > thisTime)):
117 117 return 0
118 118
119 119 return 1
120 120
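A usage sketch for the two checks above (hedged: the module name "jrodataIO", the file path and the numeric bounds are placeholders; both functions expect a Jicamarca raw-data file with a readable basic header):

    import datetime
    from jrodataIO import isThisFileinRange, isFileinThisTime   # assumed module name

    filename = "/data/d2012325/D2012325001.r"                    # hypothetical file

    # absolute range, in seconds counted from 01/01/1970
    ok_date = isThisFileinRange(filename, 1353369600, 1353456000)

    # time-of-day window
    ok_time = isFileinThisTime(filename, datetime.time(8, 0, 0), datetime.time(18, 0, 0))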
121 121 def getlastFileFromPath(path, ext):
122 122 """
123 123 Filters the fileList, keeping only the files that match the "PYYYYDDDSSS.ext" format;
124 124 once filtered, it returns the last file of the remaining list.
125 125
126 126 Input:
127 127 fileList : list containing all the files (without path) of a given folder
128 128 ext : extension of the files contained in the folder
129 129
130 130 Return:
131 131 The last file of the given folder, without the path.
132 132 """
133 133 validFilelist = []
134 134 fileList = os.listdir(path)
135 135
136 136 # 0 1234 567 89A BCDE
137 137 # H YYYY DDD SSS .ext
138 138
139 139 for file in fileList:
140 140 try:
141 141 year = int(file[1:5])
142 142 doy = int(file[5:8])
143 143
144 144
145 145 except:
146 146 continue
147 147
148 148 if (os.path.splitext(file)[-1].lower() != ext.lower()):
149 149 continue
150 150
151 151 validFilelist.append(file)
152 152
153 153 if validFilelist:
154 154 validFilelist = sorted( validFilelist, key=str.lower )
155 155 return validFilelist[-1]
156 156
157 157 return None
158 158
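A usage sketch (hedged: the folder and its contents are hypothetical). Files that do not match the xYYYYDDDSSS.ext pattern are filtered out and the last remaining name is returned without its path:

    from jrodataIO import getlastFileFromPath    # assumed module name

    lastfile = getlastFileFromPath("/data/d2012325", ".r")
    print(lastfile)                              # e.g. "D2012325007.r", or None if nothing matches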
159 159 def checkForRealPath(path, year, doy, set, ext):
160 160 """
161 161 Since Linux is case sensitive, checkForRealPath finds the correct name of a path
162 162 by trying several upper/lower case name combinations in order to determine
163 163 the exact path of a given file.
164 164
165 165 Example :
166 166 the correct file name is .../.../D2009307/P2009307367.ext
167 167
168 168 The function then tries the following combinations
169 169 .../.../y2009307367.ext
170 170 .../.../Y2009307367.ext
171 171 .../.../x2009307/y2009307367.ext
172 172 .../.../x2009307/Y2009307367.ext
173 173 .../.../X2009307/y2009307367.ext
174 174 .../.../X2009307/Y2009307367.ext
175 175 the last letter combination being, in this case, identical to the file being searched
176 176
177 177 Return:
178 178 If it finds the right combination it returns the full path and the file name;
179 179 otherwise it returns None as the path and the last upper-case name combination
180 180 as the filename
181 181 """
182 182 fullfilename = None
183 183 find_flag = False
184 184 filename = None
185 185
186 186 prefixDirList = [None,'d','D']
187 187 if ext.lower() == ".r": #voltage
188 188 prefixFileList = ['d','D']
189 189 elif ext.lower() == ".pdata": #spectra
190 190 prefixFileList = ['p','P']
191 191 else:
192 192 return None, filename
193 193
194 194 #sweep over the possible combinations
195 195 for prefixDir in prefixDirList:
196 196 thispath = path
197 197 if prefixDir != None:
198 198 #build the directory name xYYYYDDD (x=d or x=D)
199 199 thispath = os.path.join(path, "%s%04d%03d" % ( prefixDir, year, doy ))
200 200
201 201 for prefixFile in prefixFileList: #sweep over the two possible combinations of "D"
202 202 filename = "%s%04d%03d%03d%s" % ( prefixFile, year, doy, set, ext ) #build the file name xYYYYDDDSSS.ext
203 203 fullfilename = os.path.join( thispath, filename ) #build the full path
204 204
205 205 if os.path.exists( fullfilename ): #check that it exists
206 206 find_flag = True
207 207 break
208 208 if find_flag:
209 209 break
210 210
211 211 if not(find_flag):
212 212 return None, filename
213 213
214 214 return fullfilename, filename
215 215
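A usage sketch of the case-sensitive lookup above (hedged: the base path and the year/doy/set numbers are placeholders):

    from jrodataIO import checkForRealPath       # assumed module name

    fullfilename, filename = checkForRealPath("/data", 2009, 307, 367, ".r")
    if fullfilename is None:
        print("file not found; last name tried was %s" % filename)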
216 216 class JRODataIO:
217 217
218 218 c = 3E8
219 219
220 220 isConfig = False
221 221
222 222 basicHeaderObj = BasicHeader(LOCALTIME)
223 223
224 224 systemHeaderObj = SystemHeader()
225 225
226 226 radarControllerHeaderObj = RadarControllerHeader()
227 227
228 228 processingHeaderObj = ProcessingHeader()
229 229
230 230 online = 0
231 231
232 232 dtype = None
233 233
234 234 pathList = []
235 235
236 236 filenameList = []
237 237
238 238 filename = None
239 239
240 240 ext = None
241 241
242 242 flagIsNewFile = 1
243 243
244 244 flagTimeBlock = 0
245 245
246 246 flagIsNewBlock = 0
247 247
248 248 fp = None
249 249
250 250 firstHeaderSize = 0
251 251
252 252 basicHeaderSize = 24
253 253
254 254 versionFile = 1103
255 255
256 256 fileSize = None
257 257
258 258 ippSeconds = None
259 259
260 260 fileSizeByHeader = None
261 261
262 262 fileIndex = None
263 263
264 264 profileIndex = None
265 265
266 266 blockIndex = None
267 267
268 268 nTotalBlocks = None
269 269
270 270 maxTimeStep = 30
271 271
272 272 lastUTTime = None
273 273
274 274 datablock = None
275 275
276 276 dataOut = None
277 277
278 278 blocksize = None
279 279
280 280 def __init__(self):
281 281
282 282 raise ValueError, "Not implemented"
283 283
284 284 def run(self):
285 285
286 286 raise ValueError, "Not implemented"
287 287
288 288 def getOutput(self):
289 289
290 290 return self.dataOut
291 291
292 292 class JRODataReader(JRODataIO, ProcessingUnit):
293 293
294 294 nReadBlocks = 0
295 295
296 296 delay = 10 #number of seconds to wait for a new file
297 297
298 298 nTries = 3 #number of tries
299 299
300 300 nFiles = 3 #number of files to search
301 301
302 302 flagNoMoreFiles = 0
303 303
304 304 def __init__(self):
305 305
306 306 """
307 307
308 308 """
309 309
310 310 raise ValueError, "This method has not been implemented"
311 311
312 312
313 313 def createObjByDefault(self):
314 314 """
315 315
316 316 """
317 317 raise ValueError, "This method has not been implemented"
318 318
319 319 def getBlockDimension(self):
320 320
321 321 raise ValueError, "No implemented"
322 322
323 323 def __searchFilesOffLine(self,
324 324 path,
325 325 startDate,
326 326 endDate,
327 327 startTime=datetime.time(0,0,0),
328 328 endTime=datetime.time(23,59,59),
329 329 set=None,
330 330 expLabel='',
331 331 ext='.r',
332 332 walk=True):
333 333
334 334 pathList = []
335 335
336 336 if not walk:
337 337 pathList.append(path)
338 338
339 339 else:
340 340 dirList = []
341 341 for thisPath in os.listdir(path):
342 342 if os.path.isdir(os.path.join(path,thisPath)):
343 343 dirList.append(thisPath)
344 344
345 345 if not(dirList):
346 346 return None, None
347 347
348 348 thisDate = startDate
349 349
350 350 while(thisDate <= endDate):
351 351 year = thisDate.timetuple().tm_year
352 352 doy = thisDate.timetuple().tm_yday
353 353
354 354 match = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy))
355 355 if len(match) == 0:
356 356 thisDate += datetime.timedelta(1)
357 357 continue
358 358
359 359 pathList.append(os.path.join(path,match[0],expLabel))
360 360 thisDate += datetime.timedelta(1)
361 361
362 362 if pathList == []:
363 363 print "Any folder was found for the date range: %s-%s" %(startDate, endDate)
364 364 return None, None
365 365
366 366 print "%d folder(s) was(were) found for the date range: %s-%s" %(len(pathList), startDate, endDate)
367 367
368 368 filenameList = []
369 369 for thisPath in pathList:
370 370
371 371 fileList = glob.glob1(thisPath, "*%s" %ext)
372 372 fileList.sort()
373 373
374 374 for file in fileList:
375 375
376 376 filename = os.path.join(thisPath,file)
377 377
378 378 if isFileinThisTime(filename, startTime, endTime):
379 379 filenameList.append(filename)
380 380
381 381 if not(filenameList):
382 382 print "Any file was found for the time range %s - %s" %(startTime, endTime)
383 383 return None, None
384 384
385 385 print "%d file(s) was(were) found for the time range: %s - %s" %(len(filenameList), startTime, endTime)
386 386
387 387 self.filenameList = filenameList
388 388
389 389 return pathList, filenameList
390 390
391 391 def __searchFilesOnLine(self, path, expLabel = "", ext = None, walk=True):
392 392
393 393 """
394 394 Looks for the last file of the last folder (whether or not determined by startDateTime) and
395 395 returns the file found together with other data.
396 396
397 397 Input:
398 398 path : folder containing the data files
399 399
400 400 expLabel : name of the sub-experiment (subfolder)
401 401
402 402 ext : extension of the files
403 403
404 404 walk : if enabled, the search descends into the doy subdirectories (doypath)
405 405
406 406 Return:
407 407 directory : the directory where the file was found
408 408 filename : the last file of the given folder
409 409 year : the year
410 410 doy : the day of year
411 411 set : the file set number
412 412
413 413
414 414 """
415 415 dirList = []
416 416
417 417 if walk:
418 418
419 419 #Keep only the directories
420 420 for thisPath in os.listdir(path):
421 421 if os.path.isdir(os.path.join(path, thisPath)):
422 422 dirList.append(thisPath)
423 423
424 424 if not(dirList):
425 425 return None, None, None, None, None
426 426
427 427 dirList = sorted( dirList, key=str.lower )
428 428
429 429 doypath = dirList[-1]
430 430 fullpath = os.path.join(path, doypath, expLabel)
431 431
432 432 else:
433 433 fullpath = path
434 434
435 435 filename = getlastFileFromPath(fullpath, ext)
436 436
437 437 if not(filename):
438 438 return None, None, None, None, None
439 439
440 440 if not(self.__verifyFile(os.path.join(fullpath, filename))):
441 441 return None, None, None, None, None
442 442
443 443 year = int( filename[1:5] )
444 444 doy = int( filename[5:8] )
445 445 set = int( filename[8:11] )
446 446
447 447 return fullpath, filename, year, doy, set
448 448
449 449
450 450
451 451 def __setNextFileOffline(self):
452 452
453 453 idFile = self.fileIndex
454 454
455 455 while (True):
456 456 idFile += 1
457 457 if not(idFile < len(self.filenameList)):
458 458 self.flagNoMoreFiles = 1
459 459 print "No more Files"
460 460 return 0
461 461
462 462 filename = self.filenameList[idFile]
463 463
464 464 if not(self.__verifyFile(filename)):
465 465 continue
466 466
467 467 fileSize = os.path.getsize(filename)
468 468 fp = open(filename,'rb')
469 469 break
470 470
471 471 self.flagIsNewFile = 1
472 472 self.fileIndex = idFile
473 473 self.filename = filename
474 474 self.fileSize = fileSize
475 475 self.fp = fp
476 476
477 477 print "Setting the file: %s"%self.filename
478 478
479 479 return 1
480 480
481 481 def __setNextFileOnline(self):
482 482 """
483 483 Looks for the next file with enough data to be read, inside a specific folder; if
484 484 no valid file is found it waits for a given time and then looks in the next n possible
485 485 files.
486 486
487 487 Affected:
488 488 self.flagIsNewFile
489 489 self.filename
490 490 self.fileSize
491 491 self.fp
492 492 self.set
493 493 self.flagNoMoreFiles
494 494
495 495 Return:
496 496 0 : if, after searching for the next valid file, none could be found
497 497 1 : if the file was opened successfully and is ready to be read
498 498
499 499 Exceptions:
500 500 If a given file cannot be opened
501 501 """
502 502 nFiles = 0
503 503 fileOk_flag = False
504 504 firstTime_flag = True
505 505
506 506 self.set += 1
507 507
508 508 #look for the first available file
509 509 fullfilename, filename = checkForRealPath( self.path, self.year, self.doy, self.set, self.ext )
510 510 if fullfilename:
511 511 if self.__verifyFile(fullfilename, False):
512 512 fileOk_flag = True
513 513
514 514 #if no file is found, wait and then search again
515 515 if not(fileOk_flag):
516 516 for nFiles in range(self.nFiles+1): #search the next self.nFiles+1 possible files
517 517
518 518 if firstTime_flag: #the first time, loop self.nTries times
519 519 tries = self.nTries
520 520 else:
521 521 tries = 1 #after the first time, only try once
522 522
523 523 for nTries in range( tries ):
524 524 if firstTime_flag:
525 525 print "\tWaiting %0.2f sec for the file \"%s\" , try %03d ..." % ( self.delay, filename, nTries+1 )
526 526 time.sleep( self.delay )
527 527 else:
528 528 print "\tSearching next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext)
529 529
530 530 fullfilename, filename = checkForRealPath( self.path, self.year, self.doy, self.set, self.ext )
531 531 if fullfilename:
532 532 if self.__verifyFile(fullfilename):
533 533 fileOk_flag = True
534 534 break
535 535
536 536 if fileOk_flag:
537 537 break
538 538
539 539 firstTime_flag = False
540 540
541 541 print "\tSkipping the file \"%s\" due to this file doesn't exist" % filename
542 542 self.set += 1
543 543
544 544 if nFiles == (self.nFiles-1): #if the file is not found, move on to the next folder and search there
545 545 self.set = 0
546 546 self.doy += 1
547 547
548 548 if fileOk_flag:
549 549 self.fileSize = os.path.getsize( fullfilename )
550 550 self.filename = fullfilename
551 551 self.flagIsNewFile = 1
552 552 if self.fp != None: self.fp.close()
553 553 self.fp = open(fullfilename, 'rb')
554 554 self.flagNoMoreFiles = 0
555 555 print 'Setting the file: %s' % fullfilename
556 556 else:
557 557 self.fileSize = 0
558 558 self.filename = None
559 559 self.flagIsNewFile = 0
560 560 self.fp = None
561 561 self.flagNoMoreFiles = 1
562 562 print 'No more Files'
563 563
564 564 return fileOk_flag
565 565
566 566
567 567 def setNextFile(self):
568 568 if self.fp != None:
569 569 self.fp.close()
570 570
571 571 if self.online:
572 572 newFile = self.__setNextFileOnline()
573 573 else:
574 574 newFile = self.__setNextFileOffline()
575 575
576 576 if not(newFile):
577 577 return 0
578 578
579 579 self.__readFirstHeader()
580 580 self.nReadBlocks = 0
581 581 return 1
582 582
583 583 def __waitNewBlock(self):
584 584 """
585 585 Return 1 if a new data block was found, 0 otherwise.
586 586
587 587 In offline reading mode it always returns 0.
588 588 """
589 589 if not self.online:
590 590 return 0
591 591
592 592 if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
593 593 return 0
594 594
595 595 currentPointer = self.fp.tell()
596 596
597 597 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
598 598
599 599 for nTries in range( self.nTries ):
600 600
601 601 self.fp.close()
602 602 self.fp = open( self.filename, 'rb' )
603 603 self.fp.seek( currentPointer )
604 604
605 605 self.fileSize = os.path.getsize( self.filename )
606 606 currentSize = self.fileSize - currentPointer
607 607
608 608 if ( currentSize >= neededSize ):
609 609 self.__rdBasicHeader()
610 610 return 1
611 611
612 612 print "\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
613 613 time.sleep( self.delay )
614 614
615 615
616 616 return 0
617 617
618 618 def __setNewBlock(self):
619 619
620 620 if self.fp == None:
621 621 return 0
622 622
623 623 if self.flagIsNewFile:
624 624 return 1
625 625
626 626 self.lastUTTime = self.basicHeaderObj.utc
627 627 currentSize = self.fileSize - self.fp.tell()
628 628 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
629 629
630 630 if (currentSize >= neededSize):
631 631 self.__rdBasicHeader()
632 632 return 1
633 633
634 634 if self.__waitNewBlock():
635 635 return 1
636 636
637 637 if not(self.setNextFile()):
638 638 return 0
639 639
640 640 deltaTime = self.basicHeaderObj.utc - self.lastUTTime #
641 641
642 642 self.flagTimeBlock = 0
643 643
644 644 if deltaTime > self.maxTimeStep:
645 645 self.flagTimeBlock = 1
646 646
647 647 return 1
648 648
649 649
650 650 def readNextBlock(self):
651 651 if not(self.__setNewBlock()):
652 652 return 0
653 653
654 654 if not(self.readBlock()):
655 655 return 0
656 656
657 657 return 1
658 658
659 659 def __rdProcessingHeader(self, fp=None):
660 660 if fp == None:
661 661 fp = self.fp
662 662
663 663 self.processingHeaderObj.read(fp)
664 664
665 665 def __rdRadarControllerHeader(self, fp=None):
666 666 if fp == None:
667 667 fp = self.fp
668 668
669 669 self.radarControllerHeaderObj.read(fp)
670 670
671 671 def __rdSystemHeader(self, fp=None):
672 672 if fp == None:
673 673 fp = self.fp
674 674
675 675 self.systemHeaderObj.read(fp)
676 676
677 677 def __rdBasicHeader(self, fp=None):
678 678 if fp == None:
679 679 fp = self.fp
680 680
681 681 self.basicHeaderObj.read(fp)
682 682
683 683
684 684 def __readFirstHeader(self):
685 685 self.__rdBasicHeader()
686 686 self.__rdSystemHeader()
687 687 self.__rdRadarControllerHeader()
688 688 self.__rdProcessingHeader()
689 689
690 690 self.firstHeaderSize = self.basicHeaderObj.size
691 691
692 692 datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
693 693 if datatype == 0:
694 694 datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
695 695 elif datatype == 1:
696 696 datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')])
697 697 elif datatype == 2:
698 698 datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')])
699 699 elif datatype == 3:
700 700 datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')])
701 701 elif datatype == 4:
702 702 datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')])
703 703 elif datatype == 5:
704 704 datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')])
705 705 else:
706 706 raise ValueError, 'Data type was not defined'
707 707
708 708 self.dtype = datatype_str
709 709 self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
710 710 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1)
711 711 # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
712 712 # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
713 713 self.getBlockDimension()
714 714
715 715
716 716 def __verifyFile(self, filename, msgFlag=True):
717 717 msg = None
718 718 try:
719 719 fp = open(filename, 'rb')
720 720 currentPosition = fp.tell()
721 721 except:
722 722 if msgFlag:
723 723 print "The file %s can't be opened" % (filename)
724 724 return False
725 725
726 726 neededSize = self.processingHeaderObj.blockSize + self.firstHeaderSize
727 727
728 728 if neededSize == 0:
729 729 basicHeaderObj = BasicHeader(LOCALTIME)
730 730 systemHeaderObj = SystemHeader()
731 731 radarControllerHeaderObj = RadarControllerHeader()
732 732 processingHeaderObj = ProcessingHeader()
733 733
734 734 try:
735 735 if not( basicHeaderObj.read(fp) ): raise IOError
736 736 if not( systemHeaderObj.read(fp) ): raise IOError
737 737 if not( radarControllerHeaderObj.read(fp) ): raise IOError
738 738 if not( processingHeaderObj.read(fp) ): raise IOError
739 739 data_type = int(numpy.log2((processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
740 740
741 741 neededSize = processingHeaderObj.blockSize + basicHeaderObj.size
742 742
743 743 except:
744 744 if msgFlag:
745 745 print "\tThe file %s is empty or it hasn't enough data" % filename
746 746
747 747 fp.close()
748 748 return False
749 749 else:
750 750 msg = "\tSkipping the file %s due to it hasn't enough data" %filename
751 751
752 752 fp.close()
753 753 fileSize = os.path.getsize(filename)
754 754 currentSize = fileSize - currentPosition
755 755 if currentSize < neededSize:
756 756 if msgFlag and (msg != None):
757 757 print msg #print"\tSkipping the file %s due to it hasn't enough data" %filename
758 758 return False
759 759
760 760 return True
761 761
762 762 def setup(self,
763 763 path=None,
764 764 startDate=None,
765 765 endDate=None,
766 766 startTime=datetime.time(0,0,0),
767 767 endTime=datetime.time(23,59,59),
768 768 set=0,
769 769 expLabel = "",
770 770 ext = None,
771 771 online = False,
772 772 delay = 60,
773 773 walk = True):
774 774
775 775 if path == None:
776 776 raise ValueError, "The path is not valid"
777 777
778 778 if ext == None:
779 779 ext = self.ext
780 780
781 781 if online:
782 782 print "Searching files in online mode..."
783 783
784 784 for nTries in range( self.nTries ):
785 785 fullpath, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext, walk=walk)
786 786
787 787 if fullpath:
788 788 break
789 789
790 790 print '\tWaiting %0.2f sec for a valid file in %s: try %02d ...' % (self.delay, path, nTries+1)
791 791 time.sleep( self.delay )
792 792
793 793 if not(fullpath):
794 794 print "There 'isn't valied files in %s" % path
795 795 return None
796 796
797 797 self.year = year
798 798 self.doy = doy
799 799 self.set = set - 1
800 800 self.path = path
801 801
802 802 else:
803 803 print "Searching files in offline mode ..."
804 804 pathList, filenameList = self.__searchFilesOffLine(path, startDate=startDate, endDate=endDate,
805 805 startTime=startTime, endTime=endTime,
806 806 set=set, expLabel=expLabel, ext=ext,
807 807 walk=walk)
808 808
809 809 if not(pathList):
810 810 print "No *%s files into the folder %s \nfor the range: %s - %s"%(ext, path,
811 811 datetime.datetime.combine(startDate,startTime).ctime(),
812 812 datetime.datetime.combine(endDate,endTime).ctime())
813 813
814 814 sys.exit(-1)
815 815
816 816
817 817 self.fileIndex = -1
818 818 self.pathList = pathList
819 819 self.filenameList = filenameList
820 820
821 821 self.online = online
822 822 self.delay = delay
823 823 ext = ext.lower()
824 824 self.ext = ext
825 825
826 826 if not(self.setNextFile()):
827 827 if (startDate!=None) and (endDate!=None):
828 828 print "No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
829 829 elif startDate != None:
830 830 print "No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
831 831 else:
832 832 print "No files"
833 833
834 834 sys.exit(-1)
835 835
836 836 # self.updateDataHeader()
837 837
838 838 return self.dataOut
839 839
840 840 def getData():
841 841
842 842 raise ValueError, "This method has not been implemented"
843 843
844 844 def hasNotDataInBuffer():
845 845
846 846 raise ValueError, "This method has not been implemented"
847 847
848 848 def readBlock():
849 849
850 850 raise ValueError, "This method has not been implemented"
851 851
852 852 def isEndProcess(self):
853 853
854 854 return self.flagNoMoreFiles
855 855
856 856 def printReadBlocks(self):
857 857
858 858 print "Number of read blocks per file %04d" %self.nReadBlocks
859 859
860 860 def printTotalBlocks(self):
861 861
862 862 print "Number of read blocks %04d" %self.nTotalBlocks
863 863
864 864 def printInfo(self):
865 865
866 866 print self.basicHeaderObj.printInfo()
867 867 print self.systemHeaderObj.printInfo()
868 868 print self.radarControllerHeaderObj.printInfo()
869 869 print self.processingHeaderObj.printInfo()
870 870
871 871
872 872 def run(self, **kwargs):
873 873
874 874 if not(self.isConfig):
875 875
876 876 # self.dataOut = dataOut
877 877 self.setup(**kwargs)
878 878 self.isConfig = True
879 879
880 880 self.getData()
881 881
882 882 class JRODataWriter(JRODataIO, Operation):
883 883
884 884 """
885 885 This class writes data to processed files (.r or .pdata). Data is
886 886 always written in blocks.
887 887 """
888 888
889 889 blockIndex = 0
890 890
891 891 path = None
892 892
893 893 setFile = None
894 894
895 895 profilesPerBlock = None
896 896
897 897 blocksPerFile = None
898 898
899 899 nWriteBlocks = 0
900 900
901 901 def __init__(self, dataOut=None):
902 902 raise ValueError, "Not implemented"
903 903
904 904
905 905 def hasAllDataInBuffer(self):
906 906 raise ValueError, "Not implemented"
907 907
908 908
909 909 def setBlockDimension(self):
910 910 raise ValueError, "Not implemented"
911 911
912 912
913 913 def writeBlock(self):
914 914 raise ValueError, "No implemented"
915 915
916 916
917 917 def putData(self):
918 918 raise ValueError, "No implemented"
919 919
920 920 def getDataHeader(self):
921 921 """
922 922 Gets a copy of the First Header
923 923
924 924 Affected:
925 925
926 926 self.basicHeaderObj
927 927 self.systemHeaderObj
928 928 self.radarControllerHeaderObj
929 929 self.processingHeaderObj
930 930
931 931 Return:
932 932 None
933 933 """
934 934
935 935 raise ValueError, "No implemented"
936 936
937 937 def getBasicHeader(self):
938 938
939 939 self.basicHeaderObj.size = self.basicHeaderSize #bytes
940 940 self.basicHeaderObj.version = self.versionFile
941 941 self.basicHeaderObj.dataBlock = self.nTotalBlocks
942 942
943 943 utc = numpy.floor(self.dataOut.utctime)
944 944 milisecond = (self.dataOut.utctime - utc)* 1000.0
945 945
946 946 self.basicHeaderObj.utc = utc
947 947 self.basicHeaderObj.miliSecond = milisecond
948 948 self.basicHeaderObj.timeZone = 0
949 949 self.basicHeaderObj.dstFlag = 0
950 950 self.basicHeaderObj.errorCount = 0
951 951
952 952 def __writeFirstHeader(self):
953 953 """
954 954 Writes the first header of the file, i.e. the Basic header and the Long header (SystemHeader, RadarControllerHeader, ProcessingHeader)
955 955
956 956 Affected:
957 957 __dataType
958 958
959 959 Return:
960 960 None
961 961 """
962 962
963 963 # COMPUTE PARAMETERS
964 964
965 965 sizeLongHeader = self.systemHeaderObj.size + self.radarControllerHeaderObj.size + self.processingHeaderObj.size
966 966 self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader
967 967
968 968 self.basicHeaderObj.write(self.fp)
969 969 self.systemHeaderObj.write(self.fp)
970 970 self.radarControllerHeaderObj.write(self.fp)
971 971 self.processingHeaderObj.write(self.fp)
972 972
973 973 self.dtype = self.dataOut.dtype
974 974
975 975 def __setNewBlock(self):
976 976 """
977 977 If it is a new file, writes the First Header; otherwise writes only the Basic Header
978 978
979 979 Return:
980 980 0 : if nothing could be written
981 981 1 : if the Basic or the First Header was written
982 982 """
983 983 if self.fp == None:
984 984 self.setNextFile()
985 985
986 986 if self.flagIsNewFile:
987 987 return 1
988 988
989 989 if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
990 990 self.basicHeaderObj.write(self.fp)
991 991 return 1
992 992
993 993 if not( self.setNextFile() ):
994 994 return 0
995 995
996 996 return 1
997 997
998 998
999 999 def writeNextBlock(self):
1000 1000 """
1001 1001 Selects the next block of data and writes it to a file
1002 1002
1003 1003 Return:
1004 1004 0 : if the data block could not be written
1005 1005 1 : if the data block was written
1006 1006 """
1007 1007 if not( self.__setNewBlock() ):
1008 1008 return 0
1009 1009
1010 1010 self.writeBlock()
1011 1011
1012 1012 return 1
1013 1013
1014 1014 def setNextFile(self):
1015 1015 """
1016 1016 Determines the next file to be written
1017 1017
1018 1018 Affected:
1019 1019 self.filename
1020 1020 self.subfolder
1021 1021 self.fp
1022 1022 self.setFile
1023 1023 self.flagIsNewFile
1024 1024
1025 1025 Return:
1026 1026 0 : if the file cannot be written
1027 1027 1 : if the file is ready to be written
1028 1028 """
1029 1029 ext = self.ext
1030 1030 path = self.path
1031 1031
1032 1032 if self.fp != None:
1033 1033 self.fp.close()
1034 1034
1035 1035 timeTuple = time.localtime( self.dataOut.dataUtcTime)
1036 1036 subfolder = 'D%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
1037 1037
1038 1038 fullpath = os.path.join( path, subfolder )
1039 1039 if not( os.path.exists(fullpath) ):
1040 1040 os.mkdir(fullpath)
1041 1041 self.setFile = -1 #initialize the set counter
1042 1042 else:
1043 1043 filesList = os.listdir( fullpath )
1044 1044 if len( filesList ) > 0:
1045 1045 filesList = sorted( filesList, key=str.lower )
1046 1046 filen = filesList[-1]
1047 1047 # the filename must have the following format
1048 1048 # 0 1234 567 89A BCDE (hex)
1049 1049 # x YYYY DDD SSS .ext
1050 1050 if isNumber( filen[8:11] ):
1051 1051 self.setFile = int( filen[8:11] ) #initialize the set counter to the set of the last file
1052 1052 else:
1053 1053 self.setFile = -1
1054 1054 else:
1055 1055 self.setFile = -1 #initialize the set counter
1056 1056
1057 1057 setFile = self.setFile
1058 1058 setFile += 1
1059 1059
1060 1060 file = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
1061 1061 timeTuple.tm_year,
1062 1062 timeTuple.tm_yday,
1063 1063 setFile,
1064 1064 ext )
1065 1065
1066 1066 filename = os.path.join( path, subfolder, file )
1067 1067
1068 1068 fp = open( filename,'wb' )
1069 1069
1070 1070 self.blockIndex = 0
1071 1071
1072 1072 #save attributes
1073 1073 self.filename = filename
1074 1074 self.subfolder = subfolder
1075 1075 self.fp = fp
1076 1076 self.setFile = setFile
1077 1077 self.flagIsNewFile = 1
1078 1078
1079 1079 self.getDataHeader()
1080 1080
1081 1081 print 'Writing the file: %s'%self.filename
1082 1082
1083 1083 self.__writeFirstHeader()
1084 1084
1085 1085 return 1
1086 1086
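A worked example of the output name built above, with hypothetical values (optchar = "D", year 2012, day of year 325, set 3, ext = ".r"):

    file = '%s%4.4d%3.3d%3.3d%s' % ("D", 2012, 325, 3, ".r")   # -> "D2012325003.r"
    # written into the subfolder "D2012325" under the destination path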
1087 1087 def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=None, set=0, ext=None):
1088 1088 """
1089 1089 Sets the format in which the data will be saved and writes the First Header
1090 1090
1091 1091 Inputs:
1092 1092 path : destination path where the output files will be created
1093 1093 format : format in which the file will be saved
1094 1094 set : the file set number
1095 1095
1096 1096 Return:
1097 1097 0 : if the setup was not successful
1098 1098 1 : if the setup was successful
1099 1099 """
1100 1100
1101 1101 if ext == None:
1102 1102 ext = self.ext
1103 1103
1104 1104 ext = ext.lower()
1105 1105
1106 1106 self.ext = ext
1107 1107
1108 1108 self.path = path
1109 1109
1110 1110 self.setFile = set - 1
1111 1111
1112 1112 self.blocksPerFile = blocksPerFile
1113 1113
1114 1114 self.profilesPerBlock = profilesPerBlock
1115 1115
1116 1116 self.dataOut = dataOut
1117 1117
1118 1118 if not(self.setNextFile()):
1119 1119 print "There isn't a next file"
1120 1120 return 0
1121 1121
1122 1122 self.setBlockDimension()
1123 1123
1124 1124 return 1
1125 1125
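A usage sketch of how these setup parameters are normally passed through run() (hedged: VoltageReader and VoltageWriter, defined further below, are used as the concrete classes; the paths, dates and block sizes are placeholders):

    import datetime

    readerObj = VoltageReader()
    readerObj.run(path="/data",                         # hypothetical source folder
                  startDate=datetime.date(2012, 11, 20),
                  endDate=datetime.date(2012, 11, 20),
                  walk=True)
    dataOut = readerObj.getOutput()                     # a populated Voltage object

    writerObj = VoltageWriter()
    writerObj.run(dataOut,
                  path="/output",                       # hypothetical destination folder
                  blocksPerFile=100,
                  profilesPerBlock=128,
                  set=0,
                  ext=".r")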
1126 1126 def run(self, dataOut, **kwargs):
1127 1127
1128 1128 if not(self.isConfig):
1129 1129
1130 1130 self.setup(dataOut, **kwargs)
1131 1131 self.isConfig = True
1132 1132
1133 1133 self.putData()
1134 1134
1135 1135 class VoltageReader(JRODataReader):
1136 1136 """
1137 1137 This class reads voltage data from files in rawdata format (.r). Data is
1138 1138 always read in blocks. The data read (a 3-dimensional array:
1139 1139 profiles*heights*channels) is stored in the "buffer" variable.
1140 1140
1141 1141 profiles * heights * channels
1142 1142
1143 1143 This class contains instances (objects) of the BasicHeader, SystemHeader,
1144 1144 RadarControllerHeader and Voltage classes. The first three are used to store
1145 1145 data header information (metadata), and the fourth (Voltage) to obtain and store a profile of
1146 1146 data from the "buffer" each time the "getData" method is called.
1147 1147
1148 1148 Example:
1149 1149
1150 1150 dpath = "/home/myuser/data"
1151 1151
1152 1152 startTime = datetime.datetime(2010,1,20,0,0,0)
1153 1153
1154 1154 endTime = datetime.datetime(2010,1,21,23,59,59)
1155 1155
1156 1156 readerObj = VoltageReader()
1157 1157
1158 1158 readerObj.setup(dpath, startTime, endTime)
1159 1159
1160 1160 while(True):
1161 1161
1162 1162 #to get one profile
1163 1163 profile = readerObj.getData()
1164 1164
1165 1165 #print the profile
1166 1166 print profile
1167 1167
1168 1168 #If you want to see all datablock
1169 1169 print readerObj.datablock
1170 1170
1171 1171 if readerObj.flagNoMoreFiles:
1172 1172 break
1173 1173
1174 1174 """
1175 1175
1176 1176 ext = ".r"
1177 1177
1178 1178 optchar = "D"
1179 1179 dataOut = None
1180 1180
1181 1181
1182 1182 def __init__(self):
1183 1183 """
1184 1184 Initializer of the VoltageReader class for reading voltage data.
1185 1185
1186 1186 Input:
1187 1187 dataOut : object of the Voltage class. This object will be used to
1188 1188 store a data profile every time one is requested
1189 1189 (getData). The profile is obtained from the data buffer;
1190 1190 if the buffer is empty a new data block is
1191 1191 read.
1192 1192 If this parameter is not given, one is created internally.
1193 1193
1194 1194 Affected variables:
1195 1195 self.dataOut
1196 1196
1197 1197 Return:
1198 1198 None
1199 1199 """
1200 1200
1201 1201 self.isConfig = False
1202 1202
1203 1203 self.datablock = None
1204 1204
1205 1205 self.utc = 0
1206 1206
1207 1207 self.ext = ".r"
1208 1208
1209 1209 self.optchar = "D"
1210 1210
1211 1211 self.basicHeaderObj = BasicHeader(LOCALTIME)
1212 1212
1213 1213 self.systemHeaderObj = SystemHeader()
1214 1214
1215 1215 self.radarControllerHeaderObj = RadarControllerHeader()
1216 1216
1217 1217 self.processingHeaderObj = ProcessingHeader()
1218 1218
1219 1219 self.online = 0
1220 1220
1221 1221 self.fp = None
1222 1222
1223 1223 self.idFile = None
1224 1224
1225 1225 self.dtype = None
1226 1226
1227 1227 self.fileSizeByHeader = None
1228 1228
1229 1229 self.filenameList = []
1230 1230
1231 1231 self.filename = None
1232 1232
1233 1233 self.fileSize = None
1234 1234
1235 1235 self.firstHeaderSize = 0
1236 1236
1237 1237 self.basicHeaderSize = 24
1238 1238
1239 1239 self.pathList = []
1240 1240
1241 1241 self.filenameList = []
1242 1242
1243 1243 self.lastUTTime = 0
1244 1244
1245 1245 self.maxTimeStep = 30
1246 1246
1247 1247 self.flagNoMoreFiles = 0
1248 1248
1249 1249 self.set = 0
1250 1250
1251 1251 self.path = None
1252 1252
1253 1253 self.profileIndex = 9999
1254 1254
1255 1255 self.delay = 3 #seconds
1256 1256
1257 1257 self.nTries = 3 #number of tries
1258 1258
1259 1259 self.nFiles = 3 #number of files to search
1260 1260
1261 1261 self.nReadBlocks = 0
1262 1262
1263 1263 self.flagIsNewFile = 1
1264 1264
1265 1265 self.ippSeconds = 0
1266 1266
1267 1267 self.flagTimeBlock = 0
1268 1268
1269 1269 self.flagIsNewBlock = 0
1270 1270
1271 1271 self.nTotalBlocks = 0
1272 1272
1273 1273 self.blocksize = 0
1274 1274
1275 1275 self.dataOut = self.createObjByDefault()
1276 1276
1277 1277 def createObjByDefault(self):
1278 1278
1279 1279 dataObj = Voltage()
1280 1280
1281 1281 return dataObj
1282 1282
1283 1283 def __hasNotDataInBuffer(self):
1284 1284 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
1285 1285 return 1
1286 1286 return 0
1287 1287
1288 1288
1289 1289 def getBlockDimension(self):
1290 1290 """
1291 1291 Gets the number of points to read for each data block
1292 1292
1293 1293 Affected:
1294 1294 self.blocksize
1295 1295
1296 1296 Return:
1297 1297 None
1298 1298 """
1299 1299 pts2read = self.processingHeaderObj.profilesPerBlock * self.processingHeaderObj.nHeights * self.systemHeaderObj.nChannels
1300 1300 self.blocksize = pts2read
1301 1301
1302 1302
1303 1303 def readBlock(self):
1304 1304 """
1305 1305 readBlock reads the data block from the current position of the file pointer
1306 1306 (self.fp) and updates all the parameters related to the data block
1307 1307 (metadata + data). The data read is stored in the buffer and the buffer counter
1308 1308 is set to 0
1309 1309
1310 1310 Inputs:
1311 1311 None
1312 1312
1313 1313 Return:
1314 1314 None
1315 1315
1316 1316 Affected:
1317 1317 self.profileIndex
1318 1318 self.datablock
1319 1319 self.flagIsNewFile
1320 1320 self.flagIsNewBlock
1321 1321 self.nTotalBlocks
1322 1322
1323 1323 Exceptions:
1324 1324 If a block read is not a valid block
1325 1325 """
1326 1326
1327 1327 junk = numpy.fromfile( self.fp, self.dtype, self.blocksize )
1328 1328
1329 1329 try:
1330 1330 junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
1331 1331 except:
1332 1332 print "The read block (%3d) has not enough data" %self.nReadBlocks
1333 1333 return 0
1334 1334
1335 1335 junk = numpy.transpose(junk, (2,0,1))
1336 1336 self.datablock = junk['real'] + junk['imag']*1j
1337 1337
1338 1338 self.profileIndex = 0
1339 1339
1340 1340 self.flagIsNewFile = 0
1341 1341 self.flagIsNewBlock = 1
1342 1342
1343 1343 self.nTotalBlocks += 1
1344 1344 self.nReadBlocks += 1
1345 1345
1346 1346 return 1
1347 1347
1348 1348
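A small sketch of the reshape/transpose done in readBlock above (hedged: the block geometry is hypothetical; the structured dtype mirrors the datatype == 1 case ('<i2') from __readFirstHeader):

    import numpy

    profiles, heights, channels = 4, 10, 2
    dtype = numpy.dtype([('real', '<i2'), ('imag', '<i2')])

    raw = numpy.zeros(profiles*heights*channels, dtype=dtype)     # as read from file
    junk = raw.reshape((profiles, heights, channels))
    junk = numpy.transpose(junk, (2, 0, 1))                       # -> (channels, profiles, heights)
    datablock = junk['real'] + junk['imag']*1j
    print(datablock.shape)                                        # (2, 4, 10)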
1349 1349 def getData(self):
1350 1350 """
1351 1351 getData gets one data unit from the read buffer and copies it to the "Voltage" class
1352 1352 together with all its associated parameters (metadata). When there is no data left in the
1353 1353 read buffer, a new read of the data blocks is required, using "readNextBlock"
1354 1354
1355 1355 It also increments the buffer counter by 1.
1356 1356
1357 1357 Return:
1358 1358 data : returns one profile of voltages (heights * channels) copied from the
1359 1359 buffer. If there are no more files to read, it returns None.
1360 1360
1361 1361 Affected variables:
1362 1362 self.dataOut
1363 1363 self.profileIndex
1364 1364
1365 1365 Affected:
1366 1366 self.dataOut
1367 1367 self.profileIndex
1368 1368 self.flagTimeBlock
1369 1369 self.flagIsNewBlock
1370 1370 """
1371 1371
1372 1372 if self.flagNoMoreFiles:
1373 1373 self.dataOut.flagNoData = True
1374 1374 print 'Process finished'
1375 1375 return 0
1376 1376
1377 1377 self.flagTimeBlock = 0
1378 1378 self.flagIsNewBlock = 0
1379 1379
1380 1380 if self.__hasNotDataInBuffer():
1381 1381
1382 1382 if not( self.readNextBlock() ):
1383 1383 return 0
1384 1384
1385 1385 self.dataOut.dtype = self.dtype
1386 1386
1387 1387 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
1388 1388
1389 1389 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
1390 1390
1391 1391 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
1392 1392
1393 1393 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
1394 1394
1395 1395 self.dataOut.flagTimeBlock = self.flagTimeBlock
1396 1396
1397 1397 self.dataOut.ippSeconds = self.ippSeconds
1398 1398
1399 1399 self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt
1400 1400
1401 1401 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
1402 1402
1403 1403 self.dataOut.flagShiftFFT = False
1404 1404
1405 1405 if self.radarControllerHeaderObj.code != None:
1406 1406
1407 1407 self.dataOut.nCode = self.radarControllerHeaderObj.nCode
1408 1408
1409 1409 self.dataOut.nBaud = self.radarControllerHeaderObj.nBaud
1410 1410
1411 1411 self.dataOut.code = self.radarControllerHeaderObj.code
1412 1412
1413 1413 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
1414 1414
1415 1415 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
1416 1416
1417 1417 self.dataOut.flagDecodeData = False #assume the data is not decoded
1418 1418
1419 1419 self.dataOut.flagDeflipData = False #assume the data has not been deflipped
1420 1420
1421 1421 self.dataOut.flagShiftFFT = False
1422 1422
1423 1423
1424 1424 # self.updateDataHeader()
1425 1425
1426 1426 #data is a 3-dimensional numpy array (profiles, heights and channels)
1427 1427
1428 1428 if self.datablock == None:
1429 1429 self.dataOut.flagNoData = True
1430 1430 return 0
1431 1431
1432 1432 self.dataOut.data = self.datablock[:,self.profileIndex,:]
1433 1433
1434 1434 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000. + self.profileIndex * self.ippSeconds
1435 1435
1436 1436 self.profileIndex += 1
1437 1437
1438 1438 self.dataOut.flagNoData = False
1439 1439
1440 1440 # print self.profileIndex, self.dataOut.utctime
1441 1441 # if self.profileIndex == 800:
1442 1442 # a=1
1443 1443
1444 1444
1445 1445 return self.dataOut.data
1446 1446
1447 1447
1448 1448 class VoltageWriter(JRODataWriter):
1449 1449 """
1450 1450 This class writes voltage data to processed files (.r). Data is
1451 1451 always written in blocks.
1452 1452 """
1453 1453
1454 1454 ext = ".r"
1455 1455
1456 1456 optchar = "D"
1457 1457
1458 1458 shapeBuffer = None
1459 1459
1460 1460
1461 1461 def __init__(self):
1462 1462 """
1463 1463 Initializer of the VoltageWriter class, used to write voltage data.
1464 1464
1465 1465 Affected:
1466 1466 self.dataOut
1467 1467
1468 1468 Return: None
1469 1469 """
1470 1470
1471 1471 self.nTotalBlocks = 0
1472 1472
1473 1473 self.profileIndex = 0
1474 1474
1475 1475 self.isConfig = False
1476 1476
1477 1477 self.fp = None
1478 1478
1479 1479 self.flagIsNewFile = 1
1480 1480
1481 1481 self.nTotalBlocks = 0
1482 1482
1483 1483 self.flagIsNewBlock = 0
1484 1484
1485 1485 self.setFile = None
1486 1486
1487 1487 self.dtype = None
1488 1488
1489 1489 self.path = None
1490 1490
1491 1491 self.filename = None
1492 1492
1493 1493 self.basicHeaderObj = BasicHeader(LOCALTIME)
1494 1494
1495 1495 self.systemHeaderObj = SystemHeader()
1496 1496
1497 1497 self.radarControllerHeaderObj = RadarControllerHeader()
1498 1498
1499 1499 self.processingHeaderObj = ProcessingHeader()
1500 1500
1501 1501 def hasAllDataInBuffer(self):
1502 1502 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
1503 1503 return 1
1504 1504 return 0
1505 1505
1506 1506
1507 1507 def setBlockDimension(self):
1508 1508 """
1509 1509 Sets the dimensional shapes of the data sub-blocks that make up a block.
1510 1510 
1511 1511 Affected:
1512 1512 self.shapeBuffer
1513 1513 self.datablock
1514 1514 
1515 1515 Return: None
1516 1516 """
1518 1518 self.shapeBuffer = (self.processingHeaderObj.profilesPerBlock,
1519 1519 self.processingHeaderObj.nHeights,
1520 1520 self.systemHeaderObj.nChannels)
1521 1521
1522 1522 self.datablock = numpy.zeros((self.systemHeaderObj.nChannels,
1523 1523 self.processingHeaderObj.profilesPerBlock,
1524 1524 self.processingHeaderObj.nHeights),
1525 1525 dtype=numpy.dtype('complex'))
1526 1526
1527 1527
1528 1528 def writeBlock(self):
1529 1529 """
1530 1530 Writes the buffer to the designated file.
1531 1531
1532 1532 Affected:
1533 1533 self.profileIndex
1534 1534 self.flagIsNewFile
1535 1535 self.flagIsNewBlock
1536 1536 self.nTotalBlocks
1537 1537 self.blockIndex
1538 1538
1539 1539 Return: None
1540 1540 """
1541 1541 data = numpy.zeros( self.shapeBuffer, self.dtype )
1542 1542
1543 1543 junk = numpy.transpose(self.datablock, (1,2,0))
1544 1544
1545 1545 data['real'] = junk.real
1546 1546 data['imag'] = junk.imag
1547 1547
1548 1548 data = data.reshape( (-1) )
1549 1549
1550 1550 data.tofile( self.fp )
1551 1551
1552 1552 self.datablock.fill(0)
1553 1553
1554 1554 self.profileIndex = 0
1555 1555 self.flagIsNewFile = 0
1556 1556 self.flagIsNewBlock = 1
1557 1557
1558 1558 self.blockIndex += 1
1559 1559 self.nTotalBlocks += 1
1560 1560
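# Illustrative sketch, not from the original source: writeBlock() stores the complex
# buffer as interleaved real/imag samples of the configured dtype, ordered as
# (profiles, heights, channels). For a '<i2' (short) dtype the block is built roughly as
#
#     dtype = numpy.dtype([('real', '<i2'), ('imag', '<i2')])
#     block = numpy.zeros((profilesPerBlock, nHeights, nChannels), dtype)
#     block['real'] = datablock.real.transpose(1, 2, 0)  # (ch, prof, hei) -> (prof, hei, ch)
#     block['imag'] = datablock.imag.transpose(1, 2, 0)
#     block.reshape(-1).tofile(fp)
#
# i.e. profile 0 / height 0 / channels 0..N-1 first, then height 1, and so on.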
1561 1561 def putData(self):
1562 1562 """
1563 1563 Fills a data block and, once the buffer is complete, writes it to a file.
1564 1564 
1565 1565 Affected:
1566 1566 self.flagIsNewBlock
1567 1567 self.profileIndex
1568 1568 
1569 1569 Return:
1570 1570 0 : if there is no data or no more files can be written
1571 1571 1 : if the data of a block was written to a file
1572 1572 """
1573 1573 if self.dataOut.flagNoData:
1574 1574 return 0
1575 1575
1576 1576 self.flagIsNewBlock = 0
1577 1577
1578 1578 if self.dataOut.flagTimeBlock:
1579 1579
1580 1580 self.datablock.fill(0)
1581 1581 self.profileIndex = 0
1582 1582 self.setNextFile()
1583 1583
1584 1584 if self.profileIndex == 0:
1585 1585 self.getBasicHeader()
1586 1586
1587 1587 self.datablock[:,self.profileIndex,:] = self.dataOut.data
1588 1588
1589 1589 self.profileIndex += 1
1590 1590
1591 1591 if self.hasAllDataInBuffer():
1592 1592 #if self.flagIsNewFile:
1593 1593 self.writeNextBlock()
1594 1594 # self.getDataHeader()
1595 1595
1596 1596 return 1
1597 1597
1598 1598 def __getProcessFlags(self):
1599 1599
1600 1600 processFlags = 0
1601 1601
1602 1602 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
1603 1603 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
1604 1604 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
1605 1605 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
1606 1606 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
1607 1607 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
1608 1608
1609 1609 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
1610 1610
1611 1611
1612 1612
1613 1613 datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
1614 1614 PROCFLAG.DATATYPE_SHORT,
1615 1615 PROCFLAG.DATATYPE_LONG,
1616 1616 PROCFLAG.DATATYPE_INT64,
1617 1617 PROCFLAG.DATATYPE_FLOAT,
1618 1618 PROCFLAG.DATATYPE_DOUBLE]
1619 1619
1620 1620
1621 1621 for index in range(len(dtypeList)):
1622 1622 if self.dataOut.dtype == dtypeList[index]:
1623 1623 dtypeValue = datatypeValueList[index]
1624 1624 break
1625 1625
1626 1626 processFlags += dtypeValue
1627 1627
1628 1628 if self.dataOut.flagDecodeData:
1629 1629 processFlags += PROCFLAG.DECODE_DATA
1630 1630
1631 1631 if self.dataOut.flagDeflipData:
1632 1632 processFlags += PROCFLAG.DEFLIP_DATA
1633 1633
1634 1634 if self.dataOut.code is not None:
1635 1635 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
1636 1636
1637 1637 if self.dataOut.nCohInt > 1:
1638 1638 processFlags += PROCFLAG.COHERENT_INTEGRATION
1639 1639
1640 1640 return processFlags
1641 1641
1642 1642
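# Illustrative sketch, not from the original source: processFlags is built as a bit
# mask, which is why the individual PROCFLAG values can simply be added above. For
# example, short-int data that was decoded and coherently integrated would carry
#
#     processFlags = PROCFLAG.DATATYPE_SHORT + PROCFLAG.DECODE_DATA \
#                  + PROCFLAG.COHERENT_INTEGRATION
#
# and, assuming the PROCFLAG constants are distinct powers of two (which the additive
# construction relies on), a reader can test one flag with a bitwise AND, e.g.
# (processFlags & PROCFLAG.DECODE_DATA) != 0.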
1643 1643 def __getBlockSize(self):
1644 1644 '''
1645 1645 Determines the number of bytes of a Voltage data block.
1646 1646 '''
1647 1647
1648 1648 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
1649 1649 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
1650 1650 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
1651 1651 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
1652 1652 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
1653 1653 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
1654 1654
1655 1655 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
1656 1656 datatypeValueList = [1,2,4,8,4,8]
1657 1657 for index in range(len(dtypeList)):
1658 1658 if self.dataOut.dtype == dtypeList[index]:
1659 1659 datatypeValue = datatypeValueList[index]
1660 1660 break
1661 1661
1662 1662 blocksize = int(self.dataOut.nHeights * self.dataOut.nChannels * self.dataOut.nProfiles * datatypeValue * 2)
1663 1663
1664 1664 return blocksize
1665 1665
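# Worked example, not from the original source: for a block of 128 profiles x
# 200 heights x 8 channels stored as short ints ('<i2', datatypeValue = 2 bytes),
# __getBlockSize() above gives
#
#     200 * 8 * 128 * 2 * 2 = 819200 bytes
#
# where the trailing factor 2 accounts for the interleaved real and imaginary parts.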
1666 1666 def getDataHeader(self):
1667 1667
1668 1668 """
1669 1669 Gets a copy of the First Header.
1670 1670
1671 1671 Affected:
1672 1672 self.systemHeaderObj
1673 1673 self.radarControllerHeaderObj
1674 1674 self.dtype
1675 1675
1676 1676 Return:
1677 1677 None
1678 1678 """
1679 1679
1680 1680 self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
1681 1681 self.systemHeaderObj.nChannels = self.dataOut.nChannels
1682 1682 self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()
1683 1683
1684 1684 self.getBasicHeader()
1685 1685
1686 1686 processingHeaderSize = 40 # bytes
1687 1687 self.processingHeaderObj.dtype = 0 # Voltage
1688 1688 self.processingHeaderObj.blockSize = self.__getBlockSize()
1689 1689 self.processingHeaderObj.profilesPerBlock = self.profilesPerBlock
1690 1690 self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
1691 1691 self.processingHeaderObj.nWindows = 1 #could be 1 or self.dataOut.processingHeaderObj.nWindows
1692 1692 self.processingHeaderObj.processFlags = self.__getProcessFlags()
1693 1693 self.processingHeaderObj.nCohInt = self.dataOut.nCohInt
1694 1694 self.processingHeaderObj.nIncohInt = 1 # when the source data is of type Voltage
1695 1695 self.processingHeaderObj.totalSpectra = 0 # when the source data is of type Voltage
1696 1696
1697 1697 if self.dataOut.code is not None:
1698 1698 self.processingHeaderObj.code = self.dataOut.code
1699 1699 self.processingHeaderObj.nCode = self.dataOut.nCode
1700 1700 self.processingHeaderObj.nBaud = self.dataOut.nBaud
1701 1701 codesize = int(8 + 4 * self.dataOut.nCode * self.dataOut.nBaud)
1702 1702 processingHeaderSize += codesize
1703 1703
1704 1704 if self.processingHeaderObj.nWindows != 0:
1705 1705 self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
1706 1706 self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
1707 1707 self.processingHeaderObj.nHeights = self.dataOut.nHeights
1708 1708 self.processingHeaderObj.samplesWin = self.dataOut.nHeights
1709 1709 processingHeaderSize += 12
1710 1710
1711 1711 self.processingHeaderObj.size = processingHeaderSize
1712 1712
1713 1713 class SpectraReader(JRODataReader):
1714 1714 """
1715 1715 This class reads spectra data from processed files (.pdata). Data is always read one
1716 1716 block at a time. The data read (a 3-dimensional array) is stored in three buffers,
1717 1717 one each for the Self Spectra, the Cross Spectra and the DC Channels:
1718 1718 
1719 1719 equal-channel pairs * heights * profiles (Self Spectra)
1720 1720 different-channel pairs * heights * profiles (Cross Spectra)
1721 1721 channels * heights (DC Channels)
1722 1722 
1723 1723 This class holds instances (objects) of the BasicHeader, SystemHeader,
1724 1724 RadarControllerHeader and Spectra classes. The first three store the data header
1725 1725 information (metadata); the fourth (Spectra) obtains and stores a data block
1726 1726 from the "buffer" every time the "getData" method is executed.
1727 1727
1728 1728 Example:
1729 1729 dpath = "/home/myuser/data"
1730 1730
1731 1731 startTime = datetime.datetime(2010,1,20,0,0,0)
1732 1732 
1733 1733 endTime = datetime.datetime(2010,1,21,23,59,59)
1734 1734
1735 1735 readerObj = SpectraReader()
1736 1736
1737 1737 readerObj.setup(dpath, startTime, endTime)
1738 1738
1739 1739 while(True):
1740 1740
1741 1741 readerObj.getData()
1742 1742
1743 1743 print readerObj.data_spc
1744 1744
1745 1745 print readerObj.data_cspc
1746 1746
1747 1747 print readerObj.data_dc
1748 1748
1749 1749 if readerObj.flagNoMoreFiles:
1750 1750 break
1751 1751
1752 1752 """
1753 1753
1754 1754 pts2read_SelfSpectra = 0
1755 1755
1756 1756 pts2read_CrossSpectra = 0
1757 1757
1758 1758 pts2read_DCchannels = 0
1759 1759
1760 1760 ext = ".pdata"
1761 1761
1762 1762 optchar = "P"
1763 1763
1764 1764 dataOut = None
1765 1765
1766 1766 nRdChannels = None
1767 1767
1768 1768 nRdPairs = None
1769 1769
1770 1770 rdPairList = []
1771 1771
1772 1772
1773 1773 def __init__(self):
1774 1774 """
1775 1775 Initializer of the SpectraReader class, used to read spectra data.
1776 1776 
1777 1777 A Spectra object (self.dataOut) is created internally; it stores a block of data
1778 1778 every time one is requested (getData). The block is obtained from the data
1779 1779 buffer; if the buffer is empty, a new data block is read from file first.
1780 1780 
1781 1781 Affected:
1782 1782 self.dataOut
1783 1783 
1784 1784 Return : None
1785 1785 """
1790 1790
1791 1791 self.isConfig = False
1792 1792
1793 1793 self.pts2read_SelfSpectra = 0
1794 1794
1795 1795 self.pts2read_CrossSpectra = 0
1796 1796
1797 1797 self.pts2read_DCchannels = 0
1798 1798
1799 1799 self.datablock = None
1800 1800
1801 1801 self.utc = None
1802 1802
1803 1803 self.ext = ".pdata"
1804 1804
1805 1805 self.optchar = "P"
1806 1806
1807 1807 self.basicHeaderObj = BasicHeader(LOCALTIME)
1808 1808
1809 1809 self.systemHeaderObj = SystemHeader()
1810 1810
1811 1811 self.radarControllerHeaderObj = RadarControllerHeader()
1812 1812
1813 1813 self.processingHeaderObj = ProcessingHeader()
1814 1814
1815 1815 self.online = 0
1816 1816
1817 1817 self.fp = None
1818 1818
1819 1819 self.idFile = None
1820 1820
1821 1821 self.dtype = None
1822 1822
1823 1823 self.fileSizeByHeader = None
1824 1824
1825 1825 self.filenameList = []
1826 1826
1827 1827 self.filename = None
1828 1828
1829 1829 self.fileSize = None
1830 1830
1831 1831 self.firstHeaderSize = 0
1832 1832
1833 1833 self.basicHeaderSize = 24
1834 1834
1835 1835 self.pathList = []
1836 1836
1837 1837 self.lastUTTime = 0
1838 1838
1839 1839 self.maxTimeStep = 30
1840 1840
1841 1841 self.flagNoMoreFiles = 0
1842 1842
1843 1843 self.set = 0
1844 1844
1845 1845 self.path = None
1846 1846
1847 1847 self.delay = 3 #seconds
1848 1848
1849 1849 self.nTries = 3 #number of retries
1850 1850
1851 1851 self.nFiles = 3 #number of files for searching
1852 1852
1853 1853 self.nReadBlocks = 0
1854 1854
1855 1855 self.flagIsNewFile = 1
1856 1856
1857 1857 self.ippSeconds = 0
1858 1858
1859 1859 self.flagTimeBlock = 0
1860 1860
1861 1861 self.flagIsNewBlock = 0
1862 1862
1863 1863 self.nTotalBlocks = 0
1864 1864
1865 1865 self.blocksize = 0
1866 1866
1867 1867 self.dataOut = self.createObjByDefault()
1868 1868
1869 1869
1870 1870 def createObjByDefault(self):
1871 1871
1872 1872 dataObj = Spectra()
1873 1873
1874 1874 return dataObj
1875 1875
1876 1876 def __hasNotDataInBuffer(self):
1877 1877 return 1
1878 1878
1879 1879
1880 1880 def getBlockDimension(self):
1881 1881 """
1882 1882 Gets the number of points to read for each data block.
1883 1883
1884 1884 Affected:
1885 1885 self.nRdChannels
1886 1886 self.nRdPairs
1887 1887 self.pts2read_SelfSpectra
1888 1888 self.pts2read_CrossSpectra
1889 1889 self.pts2read_DCchannels
1890 1890 self.blocksize
1891 1891 self.dataOut.nChannels
1892 1892 self.dataOut.nPairs
1893 1893
1894 1894 Return:
1895 1895 None
1896 1896 """
1897 1897 self.nRdChannels = 0
1898 1898 self.nRdPairs = 0
1899 1899 self.rdPairList = []
1900 1900
1901 1901 for i in range(0, self.processingHeaderObj.totalSpectra*2, 2):
1902 1902 if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i+1]:
1903 1903 self.nRdChannels = self.nRdChannels + 1 #pair of equal channels (self-spectrum)
1904 1904 else:
1905 1905 self.nRdPairs = self.nRdPairs + 1 #pair of different channels (cross-spectrum)
1906 1906 self.rdPairList.append((self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1]))
1907 1907
1908 1908 pts2read = self.processingHeaderObj.nHeights * self.processingHeaderObj.profilesPerBlock
1909 1909
1910 1910 self.pts2read_SelfSpectra = int(self.nRdChannels * pts2read)
1911 1911 self.blocksize = self.pts2read_SelfSpectra
1912 1912
1913 1913 if self.processingHeaderObj.flag_cspc:
1914 1914 self.pts2read_CrossSpectra = int(self.nRdPairs * pts2read)
1915 1915 self.blocksize += self.pts2read_CrossSpectra
1916 1916
1917 1917 if self.processingHeaderObj.flag_dc:
1918 1918 self.pts2read_DCchannels = int(self.systemHeaderObj.nChannels * self.processingHeaderObj.nHeights)
1919 1919 self.blocksize += self.pts2read_DCchannels
1920 1920
1921 1921 # self.blocksize = self.pts2read_SelfSpectra + self.pts2read_CrossSpectra + self.pts2read_DCchannels
1922 1922
1923 1923
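# Illustrative sketch, not from the original source: spectraComb lists the recorded
# channel combinations two entries at a time; a pair of equal channels is a
# self-spectrum and a pair of different channels is a cross-spectrum. For example
#
#     spectraComb = [0, 0, 1, 1, 0, 1]
#
# is parsed by getBlockDimension() as two self-spectra (channels 0 and 1) plus one
# cross-spectrum for the pair (0, 1), i.e. nRdChannels = 2 and nRdPairs = 1.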
1924 1924 def readBlock(self):
1925 1925 """
1926 1926 Reads a data block from the current position of the file pointer (self.fp)
1927 1927 and updates all the parameters related to that block (metadata + data).
1928 1928 The data read is stored in the buffer and the buffer counter is reset to 0.
1929 1929 
1930 1930 Return: 1
1931 1931 
1932 1932 Affected:
1933 1933 
1934 1934 self.flagIsNewFile
1935 1935 self.flagIsNewBlock
1936 1936 self.nTotalBlocks
1937 1937 self.data_spc
1938 1938 self.data_cspc
1939 1939 self.data_dc
1940 1940 
1941 1941 Exceptions:
1942 1942 If a block that was read is not a valid block
1943 1943 """
1945 1945 blockOk_flag = False
1946 1946 fpointer = self.fp.tell()
1947 1947
1948 1948 spc = numpy.fromfile( self.fp, self.dtype[0], self.pts2read_SelfSpectra )
1949 1949 spc = spc.reshape( (self.nRdChannels, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #reshape into a 3D array
1950 1950
1951 1951 if self.processingHeaderObj.flag_cspc:
1952 1952 cspc = numpy.fromfile( self.fp, self.dtype, self.pts2read_CrossSpectra )
1953 1953 cspc = cspc.reshape( (self.nRdPairs, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #reshape into a 3D array
1954 1954
1955 1955 if self.processingHeaderObj.flag_dc:
1956 1956 dc = numpy.fromfile( self.fp, self.dtype, self.pts2read_DCchannels ) #int(self.processingHeaderObj.nHeights*self.systemHeaderObj.nChannels) )
1957 1957 dc = dc.reshape( (self.systemHeaderObj.nChannels, self.processingHeaderObj.nHeights) ) #reshape into a 2D array
1958 1958
1959 1959
1960 1960 if not(self.processingHeaderObj.shif_fft):
1961 1961 #shift right along axis 2 by 'shift' positions
1962 1962 shift = int(self.processingHeaderObj.profilesPerBlock/2)
1963 1963 spc = numpy.roll( spc, shift , axis=2 )
1964 1964
1965 1965 if self.processingHeaderObj.flag_cspc:
1966 1966 #shift right along axis 2 by 'shift' positions
1967 1967 cspc = numpy.roll( cspc, shift, axis=2 )
1968
1968
1969 self.processingHeaderObj.shif_fft = True
1969 1970
1970 1971 spc = numpy.transpose( spc, (0,2,1) )
1971 1972 self.data_spc = spc
1972 1973
1973 1974 if self.processingHeaderObj.flag_cspc:
1974 1975 cspc = numpy.transpose( cspc, (0,2,1) )
1975 1976 self.data_cspc = cspc['real'] + cspc['imag']*1j
1976 1977 else:
1977 1978 self.data_cspc = None
1978 1979
1979 1980 if self.processingHeaderObj.flag_dc:
1980 1981 self.data_dc = dc['real'] + dc['imag']*1j
1981 1982 else:
1982 1983 self.data_dc = None
1983 1984
1984 1985 self.flagIsNewFile = 0
1985 1986 self.flagIsNewBlock = 1
1986 1987
1987 1988 self.nTotalBlocks += 1
1988 1989 self.nReadBlocks += 1
1989 1990
1990 1991 return 1
1991 1992
1992 1993
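# Illustrative sketch, not from the original source: when the file was written without
# the FFT shift (shif_fft == False), readBlock() above recenters the spectra so that
# zero Doppler ends up in the middle of the frequency axis, e.g.
#
#     >>> numpy.roll(numpy.array([0, 1, 2, 3, 4, 5]), 3)
#     array([3, 4, 5, 0, 1, 2])
#
# which is the same recentering numpy.fft.fftshift would apply along that axis.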
1993 1994 def getData(self):
1994 1995 """
1995 1996 Copies the read buffer into the "Spectra" object (self.dataOut), together with all
1996 1997 the parameters associated with it (metadata). When there is no data left in the
1997 1998 read buffer, a new data block is read first using "readNextBlock".
1998 1999 
1999 2000 Return:
2000 2001 0 : if there are no more files available
2001 2002 self.dataOut.data_spc : if the buffer was copied successfully
2002 2003 
2003 2004 Affected:
2004 2005 self.dataOut
2005 2006 self.flagTimeBlock
2006 2007 self.flagIsNewBlock
2008 2009 """
2009 2010
2010 2011 if self.flagNoMoreFiles:
2011 2012 self.dataOut.flagNoData = True
2012 2013 print 'Process finished'
2013 2014 return 0
2014 2015
2015 2016 self.flagTimeBlock = 0
2016 2017 self.flagIsNewBlock = 0
2017 2018
2018 2019 if self.__hasNotDataInBuffer():
2019 2020
2020 2021 if not( self.readNextBlock() ):
2021 2022 self.dataOut.flagNoData = True
2022 2023 return 0
2023 2024
2024 2025 # self.updateDataHeader()
2025 2026
2026 2027 #data_spc is a 3-dimensional numpy array (channels, profiles, heights)
2027 2028 
2028 2029 if self.data_dc is None:
2029 2030 self.dataOut.flagNoData = True
2030 2031 return 0
2031 2032
2032 2033 self.dataOut.data_spc = self.data_spc
2033 2034
2034 2035 self.dataOut.data_cspc = self.data_cspc
2035 2036
2036 2037 self.dataOut.data_dc = self.data_dc
2037 2038
2038 2039 self.dataOut.flagTimeBlock = self.flagTimeBlock
2039 2040
2040 2041 self.dataOut.flagNoData = False
2041 2042
2042 2043 self.dataOut.dtype = self.dtype
2043 2044
2044 2045 # self.dataOut.nChannels = self.nRdChannels
2045 2046
2046 2047 self.dataOut.nPairs = self.nRdPairs
2047 2048
2048 2049 self.dataOut.pairsList = self.rdPairList
2049 2050
2050 2051 # self.dataOut.nHeights = self.processingHeaderObj.nHeights
2051 2052
2052 2053 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
2053 2054
2054 2055 self.dataOut.nFFTPoints = self.processingHeaderObj.profilesPerBlock
2055 2056
2056 2057 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
2057 2058
2058 2059 self.dataOut.nIncohInt = self.processingHeaderObj.nIncohInt
2059 2060
2060 2061 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
2061 2062
2062 2063 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
2063 2064
2064 2065 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
2065 2066
2066 2067 # self.dataOut.channelIndexList = range(self.systemHeaderObj.nChannels)
2067 2068
2068 2069 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000.#+ self.profileIndex * self.ippSeconds
2069 2070
2070 2071 self.dataOut.ippSeconds = self.ippSeconds
2071 2072
2072 2073 self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt * self.processingHeaderObj.nIncohInt * self.dataOut.nFFTPoints
2073 2074
2074 2075 # self.profileIndex += 1
2075 2076
2076 2077 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
2077 2078
2078 2079 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
2079 2080
2080 2081 self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft
2081 2082
2082 2083 self.dataOut.flagDecodeData = False #assume the data has not been decoded
2083 2084 
2084 2085 self.dataOut.flagDeflipData = True #assume the data has already been deflipped
2085 2086
2086 2087 if self.processingHeaderObj.code is not None:
2087 2088
2088 2089 self.dataOut.nCode = self.processingHeaderObj.nCode
2089 2090
2090 2091 self.dataOut.nBaud = self.processingHeaderObj.nBaud
2091 2092
2092 2093 self.dataOut.code = self.processingHeaderObj.code
2093 2094
2094 2095 self.dataOut.flagDecodeData = True
2095 2096
2096 2097 return self.dataOut.data_spc
2097 2098
2098 2099
2099 2100 class SpectraWriter(JRODataWriter):
2100 2101
2101 2102 """
2102 2103 This class writes spectra data to processed files (.pdata). Data is always
2103 2104 written one block at a time.
2104 2105 """
2105 2106
2106 2107 ext = ".pdata"
2107 2108
2108 2109 optchar = "P"
2109 2110
2110 2111 shape_spc_Buffer = None
2111 2112
2112 2113 shape_cspc_Buffer = None
2113 2114
2114 2115 shape_dc_Buffer = None
2115 2116
2116 2117 data_spc = None
2117 2118
2118 2119 data_cspc = None
2119 2120
2120 2121 data_dc = None
2121 2122
2122 2123 # dataOut = None
2123 2124
2124 2125 def __init__(self):
2125 2126 """
2126 2127 Initializer of the SpectraWriter class, used to write spectra data.
2127 2128
2128 2129 Affected:
2129 2130 self.dataOut
2130 2131 self.basicHeaderObj
2131 2132 self.systemHeaderObj
2132 2133 self.radarControllerHeaderObj
2133 2134 self.processingHeaderObj
2134 2135
2135 2136 Return: None
2136 2137 """
2137 2138
2138 2139 self.isConfig = False
2139 2140
2140 2141 self.nTotalBlocks = 0
2141 2142
2142 2143 self.data_spc = None
2143 2144
2144 2145 self.data_cspc = None
2145 2146
2146 2147 self.data_dc = None
2147 2148
2148 2149 self.fp = None
2149 2150
2150 2151 self.flagIsNewFile = 1
2151 2152
2152 2153 self.nTotalBlocks = 0
2153 2154
2154 2155 self.flagIsNewBlock = 0
2155 2156
2156 2157 self.setFile = None
2157 2158
2158 2159 self.dtype = None
2159 2160
2160 2161 self.path = None
2161 2162
2162 2163 self.noMoreFiles = 0
2163 2164
2164 2165 self.filename = None
2165 2166
2166 2167 self.basicHeaderObj = BasicHeader(LOCALTIME)
2167 2168
2168 2169 self.systemHeaderObj = SystemHeader()
2169 2170
2170 2171 self.radarControllerHeaderObj = RadarControllerHeader()
2171 2172
2172 2173 self.processingHeaderObj = ProcessingHeader()
2173 2174
2174 2175
2175 2176 def hasAllDataInBuffer(self):
2176 2177 return 1
2177 2178
2178 2179
2179 2180 def setBlockDimension(self):
2180 2181 """
2181 2182 Sets the dimensional shapes of the data sub-blocks that make up a block.
2182 2183
2183 2184 Affected:
2184 2185 self.shape_spc_Buffer
2185 2186 self.shape_cspc_Buffer
2186 2187 self.shape_dc_Buffer
2187 2188
2188 2189 Return: None
2189 2190 """
2190 2191 self.shape_spc_Buffer = (self.dataOut.nChannels,
2191 2192 self.processingHeaderObj.nHeights,
2192 2193 self.processingHeaderObj.profilesPerBlock)
2193 2194
2194 2195 self.shape_cspc_Buffer = (self.dataOut.nPairs,
2195 2196 self.processingHeaderObj.nHeights,
2196 2197 self.processingHeaderObj.profilesPerBlock)
2197 2198
2198 2199 self.shape_dc_Buffer = (self.dataOut.nChannels,
2199 2200 self.processingHeaderObj.nHeights)
2200 2201
2201 2202
2202 2203 def writeBlock(self):
2203 2204 """
2204 2205 Writes the buffer to the designated file.
2205 2206
2206 2207 Affected:
2207 2208 self.data_spc
2208 2209 self.data_cspc
2209 2210 self.data_dc
2210 2211 self.flagIsNewFile
2211 2212 self.flagIsNewBlock
2212 2213 self.nTotalBlocks
2213 2214 self.nWriteBlocks
2214 2215
2215 2216 Return: None
2216 2217 """
2217 2218
2218 2219 spc = numpy.transpose( self.data_spc, (0,2,1) )
2219 2220 if not( self.processingHeaderObj.shif_fft ):
2220 2221 spc = numpy.roll( spc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #shift right along axis 2 by nFFTPoints/2 positions
2221 2222 data = spc.reshape((-1))
2222 2223 data.tofile(self.fp)
2223 2224
2224 2225 if self.data_cspc is not None:
2225 2226 data = numpy.zeros( self.shape_cspc_Buffer, self.dtype )
2226 2227 cspc = numpy.transpose( self.data_cspc, (0,2,1) )
2227 2228 if not( self.processingHeaderObj.shif_fft ):
2228 2229 cspc = numpy.roll( cspc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #shift right along axis 2 by nFFTPoints/2 positions
2229 2230 data['real'] = cspc.real
2230 2231 data['imag'] = cspc.imag
2231 2232 data = data.reshape((-1))
2232 2233 data.tofile(self.fp)
2233 2234
2234 2235 if self.data_dc is not None:
2235 2236 data = numpy.zeros( self.shape_dc_Buffer, self.dtype )
2236 2237 dc = self.data_dc
2237 2238 data['real'] = dc.real
2238 2239 data['imag'] = dc.imag
2239 2240 data = data.reshape((-1))
2240 2241 data.tofile(self.fp)
2241 2242
2242 2243 self.data_spc.fill(0)
2243 2244 if self.data_dc is not None: self.data_dc.fill(0)
2244 2245 if self.data_cspc is not None:
2245 2246 self.data_cspc.fill(0)
2246 2247
2247 2248 self.flagIsNewFile = 0
2248 2249 self.flagIsNewBlock = 1
2249 2250 self.nTotalBlocks += 1
2250 2251 self.nWriteBlocks += 1
2251 2252 self.blockIndex += 1
2252 2253
2253 2254
2254 2255 def putData(self):
2255 2256 """
2256 2257 Fills a data block and then writes it to a file.
2257 2258 
2258 2259 Affected:
2259 2260 self.data_spc
2260 2261 self.data_cspc
2261 2262 self.data_dc
2262 2263 
2263 2264 Return:
2264 2265 0 : if there is no data or no more files can be written
2265 2266 1 : if the data of a block was written to a file
2266 2267 """
2267 2268
2268 2269 if self.dataOut.flagNoData:
2269 2270 return 0
2270 2271
2271 2272 self.flagIsNewBlock = 0
2272 2273
2273 2274 if self.dataOut.flagTimeBlock:
2274 2275 if self.data_spc is not None: self.data_spc.fill(0)
2275 2276 if self.data_cspc is not None: self.data_cspc.fill(0)
2276 2277 if self.data_dc is not None: self.data_dc.fill(0)
2277 2278 self.setNextFile()
2278 2279
2279 2280 if self.flagIsNewFile == 0:
2280 2281 self.getBasicHeader()
2281 2282
2282 2283 self.data_spc = self.dataOut.data_spc
2283 2284 self.data_cspc = self.dataOut.data_cspc
2284 2285 self.data_dc = self.dataOut.data_dc
2285 2286
2286 2287 # #self.processingHeaderObj.dataBlocksPerFile)
2287 2288 if self.hasAllDataInBuffer():
2288 2289 # self.getDataHeader()
2289 2290 self.writeNextBlock()
2290 2291
2291 2292 return 1
2292 2293
2293 2294
2294 2295 def __getProcessFlags(self):
2295 2296
2296 2297 processFlags = 0
2297 2298
2298 2299 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
2299 2300 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
2300 2301 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
2301 2302 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
2302 2303 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
2303 2304 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
2304 2305
2305 2306 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
2306 2307
2307 2308
2308 2309
2309 2310 datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
2310 2311 PROCFLAG.DATATYPE_SHORT,
2311 2312 PROCFLAG.DATATYPE_LONG,
2312 2313 PROCFLAG.DATATYPE_INT64,
2313 2314 PROCFLAG.DATATYPE_FLOAT,
2314 2315 PROCFLAG.DATATYPE_DOUBLE]
2315 2316
2316 2317
2317 2318 for index in range(len(dtypeList)):
2318 2319 if self.dataOut.dtype == dtypeList[index]:
2319 2320 dtypeValue = datatypeValueList[index]
2320 2321 break
2321 2322
2322 2323 processFlags += dtypeValue
2323 2324
2324 2325 if self.dataOut.flagDecodeData:
2325 2326 processFlags += PROCFLAG.DECODE_DATA
2326 2327
2327 2328 if self.dataOut.flagDeflipData:
2328 2329 processFlags += PROCFLAG.DEFLIP_DATA
2329 2330
2330 2331 if self.dataOut.code is not None:
2331 2332 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
2332 2333
2333 2334 if self.dataOut.nIncohInt > 1:
2334 2335 processFlags += PROCFLAG.INCOHERENT_INTEGRATION
2335 2336
2336 2337 if self.dataOut.data_dc is not None:
2337 2338 processFlags += PROCFLAG.SAVE_CHANNELS_DC
2338 2339
2339 2340 return processFlags
2340 2341
2341 2342
2342 2343 def __getBlockSize(self):
2343 2344 '''
2344 2345 Determines the number of bytes of a Spectra data block.
2345 2346 '''
2346 2347
2347 2348 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
2348 2349 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
2349 2350 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
2350 2351 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
2351 2352 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
2352 2353 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
2353 2354
2354 2355 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
2355 2356 datatypeValueList = [1,2,4,8,4,8]
2356 2357 for index in range(len(dtypeList)):
2357 2358 if self.dataOut.dtype == dtypeList[index]:
2358 2359 datatypeValue = datatypeValueList[index]
2359 2360 break
2360 2361
2361 2362
2362 2363 pts2write = self.dataOut.nHeights * self.dataOut.nFFTPoints
2363 2364
2364 2365 pts2write_SelfSpectra = int(self.dataOut.nChannels * pts2write)
2365 2366 blocksize = (pts2write_SelfSpectra*datatypeValue)
2366 2367
2367 2368 if self.dataOut.data_cspc is not None:
2368 2369 pts2write_CrossSpectra = int(self.dataOut.nPairs * pts2write)
2369 2370 blocksize += (pts2write_CrossSpectra*datatypeValue*2)
2370 2371
2371 2372 if self.dataOut.data_dc is not None:
2372 2373 pts2write_DCchannels = int(self.dataOut.nChannels * self.dataOut.nHeights)
2373 2374 blocksize += (pts2write_DCchannels*datatypeValue*2)
2374 2375
2375 2376 blocksize = blocksize #* datatypeValue * 2 #FIX THIS
2376 2377
2377 2378 return blocksize
2378 2379
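# Worked example, not from the original source: for 128 FFT points, 200 heights,
# 2 channels, one cross pair and DC channels saved, with '<f4' data (4 bytes),
# __getBlockSize() above gives
#
#     self spectra  : 2 * (200 * 128) * 4     = 204800 bytes (real only)
#     cross spectra : 1 * (200 * 128) * 4 * 2 = 204800 bytes (complex)
#     DC channels   : 2 * 200 * 4 * 2         = 3200 bytes (complex)
#
# for a total block size of 412800 bytes.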
2379 2380 def getDataHeader(self):
2380 2381
2381 2382 """
2382 2383 Gets a copy of the First Header.
2383 2384
2384 2385 Affected:
2385 2386 self.systemHeaderObj
2386 2387 self.radarControllerHeaderObj
2387 2388 self.dtype
2388 2389
2389 2390 Return:
2390 2391 None
2391 2392 """
2392 2393
2393 2394 self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
2394 2395 self.systemHeaderObj.nChannels = self.dataOut.nChannels
2395 2396 self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()
2396 2397
2397 2398 self.getBasicHeader()
2398 2399
2399 2400 processingHeaderSize = 40 # bytes
2400 2401 self.processingHeaderObj.dtype = 0 # Voltage
2401 2402 self.processingHeaderObj.blockSize = self.__getBlockSize()
2402 2403 self.processingHeaderObj.profilesPerBlock = self.dataOut.nFFTPoints
2403 2404 self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
2404 2405 self.processingHeaderObj.nWindows = 1 #could be 1 or self.dataOut.processingHeaderObj.nWindows
2405 2406 self.processingHeaderObj.processFlags = self.__getProcessFlags()
2406 2407 self.processingHeaderObj.nCohInt = self.dataOut.nCohInt # required to determine the value of timeInterval
2407 2408 self.processingHeaderObj.nIncohInt = self.dataOut.nIncohInt
2408 2409 self.processingHeaderObj.totalSpectra = self.dataOut.nPairs + self.dataOut.nChannels
2409 2410
2410 2411 if self.processingHeaderObj.totalSpectra > 0:
2411 2412 channelList = []
2412 2413 for channel in range(self.dataOut.nChannels):
2413 2414 channelList.append(channel)
2414 2415 channelList.append(channel)
2415 2416
2416 2417 pairsList = []
2417 2418 for pair in self.dataOut.pairsList:
2418 2419 pairsList.append(pair[0])
2419 2420 pairsList.append(pair[1])
2420 2421 spectraComb = channelList + pairsList
2421 2422 spectraComb = numpy.array(spectraComb,dtype="u1")
2422 2423 self.processingHeaderObj.spectraComb = spectraComb
2423 2424 sizeOfSpcComb = len(spectraComb)
2424 2425 processingHeaderSize += sizeOfSpcComb
2425 2426
2426 2427 if self.dataOut.code is not None:
2427 2428 self.processingHeaderObj.code = self.dataOut.code
2428 2429 self.processingHeaderObj.nCode = self.dataOut.nCode
2429 2430 self.processingHeaderObj.nBaud = self.dataOut.nBaud
2430 2431 nCodeSize = 4 # bytes
2431 2432 nBaudSize = 4 # bytes
2432 2433 codeSize = 4 # bytes
2433 2434 sizeOfCode = int(nCodeSize + nBaudSize + codeSize * self.dataOut.nCode * self.dataOut.nBaud)
2434 2435 processingHeaderSize += sizeOfCode
2435 2436
2436 2437 if self.processingHeaderObj.nWindows != 0:
2437 2438 self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
2438 2439 self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
2439 2440 self.processingHeaderObj.nHeights = self.dataOut.nHeights
2440 2441 self.processingHeaderObj.samplesWin = self.dataOut.nHeights
2441 2442 sizeOfFirstHeight = 4
2442 2443 sizeOfdeltaHeight = 4
2443 2444 sizeOfnHeights = 4
2444 2445 sizeOfWindows = (sizeOfFirstHeight + sizeOfdeltaHeight + sizeOfnHeights)*self.processingHeaderObj.nWindows
2445 2446 processingHeaderSize += sizeOfWindows
2446 2447
2447 2448 self.processingHeaderObj.size = processingHeaderSize
2448 2449
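# Worked example, not from the original source: the processing header size is built up
# incrementally in getDataHeader() above. For 2 channels, one cross pair, no code table
# and a single window it would be
#
#     40 (fixed fields) + 6 (spectraComb = [0, 0, 1, 1, 0, 1]) + 12 (firstHeight,
#     deltaHeight, nHeights for one window) = 58 bytes
#
# with 8 + 4 * nCode * nBaud additional bytes whenever a code table is present.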
2449 2450 class SpectraHeisWriter():
2450 2451
2451 2452 i=0
2452 2453
2453 2454 def __init__(self, dataOut):
2454 2455
2455 2456 self.wrObj = FITS()
2456 2457 self.dataOut = dataOut
2457 2458
2458 2459 def isNumber(self, value):
2459 2460 """
2460 2461 Checks whether a string can be converted into a number.
2461 2462 
2462 2463 Exceptions:
2463 2464 If a given string cannot be converted into a number
2464 2465 Input:
2465 2466 value : string to analyze to determine whether it can be converted into a number
2466 2467 
2467 2468 Return:
2468 2469 True : if the string is numeric
2469 2470 False : if it is not a numeric string
2470 2471 """
2471 2472 try:
2472 2473 float( value )
2473 2474 return True
2474 2475 except:
2475 2476 return False
2476 2477
2477 2478 def setup(self, wrpath,):
2478 2479
2479 2480 if not(os.path.exists(wrpath)):
2480 2481 os.mkdir(wrpath)
2481 2482
2482 2483 self.wrpath = wrpath
2483 2484 self.setFile = 0
2484 2485
2485 2486 def putData(self):
2486 2487 # self.wrObj.writeHeader(nChannels=self.dataOut.nChannels, nFFTPoints=self.dataOut.nFFTPoints)
2487 2488 #name = self.dataOut.utctime
2488 2489 name= time.localtime( self.dataOut.utctime)
2489 2490 ext=".fits"
2490 2491 #folder='D%4.4d%3.3d'%(name.tm_year,name.tm_yday)
2491 2492 subfolder = 'D%4.4d%3.3d' % (name.tm_year,name.tm_yday)
2492 2493
2493 2494 fullpath = os.path.join( self.wrpath, subfolder )
2494 2495 if not( os.path.exists(fullpath) ):
2495 2496 os.mkdir(fullpath)
2496 2497 self.setFile += 1
2497 2498 file = 'D%4.4d%3.3d%3.3d%s' % (name.tm_year,name.tm_yday,self.setFile,ext)
2498 2499
2499 2500 filename = os.path.join(self.wrpath,subfolder, file)
2500 2501
2501 2502 # print self.dataOut.ippSeconds
2502 2503 freq=numpy.arange(-1*self.dataOut.nHeights/2.,self.dataOut.nHeights/2.)/(2*self.dataOut.ippSeconds)
2503 2504
2504 2505 col1=self.wrObj.setColF(name="freq", format=str(self.dataOut.nFFTPoints)+'E', array=freq)
2505 2506 col2=self.wrObj.writeData(name="P_Ch1",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[0,:]))
2506 2507 col3=self.wrObj.writeData(name="P_Ch2",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[1,:]))
2507 2508 col4=self.wrObj.writeData(name="P_Ch3",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[2,:]))
2508 2509 col5=self.wrObj.writeData(name="P_Ch4",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[3,:]))
2509 2510 col6=self.wrObj.writeData(name="P_Ch5",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[4,:]))
2510 2511 col7=self.wrObj.writeData(name="P_Ch6",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[5,:]))
2511 2512 col8=self.wrObj.writeData(name="P_Ch7",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[6,:]))
2512 2513 col9=self.wrObj.writeData(name="P_Ch8",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[7,:]))
2513 2514 #n=numpy.arange((100))
2514 2515 n=self.dataOut.data_spc[6,:]
2515 2516 a=self.wrObj.cFImage(n)
2516 2517 b=self.wrObj.Ctable(col1,col2,col3,col4,col5,col6,col7,col8,col9)
2517 2518 self.wrObj.CFile(a,b)
2518 2519 self.wrObj.wFile(filename)
2519 2520 return 1
2520 2521
2521 2522 class FITS:
2522 2523
2523 2524 name=None
2524 2525 format=None
2525 2526 array =None
2526 2527 data =None
2527 2528 thdulist=None
2528 2529
2529 2530 def __init__(self):
2530 2531
2531 2532 pass
2532 2533
2533 2534 def setColF(self,name,format,array):
2534 2535 self.name=name
2535 2536 self.format=format
2536 2537 self.array=array
2537 2538 a1=numpy.array([self.array],dtype=numpy.float32)
2538 2539 self.col1 = pyfits.Column(name=self.name, format=self.format, array=a1)
2539 2540 return self.col1
2540 2541
2541 2542 # def setColP(self,name,format,data):
2542 2543 # self.name=name
2543 2544 # self.format=format
2544 2545 # self.data=data
2545 2546 # a2=numpy.array([self.data],dtype=numpy.float32)
2546 2547 # self.col2 = pyfits.Column(name=self.name, format=self.format, array=a2)
2547 2548 # return self.col2
2548 2549
2549 2550 def writeHeader(self,):
2550 2551 pass
2551 2552
2552 2553 def writeData(self,name,format,data):
2553 2554 self.name=name
2554 2555 self.format=format
2555 2556 self.data=data
2556 2557 a2=numpy.array([self.data],dtype=numpy.float32)
2557 2558 self.col2 = pyfits.Column(name=self.name, format=self.format, array=a2)
2558 2559 return self.col2
2559 2560
2560 2561 def cFImage(self,n):
2561 2562 self.hdu= pyfits.PrimaryHDU(n)
2562 2563 return self.hdu
2563 2564
2564 2565 def Ctable(self,col1,col2,col3,col4,col5,col6,col7,col8,col9):
2565 2566 self.cols=pyfits.ColDefs( [col1,col2,col3,col4,col5,col6,col7,col8,col9])
2566 2567 self.tbhdu = pyfits.new_table(self.cols)
2567 2568 return self.tbhdu
2568 2569
2569 2570 def CFile(self,hdu,tbhdu):
2570 2571 self.thdulist=pyfits.HDUList([hdu,tbhdu])
2571 2572
2572 2573 def wFile(self,filename):
2573 2574 self.thdulist.writeto(filename)
@@ -1,1159 +1,1280
1 1 '''
2 2
3 3 $Author: dsuarez $
4 4 $Id: Processor.py 1 2012-11-12 18:56:07Z dsuarez $
5 5 '''
6 6 import os
7 7 import numpy
8 8 import datetime
9 9 import time
10 10
11 11 from jrodata import *
12 12 from jrodataIO import *
13 13 from jroplot import *
14 14
15 15 class ProcessingUnit:
16 16
17 17 """
18 18 This is the base class for data processing.
19 19 
20 20 It provides the "call" method to invoke operations. Operations can be:
21 21 - Internal methods (callMethod)
22 22 - Objects of the Operation type (callObject). Before being called, these objects
23 23 have to be added with the "addOperation" method.
24 24
25 25 """
26 26 # input data object (Voltage, Spectra or Correlation)
27 27 dataIn = None
28 28 
29 29 # output data object (Voltage, Spectra or Correlation)
30 30 dataOut = None
31 31
32 32
33 33 objectDict = None
34 34
35 35 def __init__(self):
36 36
37 37 self.objectDict = {}
38 38
39 39 def init(self):
40 40
41 41 raise ValueError, "Not implemented"
42 42
43 43 def addOperation(self, object, objId):
44 44
45 45 """
46 46 Adds the object "object" to the dictionary "self.objectDict" and returns the
47 47 identifier associated with this object.
48 48 
49 49 Input:
50 50 
51 51 object : object of the "Operation" class
52 52 
53 53 Return:
54 54 
55 55 objId : identifier of the object, needed to execute the operation
56 56 """
57 57
58 58 self.objectDict[objId] = object
59 59
60 60 return objId
61 61
62 62 def operation(self, **kwargs):
63 63
64 64 """
65 65 Operation applied directly on the data (dataOut.data). The values of the
66 66 attributes of the dataOut object must be updated.
67 67 
68 68 Input:
69 69 
70 70 **kwargs : dictionary of arguments of the function to execute
71 71 """
72 72
73 73 raise ValueError, "Not implemented"
74 74
75 75 def callMethod(self, name, **kwargs):
76 76
77 77 """
78 78 Executes the method of this class named "name" with the **kwargs arguments.
79 79 
80 80 Input:
81 81 name : name of the method to execute
82 82 
83 83 **kwargs : dictionary with the names and values of the arguments of the function to execute.
84 84
85 85 """
86 86 if name != 'run':
87 87
88 88 if name == 'init' and self.dataIn.isEmpty():
89 89 self.dataOut.flagNoData = True
90 90 return False
91 91
92 92 if name != 'init' and self.dataOut.isEmpty():
93 93 return False
94 94
95 95 methodToCall = getattr(self, name)
96 96
97 97 methodToCall(**kwargs)
98 98
99 99 if name != 'run':
100 100 return True
101 101
102 102 if self.dataOut.isEmpty():
103 103 return False
104 104
105 105 return True
106 106
107 107 def callObject(self, objId, **kwargs):
108 108
109 109 """
110 110 Executes the operation associated with the object identifier "objId".
111 111 
112 112 Input:
113 113 
114 114 objId : identifier of the object to execute
115 115 
116 116 **kwargs : dictionary with the names and values of the arguments of the function to execute.
117 117 
118 118 Return:
119 119 
120 120 True if the operation was executed, False if dataOut is empty
121 121 """
122 122
123 123 if self.dataOut.isEmpty():
124 124 return False
125 125
126 126 object = self.objectDict[objId]
127 127
128 128 object.run(self.dataOut, **kwargs)
129 129
130 130 return True
131 131
132 132 def call(self, operationConf, **kwargs):
133 133
134 134 """
135 135 Returns True if the operation "operationConf.name" was executed with the
136 136 arguments "**kwargs", and False if the operation was not executed.
137 137 The operation can be of two types:
138 138 
139 139 1. A method of this class itself:
140 140 
141 141 operation.type = "self"
142 142 
143 143 2. The "run" method of an Operation object (or of a class derived from it):
144 144 operation.type = "other"
145 145 
146 146 This Operation object must have been added beforehand with the "addOperation"
147 147 method and identified with operation.id.
148 148 
149 149 Input:
150 150 
151 151 operationConf : operation object with the attributes: name, type and id.
155 155
156 156 """
157 157
158 158 if operationConf.type == 'self':
159 159 sts = self.callMethod(operationConf.name, **kwargs)
160 160
161 161 if operationConf.type == 'other':
162 162 sts = self.callObject(operationConf.id, **kwargs)
163 163
164 164 return sts
165 165
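# Illustrative sketch, not from the original source (readerUnit and operationConf are
# hypothetical placeholders): a controller typically drives a ProcessingUnit like this:
#
#     procUnit = VoltageProc()
#     procUnit.addOperation(CohInt(), objId=1)
#     procUnit.setInput(readerUnit.getOutput())
#     procUnit.call(operationConf, n=4)   # type "self"  -> callMethod(name, **kwargs)
#                                         # type "other" -> callObject(id, **kwargs)
#
# where operationConf carries the attributes name, type and id described in the
# docstring above.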
166 166 def setInput(self, dataIn):
167 167
168 168 self.dataIn = dataIn
169 169
170 170 def getOutput(self):
171 171
172 172 return self.dataOut
173 173
174 174 class Operation():
175 175
176 176 """
177 177 Base class for the additional operations that can be added to the ProcessingUnit class
178 178 and that need to accumulate information from previous data before processing. Preferably,
179 179 an accumulation buffer should be used inside this class.
180 180 
181 181 Example: coherent integration, which needs the previous n profiles (buffer).
182 182
183 183 """
184 184
185 185 __buffer = None
186 186 __isConfig = False
187 187
188 188 def __init__(self):
189 189
190 190 pass
191 191
192 192 def run(self, dataIn, **kwargs):
193 193
194 194 """
195 195 Performs the required operations on dataIn.data and updates the attributes of the dataIn object.
196 196 
197 197 Input:
198 198 
199 199 dataIn : object of the JROData type
200 200 
201 201 Return:
202 202 
203 203 None
204 204 
205 205 Affected:
206 206 __buffer : data reception buffer.
207 207
208 208 """
209 209
210 210 raise ValueError, "Not implemented"
211 211
212 212 class VoltageProc(ProcessingUnit):
213 213
214 214
215 215 def __init__(self):
216 216
217 217 self.objectDict = {}
218 218 self.dataOut = Voltage()
219 219 self.flip = 1
220 220
221 221 def init(self):
222 222
223 223 self.dataOut.copy(self.dataIn)
224 224 # There is no need to copy the dataIn attributes on every init();
225 225 # the copy should be made once per new data block
226 226
227 227 def selectChannels(self, channelList):
228 228
229 229 channelIndexList = []
230 230
231 231 for channel in channelList:
232 232 index = self.dataOut.channelList.index(channel)
233 233 channelIndexList.append(index)
234 234
235 235 self.selectChannelsByIndex(channelIndexList)
236 236
237 237 def selectChannelsByIndex(self, channelIndexList):
238 238 """
239 239 Selects a block of data by channel, according to channelIndexList.
240 240 
241 241 Input:
242 242 channelIndexList : simple list of channel indexes to select, e.g. [2,3,7]
243 243 
244 244 Affected:
245 245 self.dataOut.data
246 246 self.dataOut.channelList
247 247 
248 248 Return:
249 249 1 if the method executed successfully
254 254 """
255 255
256 256 for channelIndex in channelIndexList:
257 257 if channelIndex not in self.dataOut.channelIndexList:
258 258 print channelIndexList
259 259 raise ValueError, "The value %d in channelIndexList is not valid" %channelIndex
260 260
261 261 nChannels = len(channelIndexList)
262 262
263 263 data = self.dataOut.data[channelIndexList,:]
264 264
265 265 self.dataOut.data = data
266 266 self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
267 267 # self.dataOut.nChannels = nChannels
268 268
269 269 return 1
270 270
271 271 def selectHeights(self, minHei, maxHei):
272 272 """
273 273 Selects a block of data by height value, within the range
274 274 minHei <= height <= maxHei
275 275 
276 276 Input:
277 277 minHei : minimum height value to consider
278 278 maxHei : maximum height value to consider
279 279 
280 280 Affected:
281 281 Several values are changed indirectly through the selectHeightsByIndex method
282 282 
283 283 Return:
284 284 1 if the method executed successfully, 0 otherwise
285 285 """
286 286 if (minHei < self.dataOut.heightList[0]) or (minHei > maxHei):
287 287 raise ValueError, "some value in (%d,%d) is not valid" % (minHei, maxHei)
288 288
289 289 if (maxHei > self.dataOut.heightList[-1]):
290 290 maxHei = self.dataOut.heightList[-1]
291 291 # raise ValueError, "some value in (%d,%d) is not valid" % (minHei, maxHei)
292 292
293 293 minIndex = 0
294 294 maxIndex = 0
295 295 heights = self.dataOut.heightList
296 296
297 297 inda = numpy.where(heights >= minHei)
298 298 indb = numpy.where(heights <= maxHei)
299 299
300 300 try:
301 301 minIndex = inda[0][0]
302 302 except:
303 303 minIndex = 0
304 304
305 305 try:
306 306 maxIndex = indb[0][-1]
307 307 except:
308 308 maxIndex = len(heights)
309 309
310 310 self.selectHeightsByIndex(minIndex, maxIndex)
311 311
312 312 return 1
313 313
314 314
315 315 def selectHeightsByIndex(self, minIndex, maxIndex):
316 316 """
317 317 Selects a block of data by height index, within the range
318 318 minIndex <= index <= maxIndex
319 319 
320 320 Input:
321 321 minIndex : minimum height index to consider
322 322 maxIndex : maximum height index to consider
323 323 
324 324 Affected:
325 325 self.dataOut.data
326 326 self.dataOut.heightList
327 327 
328 328 Return:
329 329 1 if the method executed successfully, 0 otherwise
330 330 """
331 331
332 332 if (minIndex < 0) or (minIndex > maxIndex):
333 333 raise ValueError, "some value in (%d,%d) is not valid" % (minIndex, maxIndex)
334 334
335 335 if (maxIndex >= self.dataOut.nHeights):
336 336 maxIndex = self.dataOut.nHeights-1
337 337 # raise ValueError, "some value in (%d,%d) is not valid" % (minIndex, maxIndex)
338 338
339 339 nHeights = maxIndex - minIndex + 1
340 340
341 341 #voltage
342 342 data = self.dataOut.data[:,minIndex:maxIndex+1]
343 343
344 344 firstHeight = self.dataOut.heightList[minIndex]
345 345
346 346 self.dataOut.data = data
347 347 self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex+1]
348 348
349 349 return 1
350 350
351 351
352 352 def filterByHeights(self, window):
353 353 deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
354 354
355 355 if window == None:
356 356 window = self.dataOut.radarControllerHeaderObj.txA / deltaHeight
357 357
358 358 newdelta = deltaHeight * window
359 359 r = self.dataOut.data.shape[1] % window
360 360 buffer = self.dataOut.data[:,0:self.dataOut.data.shape[1]-r]
361 361 buffer = buffer.reshape(self.dataOut.data.shape[0],self.dataOut.data.shape[1]/window,window)
362 362 buffer = numpy.sum(buffer,2)
363 363 self.dataOut.data = buffer
364 364 self.dataOut.heightList = numpy.arange(self.dataOut.heightList[0],newdelta*self.dataOut.nHeights/window-newdelta,newdelta)
365 365 self.dataOut.windowOfFilter = window
366 366
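# Illustrative sketch, not from the original source: filterByHeights() sums groups of
# 'window' consecutive range gates, trading height resolution for sensitivity. With
# 1000 gates of 0.15 km and window = 4 the block is reduced to 250 gates of 0.60 km;
# gates left over after the last complete window (r above) are discarded, e.g.
#
#     >>> data = numpy.arange(8).reshape(1, 8)   # 1 channel, 8 gates
#     >>> data.reshape(1, 2, 4).sum(2)
#     array([[ 6, 22]])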
367 367 def deFlip(self):
368 368 self.dataOut.data *= self.flip
369 369 self.flip *= -1.
370 370
371 371
372 372 class CohInt(Operation):
373 373
374 374 __isConfig = False
375 375
376 376 __profIndex = 0
377 377 __withOverapping = False
378 378
379 379 __byTime = False
380 380 __initime = None
381 381 __lastdatatime = None
382 382 __integrationtime = None
383 383
384 384 __buffer = None
385 385
386 386 __dataReady = False
387 387
388 388 n = None
389 389
390 390
391 391 def __init__(self):
392 392
393 393 self.__isConfig = False
394 394
395 395 def setup(self, n=None, timeInterval=None, overlapping=False):
396 396 """
397 397 Set the parameters of the integration class.
398 398
399 399 Inputs:
400 400
401 401 n : Number of coherent integrations
402 402 timeInterval : Time of integration. If the parameter "n" is selected this one does not work
403 403 overlapping :
404 404
405 405 """
406 406
407 407 self.__initime = None
408 408 self.__lastdatatime = 0
409 409 self.__buffer = None
410 410 self.__dataReady = False
411 411
412 412
413 413 if n == None and timeInterval == None:
414 414 raise ValueError, "n or timeInterval should be specified ..."
415 415
416 416 if n != None:
417 417 self.n = n
418 418 self.__byTime = False
419 419 else:
420 420 self.__integrationtime = timeInterval * 60. #if (type(timeInterval)!=integer) -> change this line
421 421 self.n = 9999
422 422 self.__byTime = True
423 423
424 424 if overlapping:
425 425 self.__withOverapping = True
426 426 self.__buffer = None
427 427 else:
428 428 self.__withOverapping = False
429 429 self.__buffer = 0
430 430
431 431 self.__profIndex = 0
432 432
433 433 def putData(self, data):
434 434
435 435 """
436 436 Add a profile to the __buffer and increase in one the __profileIndex
437 437
438 438 """
439 439
440 440 if not self.__withOverapping:
441 441 self.__buffer += data.copy()
442 442 self.__profIndex += 1
443 443 return
444 444
445 445 #Overlapping data
446 446 nChannels, nHeis = data.shape
447 447 data = numpy.reshape(data, (1, nChannels, nHeis))
448 448
449 449 #If the buffer is empty then it takes the data value
450 450 if self.__buffer is None:
451 451 self.__buffer = data
452 452 self.__profIndex += 1
453 453 return
454 454
455 455 #If the buffer length is lower than n then stack the data value
456 456 if self.__profIndex < self.n:
457 457 self.__buffer = numpy.vstack((self.__buffer, data))
458 458 self.__profIndex += 1
459 459 return
460 460
461 461 #If the buffer length is equal to n then replace the last buffer value with the data value
462 462 self.__buffer = numpy.roll(self.__buffer, -1, axis=0)
463 463 self.__buffer[self.n-1] = data
464 464 self.__profIndex = self.n
465 465 return
466 466
467 467
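# Illustrative sketch, not from the original source: without overlapping, putData()
# keeps a running sum, so one integrated profile is produced every n inputs. With
# overlapping, __buffer keeps the last n profiles stacked along axis 0 and acts as a
# sliding window; e.g. for n = 4 and profiles p1..p5:
#
#     call 4 : buffer = [p1, p2, p3, p4] -> pushData() returns p1+p2+p3+p4
#     call 5 : buffer = [p2, p3, p4, p5] -> pushData() returns p2+p3+p4+p5
#
# so, once the window is full, an integrated profile is available on every new input.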
468 468 def pushData(self):
469 469 """
470 470 Return the sum of the last profiles and the profiles used in the sum.
471 471
472 472 Affected:
473 473
474 474 self.__profileIndex
475 475
476 476 """
477 477
478 478 if not self.__withOverapping:
479 479 data = self.__buffer
480 480 n = self.__profIndex
481 481
482 482 self.__buffer = 0
483 483 self.__profIndex = 0
484 484
485 485 return data, n
486 486
487 487 #Integration with Overlapping
488 488 data = numpy.sum(self.__buffer, axis=0)
489 489 n = self.__profIndex
490 490
491 491 return data, n
492 492
493 493 def byProfiles(self, data):
494 494
495 495 self.__dataReady = False
496 496 avgdata = None
497 497 n = None
498 498
499 499 self.putData(data)
500 500
501 501 if self.__profIndex == self.n:
502 502
503 503 avgdata, n = self.pushData()
504 504 self.__dataReady = True
505 505
506 506 return avgdata
507 507
508 508 def byTime(self, data, datatime):
509 509
510 510 self.__dataReady = False
511 511 avgdata = None
512 512 n = None
513 513
514 514 self.putData(data)
515 515
516 516 if (datatime - self.__initime) >= self.__integrationtime:
517 517 avgdata, n = self.pushData()
518 518 self.n = n
519 519 self.__dataReady = True
520 520
521 521 return avgdata
522 522
523 523 def integrate(self, data, datatime=None):
524 524
525 525 if self.__initime == None:
526 526 self.__initime = datatime
527 527
528 528 if self.__byTime:
529 529 avgdata = self.byTime(data, datatime)
530 530 else:
531 531 avgdata = self.byProfiles(data)
532 532
533 533
534 534 self.__lastdatatime = datatime
535 535
536 536 if avgdata is None:
537 537 return None, None
538 538
539 539 avgdatatime = self.__initime
540 540
541 541 deltatime = datatime -self.__lastdatatime
542 542
543 543 if not self.__withOverapping:
544 544 self.__initime = datatime
545 545 else:
546 546 self.__initime += deltatime
547 547
548 548 return avgdata, avgdatatime
549 549
550 550 def run(self, dataOut, **kwargs):
551 551
552 552 if not self.__isConfig:
553 553 self.setup(**kwargs)
554 554 self.__isConfig = True
555 555
556 556 avgdata, avgdatatime = self.integrate(dataOut.data, dataOut.utctime)
557 557
558 558 # dataOut.timeInterval *= n
559 559 dataOut.flagNoData = True
560 560
561 561 if self.__dataReady:
562 562 dataOut.data = avgdata
563 563 dataOut.nCohInt *= self.n
564 564 dataOut.utctime = avgdatatime
565 565 dataOut.timeInterval = dataOut.ippSeconds * dataOut.nCohInt
566 566 dataOut.flagNoData = False
567 567
568 568
569 569 class Decoder(Operation):
570 570
571 571 __isConfig = False
572 572 __profIndex = 0
573 573
574 574 code = None
575 575
576 576 nCode = None
577 577 nBaud = None
578 578
579 579 def __init__(self):
580 580
581 581 self.__isConfig = False
582 582
583 583 def setup(self, code):
584 584
585 585 self.__profIndex = 0
586 586
587 587 self.code = code
588 588
589 589 self.nCode = len(code)
590 590 self.nBaud = len(code[0])
591 591
592 592 def convolutionInFreq(self, data):
593 593
594 594 nchannel, ndata = data.shape
595 595 newcode = numpy.zeros(ndata)
596 596 newcode[0:self.nBaud] = self.code[self.__profIndex]
597 597
598 598 fft_data = numpy.fft.fft(data, axis=1)
599 599 fft_code = numpy.conj(numpy.fft.fft(newcode))
600 600 fft_code = fft_code.reshape(1,len(fft_code))
601 601
602 602 # conv = fft_data.copy()
603 603 # conv.fill(0)
604 604
605 605 conv = fft_data*fft_code
606 606
607 607 data = numpy.fft.ifft(conv,axis=1)
608 608
609 609 datadec = data[:,:-self.nBaud+1]
610 610 ndatadec = ndata - self.nBaud + 1
611 611
612 612 if self.__profIndex == self.nCode-1:
613 613 self.__profIndex = 0
614 614 return ndatadec, datadec
615 615
616 616 self.__profIndex += 1
617 617
618 618 return ndatadec, datadec
619 619
620 620
621 621 def convolutionInTime(self, data):
622 622
623 623 nchannel, ndata = data.shape
624 624 newcode = self.code[self.__profIndex]
625 625 ndatadec = ndata - self.nBaud + 1
626 626
627 627 datadec = numpy.zeros((nchannel, ndatadec))
628 628
629 629 for i in range(nchannel):
630 630 datadec[i,:] = numpy.correlate(data[i,:], newcode)
631 631
632 632 if self.__profIndex == self.nCode-1:
633 633 self.__profIndex = 0
634 634 return ndatadec, datadec
635 635
636 636 self.__profIndex += 1
637 637
638 638 return ndatadec, datadec
639 639
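# Illustrative sketch, not from the original source: both decoding paths return
# ndatadec = ndata - nBaud + 1 range gates, the length of the "valid" part of the
# correlation with the code, e.g.
#
#     >>> numpy.correlate([1, 2, 3, 4, 5], [1, 1, 1])
#     array([ 6,  9, 12])
#
# i.e. 5 samples decoded with a 3-baud code leave 5 - 3 + 1 = 3 decoded gates.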
640 640 def run(self, dataOut, code=None, mode = 0):
641 641
642 642 if not self.__isConfig:
643 643
644 644 if code is None:
645 645 code = dataOut.code
646 646
647 647 self.setup(code)
648 648 self.__isConfig = True
649 649
650 650 if mode == 0:
651 651 ndatadec, datadec = self.convolutionInFreq(dataOut.data)
652 652
653 653 if mode == 1:
654 654 print "This function is not implemented"
655 655 # ndatadec, datadec = self.convolutionInTime(dataOut.data)
656 656
657 657 dataOut.data = datadec
658 658
659 659 dataOut.heightList = dataOut.heightList[0:ndatadec]
660 660
661 661 dataOut.flagDecodeData = True #the data has now been decoded
662 662 
663 663 # dataOut.flagDeflipData = True #assume the data has not been deflipped
664 664
665 665
666 666 class SpectraProc(ProcessingUnit):
667 667
668 668 def __init__(self):
669 669
670 670 self.objectDict = {}
671 671 self.buffer = None
672 672 self.firstdatatime = None
673 673 self.profIndex = 0
674 674 self.dataOut = Spectra()
675 675
676 676 def __updateObjFromInput(self):
677 677
678 678 self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()
679 679 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()
680 680 self.dataOut.channelList = self.dataIn.channelList
681 681 self.dataOut.heightList = self.dataIn.heightList
682 682 self.dataOut.dtype = self.dataIn.dtype
683 683 # self.dataOut.nHeights = self.dataIn.nHeights
684 684 # self.dataOut.nChannels = self.dataIn.nChannels
685 685 self.dataOut.nBaud = self.dataIn.nBaud
686 686 self.dataOut.nCode = self.dataIn.nCode
687 687 self.dataOut.code = self.dataIn.code
688 688 self.dataOut.nProfiles = self.dataOut.nFFTPoints
689 689 # self.dataOut.channelIndexList = self.dataIn.channelIndexList
690 690 self.dataOut.flagTimeBlock = self.dataIn.flagTimeBlock
691 691 self.dataOut.utctime = self.firstdatatime
692 692 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData # assuming the data is already decoded
693 693 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData # assuming the data is already deflipped
694 694 self.dataOut.flagShiftFFT = self.dataIn.flagShiftFFT
695 695 self.dataOut.nCohInt = self.dataIn.nCohInt
696 696 self.dataOut.nIncohInt = 1
697 697 self.dataOut.ippSeconds = self.dataIn.ippSeconds
698 698 self.dataOut.windowOfFilter = self.dataIn.windowOfFilter
699 699
700 700 self.dataOut.timeInterval = self.dataIn.timeInterval*self.dataOut.nFFTPoints*self.dataOut.nIncohInt
701 701
702 702 def __getFft(self):
703 703 """
704 704 Converts Voltage values to Spectra
705 705
706 706 Affected:
707 707 self.dataOut.data_spc
708 708 self.dataOut.data_cspc
709 709 self.dataOut.data_dc
710 710 self.dataOut.heightList
711 711 self.profIndex
712 712 self.buffer
713 713 self.dataOut.flagNoData
714 714 """
715 715 fft_volt = numpy.fft.fft(self.buffer,axis=1)
716 716 dc = fft_volt[:,0,:]
717 717
718 718 # self-spectra computation
719 719 fft_volt = numpy.fft.fftshift(fft_volt,axes=(1,))
720 720 spc = fft_volt * numpy.conjugate(fft_volt)
721 721 spc = spc.real
722 722
723 723 blocksize = 0
724 724 blocksize += dc.size
725 725 blocksize += spc.size
726 726
727 727 cspc = None
728 728 pairIndex = 0
729 729 if self.dataOut.pairsList != None:
730 730 # cross-spectra computation
731 731 cspc = numpy.zeros((self.dataOut.nPairs, self.dataOut.nFFTPoints, self.dataOut.nHeights), dtype='complex')
732 732 for pair in self.dataOut.pairsList:
733 733 cspc[pairIndex,:,:] = fft_volt[pair[0],:,:] * numpy.conjugate(fft_volt[pair[1],:,:])
734 734 pairIndex += 1
735 735 blocksize += cspc.size
736 736
737 737 self.dataOut.data_spc = spc
738 738 self.dataOut.data_cspc = cspc
739 739 self.dataOut.data_dc = dc
740 740 self.dataOut.blockSize = blocksize
741 741
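# ---------------------------------------------------------------------------
# Editor's sketch (not part of the original file): it mimics __getFft() on a
# tiny synthetic buffer. The FFT is taken over the profile axis, the DC term is
# read from bin 0 before the shift, the self-spectra are |V|^2 and the
# cross-spectra are V_i * conj(V_j) for each pair in pairsList. All sizes and
# the pair list are assumed values.
# ---------------------------------------------------------------------------
def _sketch_get_fft(nChannels=3, nFFTPoints=8, nHeights=10, pairsList=[(0, 1), (0, 2)]):

    import numpy

    buffer = (numpy.random.randn(nChannels, nFFTPoints, nHeights) +
              1j*numpy.random.randn(nChannels, nFFTPoints, nHeights))

    fft_volt = numpy.fft.fft(buffer, axis=1)
    dc = fft_volt[:, 0, :]                               # DC component per channel and height

    fft_volt = numpy.fft.fftshift(fft_volt, axes=(1,))
    spc = (fft_volt*numpy.conjugate(fft_volt)).real      # self-spectra

    cspc = numpy.zeros((len(pairsList), nFFTPoints, nHeights), dtype='complex')
    for pairIndex, pair in enumerate(pairsList):         # cross-spectra, one per pair
        cspc[pairIndex, :, :] = fft_volt[pair[0], :, :]*numpy.conjugate(fft_volt[pair[1], :, :])

    return spc, cspc, dc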
742 742 def init(self, nFFTPoints=None, pairsList=None):
743 743
744 744 self.dataOut.flagNoData = True
745 745
746 746 if self.dataIn.type == "Spectra":
747 747 self.dataOut.copy(self.dataIn)
748 748 return
749 749
750 750 if self.dataIn.type == "Voltage":
751 751
752 752 if nFFTPoints == None:
753 753 raise ValueError, "SpectraProc.init() needs the nFFTPoints input variable"
754 754
755 755 if pairsList == None:
756 756 nPairs = 0
757 757 else:
758 758 nPairs = len(pairsList)
759 759
760 760 self.dataOut.nFFTPoints = nFFTPoints
761 761 self.dataOut.pairsList = pairsList
762 762 self.dataOut.nPairs = nPairs
763 763
764 764 if self.buffer == None:
765 765 self.buffer = numpy.zeros((self.dataIn.nChannels,
766 766 self.dataOut.nFFTPoints,
767 767 self.dataIn.nHeights),
768 768 dtype='complex')
769 769
770 770
771 771 self.buffer[:,self.profIndex,:] = self.dataIn.data.copy()
772 772 self.profIndex += 1
773 773
774 774 if self.firstdatatime == None:
775 775 self.firstdatatime = self.dataIn.utctime
776 776
777 777 if self.profIndex == self.dataOut.nFFTPoints:
778 778 self.__updateObjFromInput()
779 779 self.__getFft()
780 780
781 781 self.dataOut.flagNoData = False
782 782
783 783 self.buffer = None
784 784 self.firstdatatime = None
785 785 self.profIndex = 0
786 786
787 787 return
788 788
789 789 raise ValueError, "The object type %s is not valid"%(self.dataIn.type)
790 790
791 791 def selectChannels(self, channelList):
792 792
793 793 channelIndexList = []
794 794
795 795 for channel in channelList:
796 796 index = self.dataOut.channelList.index(channel)
797 797 channelIndexList.append(index)
798 798
799 799 self.selectChannelsByIndex(channelIndexList)
800 800
801 801 def selectChannelsByIndex(self, channelIndexList):
802 802 """
803 803 Selects a block of data by channel according to channelIndexList
804 804
805 805 Input:
806 806 channelIndexList : simple list of channel indexes to select, e.g. [2,3,7]
807 807
808 808 Affected:
809 809 self.dataOut.data_spc
810 810 self.dataOut.channelIndexList
811 811 self.dataOut.nChannels
812 812
813 813 Return:
814 814 None
815 815 """
816 816
817 817 for channelIndex in channelIndexList:
818 818 if channelIndex not in self.dataOut.channelIndexList:
819 819 print channelIndexList
820 820 raise ValueError, "The value %d in channelIndexList is not valid" %channelIndex
821 821
822 822 nChannels = len(channelIndexList)
823 823
824 824 data_spc = self.dataOut.data_spc[channelIndexList,:]
825 825
826 826 self.dataOut.data_spc = data_spc
827 827 self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
828 828 # self.dataOut.nChannels = nChannels
829 829
830 830 return 1
831 831
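# ---------------------------------------------------------------------------
# Editor's sketch (not part of the original file): the fancy indexing used by
# selectChannelsByIndex() keeps only the rows of data_spc (axis 0 = channel)
# listed in channelIndexList and rebuilds channelList accordingly. The array
# sizes and the index list are assumed values.
# ---------------------------------------------------------------------------
def _sketch_select_channels():

    import numpy

    data_spc = numpy.random.rand(4, 16, 10)       # (nChannels, nFFTPoints, nHeights)
    channelList = [0, 1, 2, 3]
    channelIndexList = [1, 3]

    data_spc = data_spc[channelIndexList, :]      # keeps channels 1 and 3 only
    channelList = [channelList[i] for i in channelIndexList]

    return data_spc.shape, channelList            # ((2, 16, 10), [1, 3])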
832 def selectHeights(self, minHei, maxHei):
833 """
834 Selects a block of data based on a range of height values, such that
835 minHei <= height <= maxHei
836
837 Input:
838 minHei : minimum height to consider
839 maxHei : maximum height to consider
840
841 Affected:
842 Several values are changed indirectly through the selectHeightsByIndex method
843
844 Return:
845 1 if the method ran successfully, otherwise 0
846 """
847 if (minHei < self.dataOut.heightList[0]) or (minHei > maxHei):
848 raise ValueError, "some value in (%d,%d) is not valid" % (minHei, maxHei)
849
850 if (maxHei > self.dataOut.heightList[-1]):
851 maxHei = self.dataOut.heightList[-1]
852 # raise ValueError, "some value in (%d,%d) is not valid" % (minHei, maxHei)
853
854 minIndex = 0
855 maxIndex = 0
856 heights = self.dataOut.heightList
857
858 inda = numpy.where(heights >= minHei)
859 indb = numpy.where(heights <= maxHei)
860
861 try:
862 minIndex = inda[0][0]
863 except:
864 minIndex = 0
865
866 try:
867 maxIndex = indb[0][-1]
868 except:
869 maxIndex = len(heights)
870
871 self.selectHeightsByIndex(minIndex, maxIndex)
872
873 return 1
874
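# ---------------------------------------------------------------------------
# Editor's sketch (not part of the original file): it shows how selectHeights()
# turns a height range in km into the index range consumed by
# selectHeightsByIndex(), using numpy.where on the height list. The height
# spacing and the range limits are assumed values.
# ---------------------------------------------------------------------------
def _sketch_height_range_to_indexes(minHei=120.0, maxHei=180.0):

    import numpy

    heightList = numpy.arange(100.0, 200.0, 15.0)   # assumed heights: 100, 115, ..., 190 km

    inda = numpy.where(heightList >= minHei)
    indb = numpy.where(heightList <= maxHei)

    minIndex = inda[0][0]                           # first height >= minHei
    maxIndex = indb[0][-1]                          # last height <= maxHei

    return minIndex, maxIndex, heightList[minIndex:maxIndex+1]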
875
876 def selectHeightsByIndex(self, minIndex, maxIndex):
877 """
878 Selects a block of data based on a range of height indexes, such that
879 minIndex <= index <= maxIndex
880
881 Input:
882 minIndex : minimum height index to consider
883 maxIndex : maximum height index to consider
884
885 Affected:
886 self.dataOut.data_spc
887 self.dataOut.data_cspc
888 self.dataOut.data_dc
889 self.dataOut.heightList
890
891 Return:
892 1 if the method ran successfully, otherwise 0
893 """
894
895 if (minIndex < 0) or (minIndex > maxIndex):
896 raise ValueError, "some value in (%d,%d) is not valid" % (minIndex, maxIndex)
897
898 if (maxIndex >= self.dataOut.nHeights):
899 maxIndex = self.dataOut.nHeights-1
900 # raise ValueError, "some value in (%d,%d) is not valid" % (minIndex, maxIndex)
901
902 nHeights = maxIndex - minIndex + 1
832 903
904 #Spectra
905 data_spc = self.dataOut.data_spc[:,:,minIndex:maxIndex+1]
906
907 data_cspc = None
908 if self.dataOut.data_cspc != None:
909 data_cspc = self.dataOut.data_cspc[:,:,minIndex:maxIndex+1]
910
911 data_dc = None
912 if self.dataOut.data_dc != None:
913 data_dc = self.dataOut.data_dc[:,:,minIndex:maxIndex+1]
914
915 self.dataOut.data_spc = data_spc
916 self.dataOut.data_cspc = data_cspc
917 self.dataOut.data_dc = data_dc
918
919 self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex+1]
920
921 return 1
922
923 def removeDC(self, mode = 1):
924
925 dc_index = 0
926 freq_index = numpy.array([-2,-1,1,2])
927 data_spc = self.dataOut.data_spc
928 data_cspc = self.dataOut.data_cspc
929 data_dc = self.dataOut.data_dc
930
931 if self.dataOut.flagShiftFFT:
932 dc_index += self.dataOut.nFFTPoints/2
933 freq_index += self.dataOut.nFFTPoints/2
934
935 if mode == 1:
936 data_spc[:,dc_index,:] = (data_spc[:,freq_index[1],:] + data_spc[:,freq_index[2],:])/2
937 if data_cspc != None:
938 data_cspc[:,dc_index,:] = (data_cspc[:,freq_index[1],:] + data_cspc[:,freq_index[2],:])/2
939 return 1
940
941 if mode == 2:
942 pass
943
944 if mode == 3:
945 pass
946
947 raise ValueError, "mode parameter has to be 1, 2 or 3"
948
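# ---------------------------------------------------------------------------
# Editor's sketch (not part of the original file): the mode-1 DC removal used
# above replaces the DC bin of each self-spectrum (bin 0, or nFFTPoints/2 when
# the FFT has been shifted) with the average of its two neighbouring bins.
# The array shapes and the shift flag are assumed values.
# ---------------------------------------------------------------------------
def _sketch_remove_dc(flagShiftFFT=True, nChannels=2, nFFTPoints=8, nHeights=5):

    import numpy

    data_spc = numpy.random.rand(nChannels, nFFTPoints, nHeights)

    dc_index = 0
    freq_index = numpy.array([-2, -1, 1, 2])
    if flagShiftFFT:                                 # spectrum centred after fftshift
        dc_index += nFFTPoints//2
        freq_index += nFFTPoints//2

    # interpolate the DC bin from the bins immediately below and above it
    data_spc[:, dc_index, :] = (data_spc[:, freq_index[1], :] +
                                data_spc[:, freq_index[2], :])/2

    return data_spc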
949 def removeInterference(self):
950
951 pass
952
953
833 954 class IncohInt(Operation):
834 955
835 956
836 957 __profIndex = 0
837 958 __withOverapping = False
838 959
839 960 __byTime = False
840 961 __initime = None
841 962 __lastdatatime = None
842 963 __integrationtime = None
843 964
844 965 __buffer_spc = None
845 966 __buffer_cspc = None
846 967 __buffer_dc = None
847 968
848 969 __dataReady = False
849 970
850 971 n = None
851 972
852 973
853 974 def __init__(self):
854 975
855 976 self.__isConfig = False
856 977
857 978 def setup(self, n=None, timeInterval=None, overlapping=False):
858 979 """
859 980 Set the parameters of the integration class.
860 981
861 982 Inputs:
862 983
863 984 n : Number of incoherent integrations
864 985 timeInterval : Integration time in minutes. Ignored when "n" is given
865 986 overlapping : If True, keep the last n blocks in a rolling buffer and deliver overlapping sums
866 987
867 988 """
868 989
869 990 self.__initime = None
870 991 self.__lastdatatime = 0
871 992 self.__buffer_spc = None
872 993 self.__buffer_cspc = None
873 994 self.__buffer_dc = None
874 995 self.__dataReady = False
875 996
876 997
877 998 if n == None and timeInterval == None:
878 999 raise ValueError, "n or timeInterval should be specified ..."
879 1000
880 1001 if n != None:
881 1002 self.n = n
882 1003 self.__byTime = False
883 1004 else:
884 1005 self.__integrationtime = timeInterval * 60. # timeInterval is given in minutes; change this line if another unit is used
885 1006 self.n = 9999
886 1007 self.__byTime = True
887 1008
888 1009 if overlapping:
889 1010 self.__withOverapping = True
890 1011 else:
891 1012 self.__withOverapping = False
892 1013 self.__buffer_spc = 0
893 1014 self.__buffer_cspc = 0
894 1015 self.__buffer_dc = 0
895 1016
896 1017 self.__profIndex = 0
897 1018
898 1019 def putData(self, data_spc, data_cspc, data_dc):
899 1020
900 1021 """
901 1022 Add a spectra block to the buffers and increase __profIndex by one
902 1023
903 1024 """
904 1025
905 1026 if not self.__withOverapping:
906 1027 self.__buffer_spc += data_spc
907 1028
908 1029 if data_cspc == None:
909 1030 self.__buffer_cspc = None
910 1031 else:
911 1032 self.__buffer_cspc += data_cspc
912 1033
913 1034 if data_dc == None:
914 1035 self.__buffer_dc = None
915 1036 else:
916 1037 self.__buffer_dc += data_dc
917 1038
918 1039 self.__profIndex += 1
919 1040 return
920 1041
921 1042 #Overlapping data
922 1043 nChannels, nFFTPoints, nHeis = data_spc.shape
923 1044 data_spc = numpy.reshape(data_spc, (1, nChannels, nFFTPoints, nHeis))
924 1045 if data_cspc != None:
925 1046 data_cspc = numpy.reshape(data_cspc, (1, -1, nFFTPoints, nHeis))
926 1047 if data_dc != None:
927 1048 data_dc = numpy.reshape(data_dc, (1, -1, nHeis))
928 1049
929 1050 #If the buffer is empty then it takes the data value
930 1051 if self.__buffer_spc == None:
931 1052 self.__buffer_spc = data_spc
932 1053
933 1054 if data_cspc == None:
934 1055 self.__buffer_cspc = None
935 1056 else:
936 1057 self.__buffer_cspc = data_cspc
937 1058
938 1059 if data_dc == None:
939 1060 self.__buffer_dc = None
940 1061 else:
941 1062 self.__buffer_dc = data_dc
942 1063
943 1064 self.__profIndex += 1
944 1065 return
945 1066
946 1067 #If the buffer length is lower than n then stack the data value
947 1068 if self.__profIndex < self.n:
948 1069 self.__buffer_spc = numpy.vstack((self.__buffer_spc, data_spc))
949 1070
950 1071 if data_cspc != None:
951 1072 self.__buffer_cspc = numpy.vstack((self.__buffer_cspc, data_cspc))
952 1073
953 1074 if data_dc != None:
954 1075 self.__buffer_dc = numpy.vstack((self.__buffer_dc, data_dc))
955 1076
956 1077 self.__profIndex += 1
957 1078 return
958 1079
959 1080 #If the buffer length is equal to n then replace the oldest value with the new data
960 1081 self.__buffer_spc = numpy.roll(self.__buffer_spc, -1, axis=0)
961 1082 self.__buffer_spc[self.n-1] = data_spc
962 1083
963 1084 if data_cspc != None:
964 1085 self.__buffer_cspc = numpy.roll(self.__buffer_cspc, -1, axis=0)
965 1086 self.__buffer_cspc[self.n-1] = data_cspc
966 1087
967 1088 if data_dc != None:
968 1089 self.__buffer_dc = numpy.roll(self.__buffer_dc, -1, axis=0)
969 1090 self.__buffer_dc[self.n-1] = data_dc
970 1091
971 1092 self.__profIndex = self.n
972 1093 return
973 1094
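# ---------------------------------------------------------------------------
# Editor's sketch (not part of the original file): the overlapping-buffer
# strategy of putData(). Incoming spectra blocks are stacked along a new
# leading axis until n of them are held; after that the oldest block is rolled
# out and the newest one takes its place, so pushData() can always sum the last
# n blocks. The block shape, n and the number of blocks are assumed values.
# ---------------------------------------------------------------------------
def _sketch_overlapping_buffer(n=3, nBlocks=6, shape=(2, 8, 5)):

    import numpy

    buffer_spc = None

    for k in range(nBlocks):
        data_spc = numpy.random.rand(*shape)                 # one incoming spectra block

        if buffer_spc is None:                               # first block: start the stack
            buffer_spc = data_spc.reshape((1,) + shape)
        elif buffer_spc.shape[0] < n:                        # fewer than n blocks: keep stacking
            buffer_spc = numpy.vstack((buffer_spc, data_spc.reshape((1,) + shape)))
        else:                                                # n blocks held: drop the oldest one
            buffer_spc = numpy.roll(buffer_spc, -1, axis=0)
            buffer_spc[n-1] = data_spc

    return numpy.sum(buffer_spc, axis=0)                     # sum of the last (up to) n blocks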
974 1095
975 1096 def pushData(self):
976 1097 """
977 1098 Return the sum of the last profiles and the profiles used in the sum.
978 1099
979 1100 Affected:
980 1101
981 1102 self.__profileIndex
982 1103
983 1104 """
984 1105 data_spc = None
985 1106 data_cspc = None
986 1107 data_dc = None
987 1108
988 1109 if not self.__withOverapping:
989 1110 data_spc = self.__buffer_spc
990 1111 data_cspc = self.__buffer_cspc
991 1112 data_dc = self.__buffer_dc
992 1113
993 1114 n = self.__profIndex
994 1115
995 1116 self.__buffer_spc = 0
996 1117 self.__buffer_cspc = 0
997 1118 self.__buffer_dc = 0
998 1119 self.__profIndex = 0
999 1120
1000 1121 return data_spc, data_cspc, data_dc, n
1001 1122
1002 1123 #Integration with Overlapping
1003 1124 data_spc = numpy.sum(self.__buffer_spc, axis=0)
1004 1125
1005 1126 if self.__buffer_cspc != None:
1006 1127 data_cspc = numpy.sum(self.__buffer_cspc, axis=0)
1007 1128
1008 1129 if self.__buffer_dc != None:
1009 1130 data_dc = numpy.sum(self.__buffer_dc, axis=0)
1010 1131
1011 1132 n = self.__profIndex
1012 1133
1013 1134 return data_spc, data_cspc, data_dc, n
1014 1135
1015 1136 def byProfiles(self, *args):
1016 1137
1017 1138 self.__dataReady = False
1018 1139 avgdata_spc = None
1019 1140 avgdata_cspc = None
1020 1141 avgdata_dc = None
1021 1142 n = None
1022 1143
1023 1144 self.putData(*args)
1024 1145
1025 1146 if self.__profIndex == self.n:
1026 1147
1027 1148 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
1028 1149 self.__dataReady = True
1029 1150
1030 1151 return avgdata_spc, avgdata_cspc, avgdata_dc
1031 1152
1032 1153 def byTime(self, datatime, *args):
1033 1154
1034 1155 self.__dataReady = False
1035 1156 avgdata_spc = None
1036 1157 avgdata_cspc = None
1037 1158 avgdata_dc = None
1038 1159 n = None
1039 1160
1040 1161 self.putData(*args)
1041 1162
1042 1163 if (datatime - self.__initime) >= self.__integrationtime:
1043 1164 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
1044 1165 self.n = n
1045 1166 self.__dataReady = True
1046 1167
1047 1168 return avgdata_spc, avgdata_cspc, avgdata_dc
1048 1169
1049 1170 def integrate(self, datatime, *args):
1050 1171
1051 1172 if self.__initime == None:
1052 1173 self.__initime = datatime
1053 1174
1054 1175 if self.__byTime:
1055 1176 avgdata_spc, avgdata_cspc, avgdata_dc = self.byTime(datatime, *args)
1056 1177 else:
1057 1178 avgdata_spc, avgdata_cspc, avgdata_dc = self.byProfiles(*args)
1058 1179
1059 1180 self.__lastdatatime = datatime
1060 1181
1061 1182 if avgdata_spc == None:
1062 1183 return None, None, None, None
1063 1184
1064 1185 avgdatatime = self.__initime
1065 1186
1066 1187 deltatime = datatime -self.__lastdatatime
1067 1188
1068 1189 if not self.__withOverapping:
1069 1190 self.__initime = datatime
1070 1191 else:
1071 1192 self.__initime += deltatime
1072 1193
1073 1194 return avgdatatime, avgdata_spc, avgdata_cspc, avgdata_dc
1074 1195
1075 1196 def run(self, dataOut, n=None, timeInterval=None, overlapping=False):
1076 1197
1077 1198 if not self.__isConfig:
1078 1199 self.setup(n, timeInterval, overlapping)
1079 1200 self.__isConfig = True
1080 1201
1081 1202 avgdatatime, avgdata_spc, avgdata_cspc, avgdata_dc = self.integrate(dataOut.utctime,
1082 1203 dataOut.data_spc,
1083 1204 dataOut.data_cspc,
1084 1205 dataOut.data_dc)
1085 1206
1086 1207 # dataOut.timeInterval *= n
1087 1208 dataOut.flagNoData = True
1088 1209
1089 1210 if self.__dataReady:
1090 1211
1091 1212 dataOut.data_spc = avgdata_spc
1092 1213 dataOut.data_cspc = avgdata_cspc
1093 1214 dataOut.data_dc = avgdata_dc
1094 1215
1095 1216 dataOut.nIncohInt *= self.n
1096 1217 dataOut.utctime = avgdatatime
1097 1218 dataOut.timeInterval = dataOut.ippSeconds * dataOut.nCohInt * dataOut.nIncohInt * dataOut.nFFTPoints
1098 1219 dataOut.flagNoData = False
1099 1220
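# ---------------------------------------------------------------------------
# Editor's sketch (not part of the original file): the arithmetic applied by
# run() above once __dataReady is set. n spectra blocks are summed
# incoherently, nIncohInt grows by a factor of n, and the output time interval
# follows the formula used in run(). All sizes and timing values are assumed.
# ---------------------------------------------------------------------------
def _sketch_incoherent_integration(n=8, nChannels=2, nFFTPoints=16, nHeights=10,
                                   ippSeconds=0.0004, nCohInt=4):

    import numpy

    # n self-spectra blocks (already |FFT|^2, hence real and positive)
    blocks = [numpy.random.rand(nChannels, nFFTPoints, nHeights) for k in range(n)]

    avgdata_spc = sum(blocks)            # incoherent sum accumulated by putData()/pushData()
    nIncohInt = 1 * n                    # dataOut.nIncohInt *= self.n

    # dataOut.timeInterval = ippSeconds * nCohInt * nIncohInt * nFFTPoints
    timeInterval = ippSeconds * nCohInt * nIncohInt * nFFTPoints

    return avgdata_spc, nIncohInt, timeInterval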
1100 1221 class ProfileSelector(Operation):
1101 1222
1102 1223 profileIndex = None
1103 1224 # Total number of profiles
1104 1225 nProfiles = None
1105 1226
1106 1227 def __init__(self):
1107 1228
1108 1229 self.profileIndex = 0
1109 1230
1110 1231 def incIndex(self):
1111 1232 self.profileIndex += 1
1112 1233
1113 1234 if self.profileIndex >= self.nProfiles:
1114 1235 self.profileIndex = 0
1115 1236
1116 1237 def isProfileInRange(self, minIndex, maxIndex):
1117 1238
1118 1239 if self.profileIndex < minIndex:
1119 1240 return False
1120 1241
1121 1242 if self.profileIndex > maxIndex:
1122 1243 return False
1123 1244
1124 1245 return True
1125 1246
1126 1247 def isProfileInList(self, profileList):
1127 1248
1128 1249 if self.profileIndex not in profileList:
1129 1250 return False
1130 1251
1131 1252 return True
1132 1253
1133 1254 def run(self, dataOut, profileList=None, profileRangeList=None):
1134 1255
1135 1256 dataOut.flagNoData = True
1136 1257 self.nProfiles = dataOut.nProfiles
1137 1258
1138 1259 if profileList != None:
1139 1260 if self.isProfileInList(profileList):
1140 1261 dataOut.flagNoData = False
1141 1262
1142 1263 self.incIndex()
1143 1264 return 1
1144 1265
1145 1266
1146 1267 elif profileRangeList != None:
1147 1268 minIndex = profileRangeList[0]
1148 1269 maxIndex = profileRangeList[1]
1149 1270 if self.isProfileInRange(minIndex, maxIndex):
1150 1271 dataOut.flagNoData = False
1151 1272
1152 1273 self.incIndex()
1153 1274 return 1
1154 1275
1155 1276 else:
1156 1277 raise ValueError, "ProfileSelector needs profileList or profileRangeList"
1157 1278
1158 1279 return 0
1159 1280
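# ---------------------------------------------------------------------------
# Editor's sketch (not part of the original file): the two selection modes of
# ProfileSelector.run(). Either an explicit profile list or an inclusive
# [minIndex, maxIndex] range is given; profiles outside the selection keep
# flagNoData = True. nProfiles and both selections are assumed values.
# ---------------------------------------------------------------------------
def _sketch_profile_selector(nProfiles=8, profileList=[0, 2, 4], profileRangeList=[3, 5]):

    keptByList = []
    keptByRange = []

    for profileIndex in range(nProfiles):
        if profileIndex in profileList:                                    # isProfileInList()
            keptByList.append(profileIndex)
        if profileRangeList[0] <= profileIndex <= profileRangeList[1]:     # isProfileInRange()
            keptByRange.append(profileIndex)

    return keptByList, keptByRange       # ([0, 2, 4], [3, 4, 5])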