Miguel Valdez -
r232:13e5ede6a371
@@ -1,512 +1,525
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JROData.py 173 2012-11-20 15:06:21Z murco $
5 5 '''
6 6
7 7 import os, sys
8 8 import copy
9 9 import numpy
10 10 import datetime
11 11
12 12 from jroheaderIO import SystemHeader, RadarControllerHeader
13 13
14 14 def hildebrand_sekhon(data, navg):
15 15 """
16 16 This method is for the objective determination of the noise level in Doppler spectra. This
17 17 implementation technique is based on the fact that the standard deviation of the spectral
18 18 densities is equal to the mean spectral density for white Gaussian noise
19 19
20 20 Inputs:
21 21 data : spectral data (2-D array, flattened internally)
22 22 navg : number of averages
23 23
24 24 Return:
25 25 -1 : on any error
26 26 anoise : noise level
27 27 """
28 28
29 29 dataflat = data.copy().reshape(-1)
30 30 dataflat.sort()
31 31 npts = dataflat.size #number of data points
32 32 npts_noise = 0.2*npts
33 33
34 34 if npts < 32:
35 35 print "error in noise - requires at least 32 points"
36 36 return -1.0
37 37
38 38 dataflat2 = numpy.power(dataflat,2)
39 39
40 40 cs = numpy.cumsum(dataflat)
41 41 cs2 = numpy.cumsum(dataflat2)
42 42
43 43 # data sorted in ascending order
44 44 nmin = int((npts + 7.)/8)
45 45
46 46 for i in range(nmin, npts):
47 47 s = cs[i]
48 48 s2 = cs2[i]
49 49 p = s / float(i);
50 50 p2 = p**2;
51 51 q = s2 / float(i) - p2;
52 52 leftc = p2;
53 53 rightc = q * float(navg);
54 54 R2 = leftc/rightc
55 55
56 56 # Signal detect: R2 < 1 (R2 = leftc/rightc)
57 57 if R2 < 1:
58 58 npts_noise = i
59 59 break
60 60
61 61
62 62 anoise = numpy.average(dataflat[0:npts_noise])
63 63
64 64 return anoise;
65 65
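A minimal sketch of how hildebrand_sekhon could be exercised on synthetic data; the array shape and navg value below are illustrative, not taken from this changeset:

    import numpy
    # synthetic white-noise power "spectrum": 64 heights x 128 spectral points
    data = numpy.abs(numpy.random.randn(64, 128))**2
    noise = hildebrand_sekhon(data, navg=10)
    print "estimated noise level:", noise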
66 66 def sorting_bruce(data, navg):
67 67
68 68 data = data.copy()
69 69
70 70 sortdata = numpy.sort(data)
71 71 lenOfData = len(data)
72 72 nums_min = lenOfData/10
73 73
74 74 if (lenOfData/10) > 0:
75 75 nums_min = lenOfData/10
76 76 else:
77 77 nums_min = 0
78 78
79 79 rtest = 1.0 + 1.0/navg
80 80
81 81 sum = 0.
82 82
83 83 sumq = 0.
84 84
85 85 j = 0
86 86
87 87 cont = 1
88 88
89 89 while((cont==1)and(j<lenOfData)):
90 90
91 91 sum += sortdata[j]
92 92
93 93 sumq += sortdata[j]**2
94 94
95 95 j += 1
96 96
97 97 if j > nums_min:
98 98 if ((sumq*j) <= (rtest*sum**2)):
99 99 lnoise = sum / j
100 100 else:
101 101 j = j - 1
102 102 sum = sum - sortdata[j]
103 103 sumq = sumq - sortdata[j]**2
104 104 cont = 0
105 105
106 106 if j == nums_min:
107 107 lnoise = sum /j
108 108
109 109 return lnoise
110 110
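A similar hedged sketch for sorting_bruce, using an incoherently averaged synthetic noise spectrum (shapes and navg are illustrative):

    import numpy
    navg = 10
    spectra = numpy.abs(numpy.random.randn(navg, 64, 128))**2
    data = numpy.average(spectra, axis=0).reshape(-1)   # 1-D averaged noise power
    print "sorting_bruce noise estimate:", sorting_bruce(data, navg)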
111 111 class JROData:
112 112
113 113 # m_BasicHeader = BasicHeader()
114 114 # m_ProcessingHeader = ProcessingHeader()
115 115
116 116 systemHeaderObj = SystemHeader()
117 117
118 118 radarControllerHeaderObj = RadarControllerHeader()
119 119
120 120 # data = None
121 121
122 122 type = None
123 123
124 124 dtype = None
125 125
126 126 # nChannels = None
127 127
128 128 # nHeights = None
129 129
130 130 nProfiles = None
131 131
132 132 heightList = None
133 133
134 134 channelList = None
135 135
136 136 flagNoData = True
137 137
138 138 flagTimeBlock = False
139 139
140 140 utctime = None
141 141
142 142 blocksize = None
143 143
144 144 nCode = None
145 145
146 146 nBaud = None
147 147
148 148 code = None
149 149
150 flagDecodeData = True #assume the data is decoded
150 flagDecodeData = False #assume the data is not decoded
151 151
152 flagDeflipData = True #assume the data has already been de-flipped
152 flagDeflipData = False #assume the data has not been de-flipped
153 153
154 154 flagShiftFFT = False
155 155
156 156 ippSeconds = None
157 157
158 158 timeInterval = None
159 159
160 160 nCohInt = None
161 161
162 162 noise = None
163 163
164 164 #Speed of light
165 165 C = 3e8
166 166
167 167 frequency = 49.92e6
168 168
169 169 def __init__(self):
170 170
171 171 raise ValueError, "This class has not been implemented"
172 172
173 173 def copy(self, inputObj=None):
174 174
175 175 if inputObj == None:
176 176 return copy.deepcopy(self)
177 177
178 178 for key in inputObj.__dict__.keys():
179 179 self.__dict__[key] = inputObj.__dict__[key]
180 180
181 181 def deepcopy(self):
182 182
183 183 return copy.deepcopy(self)
184 184
185 185 def isEmpty(self):
186 186
187 187 return self.flagNoData
188 188
189 189 def getNoise(self):
190 190
191 191 raise ValueError, "Not implemented"
192 192
193 193 def getNChannels(self):
194 194
195 195 return len(self.channelList)
196 196
197 197 def getChannelIndexList(self):
198 198
199 199 return range(self.nChannels)
200 200
201 201 def getNHeights(self):
202 202
203 203 return len(self.heightList)
204 204
205 205 def getHeiRange(self, extrapoints=0):
206 206
207 207 heis = self.heightList
208 208 # deltah = self.heightList[1] - self.heightList[0]
209 209 #
210 210 # heis.append(self.heightList[-1])
211 211
212 212 return heis
213 213
214 214 def getDatatime(self):
215 215
216 216 datatime = datetime.datetime.utcfromtimestamp(self.utctime)
217 217 return datatime
218 218
219 219 def getTimeRange(self):
220 220
221 221 datatime = []
222 222
223 223 datatime.append(self.utctime)
224 224 datatime.append(self.utctime + self.timeInterval)
225 225
226 226 datatime = numpy.array(datatime)
227 227
228 228 return datatime
229 229
230 230 def getFmax(self):
231 231
232 232 PRF = 1./(self.ippSeconds * self.nCohInt)
233 233
234 234 fmax = PRF/2.
235 235
236 236 return fmax
237 237
238 238 def getVmax(self):
239 239
240 240 _lambda = self.C/self.frequency
241 241
242 242 vmax = self.getFmax() * _lambda
243 243
244 244 return vmax
245 245
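Worked numbers for getFmax/getVmax, following the formulas above with illustrative radar parameters (only C and frequency match the class defaults):

    C = 3e8
    frequency = 49.92e6
    ippSeconds = 1e-3                    # 1 ms inter-pulse period (illustrative)
    nCohInt = 4                          # coherent integrations (illustrative)
    PRF = 1. / (ippSeconds * nCohInt)    # 250 Hz
    fmax = PRF / 2.                      # 125 Hz
    vmax = fmax * (C / frequency)        # ~751 m/s, as computed by getVmax()
    print fmax, vmax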
246 246 nChannels = property(getNChannels, "I'm the 'nChannel' property.")
247 247 channelIndexList = property(getChannelIndexList, "I'm the 'channelIndexList' property.")
248 248 nHeights = property(getNHeights, "I'm the 'nHeights' property.")
249 249 noise = property(getNoise, "I'm the 'noise' property.")
250 250 datatime = property(getDatatime, "I'm the 'datatime' property")
251 251
252 252 class Voltage(JROData):
253 253
254 254 #data is a 2-dimensional numpy array (channels, heights)
255 255 data = None
256 256
257 257 def __init__(self):
258 258 '''
259 259 Constructor
260 260 '''
261 261
262 262 self.radarControllerHeaderObj = RadarControllerHeader()
263 263
264 264 self.systemHeaderObj = SystemHeader()
265 265
266 266 self.type = "Voltage"
267 267
268 268 self.data = None
269 269
270 270 self.dtype = None
271 271
272 272 # self.nChannels = 0
273 273
274 274 # self.nHeights = 0
275 275
276 276 self.nProfiles = None
277 277
278 278 self.heightList = None
279 279
280 280 self.channelList = None
281 281
282 282 # self.channelIndexList = None
283 283
284 284 self.flagNoData = True
285 285
286 286 self.flagTimeBlock = False
287 287
288 288 self.utctime = None
289 289
290 290 self.nCohInt = None
291 291
292 292 self.blocksize = None
293
294 self.flagDecodeData = False #assume the data is not decoded
295
296 self.flagDeflipData = False #assume the data has not been de-flipped
297
298 self.flagShiftFFT = False
299
293 300
294 301 def getNoisebyHildebrand(self):
295 302 """
296 303 Determine the noise level using the Hildebrand-Sekhon method
297 304
298 305 Return:
299 306 noiselevel
300 307 """
301 308
302 309 for channel in range(self.nChannels):
303 310 daux = self.data_spc[channel,:,:]
304 311 self.noise[channel] = hildebrand_sekhon(daux, self.nCohInt)
305 312
306 313 return self.noise
307 314
308 315 def getNoise(self, type = 1):
309 316
310 317 self.noise = numpy.zeros(self.nChannels)
311 318
312 319 if type == 1:
313 320 noise = self.getNoisebyHildebrand()
314 321
315 322 return 10*numpy.log10(noise)
316 323
317 324 class Spectra(JROData):
318 325
319 326 #data_spc is a 3-dimensional numpy array (channels, profiles, heights)
320 327 data_spc = None
321 328
322 329 #data_cspc is a 3-dimensional numpy array (channels, pairs, heights)
323 330 data_cspc = None
324 331
325 332 #data_dc is a 2-dimensional numpy array (channels, heights)
326 333 data_dc = None
327 334
328 335 nFFTPoints = None
329 336
330 337 nPairs = None
331 338
332 339 pairsList = None
333 340
334 341 nIncohInt = None
335 342
336 343 wavelength = None #Needed to compute the velocity range from the frequency
337 344
338 345 nCohInt = None #required to determine the value of timeInterval
339 346
340 347 def __init__(self):
341 348 '''
342 349 Constructor
343 350 '''
344 351
345 352 self.radarControllerHeaderObj = RadarControllerHeader()
346 353
347 354 self.systemHeaderObj = SystemHeader()
348 355
349 356 self.type = "Spectra"
350 357
351 358 # self.data = None
352 359
353 360 self.dtype = None
354 361
355 362 # self.nChannels = 0
356 363
357 364 # self.nHeights = 0
358 365
359 366 self.nProfiles = None
360 367
361 368 self.heightList = None
362 369
363 370 self.channelList = None
364 371
365 372 # self.channelIndexList = None
366 373
367 374 self.flagNoData = True
368 375
369 376 self.flagTimeBlock = False
370 377
371 378 self.utctime = None
372 379
373 380 self.nCohInt = None
374 381
375 382 self.nIncohInt = None
376 383
377 384 self.blocksize = None
378 385
379 386 self.nFFTPoints = None
380 387
381 388 self.wavelength = None
389
390 self.flagDecodeData = False #assume the data is not decoded
391
392 self.flagDeflipData = False #assume the data has not been de-flipped
393
394 self.flagShiftFFT = False
382 395
383 396 def getNoisebyHildebrand(self):
384 397 """
385 398 Determine the noise level using the Hildebrand-Sekhon method
386 399
387 400 Return:
388 401 noiselevel
389 402 """
390 403
391 404 for channel in range(self.nChannels):
392 405 daux = self.data_spc[channel,:,:]
393 406 self.noise[channel] = hildebrand_sekhon(daux, self.nIncohInt)
394 407
395 408 return self.noise
396 409
397 410 def getNoisebyWindow(self, heiIndexMin=0, heiIndexMax=-1, freqIndexMin=0, freqIndexMax=-1):
398 411 """
399 412 Determines the channel noise using the window given by the coordinates
400 413 (heiIndexMin, freqIndexMin) to (heiIndexMax, freqIndexMax)
401 414
402 415 Inputs:
403 416 heiIndexMin: lower limit of the height axis
404 417 heiIndexMax: upper limit of the height axis
405 418 freqIndexMin: lower limit of the frequency axis
406 419 freqIndexMax: upper limit of the frequency axis
407 420 """
408 421
409 422 data = self.data_spc[:, heiIndexMin:heiIndexMax, freqIndexMin:freqIndexMax]
410 423
411 424 for channel in range(self.nChannels):
412 425 daux = data[channel,:,:]
413 426 self.noise[channel] = numpy.average(daux)
414 427
415 428 return self.noise
416 429
417 430 def getNoisebySort(self):
418 431
419 432 for channel in range(self.nChannels):
420 433 daux = self.data_spc[channel,:,:]
421 434 self.noise[channel] = sorting_bruce(daux, self.nIncohInt)
422 435
423 436 return self.noise
424 437
425 438 def getNoise(self, type = 1):
426 439
427 440 self.noise = numpy.zeros(self.nChannels)
428 441
429 442 if type == 1:
430 443 noise = self.getNoisebyHildebrand()
431 444
432 445 if type == 2:
433 446 noise = self.getNoisebySort()
434 447
435 448 if type == 3:
436 449 noise = self.getNoisebyWindow()
437 450
438 451 return 10*numpy.log10(noise)
439 452
440 453
441 454 def getFreqRange(self, extrapoints=0):
442 455
443 456 deltafreq = self.getFmax() / self.nFFTPoints
444 457 freqrange = deltafreq*(numpy.arange(self.nFFTPoints+extrapoints)-self.nFFTPoints/2.) - deltafreq/2
445 458
446 459 return freqrange
447 460
448 461 def getVelRange(self, extrapoints=0):
449 462
450 463 deltav = self.getVmax() / self.nFFTPoints
451 464 velrange = deltav*(numpy.arange(self.nFFTPoints+extrapoints)-self.nFFTPoints/2.) - deltav/2
452 465
453 466 return velrange
454 467
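A small sketch of the axis built by getFreqRange, assuming an 8-point FFT and fmax = 125 Hz (values are illustrative; getVelRange is identical with deltav in place of deltafreq):

    import numpy
    nFFTPoints = 8
    fmax = 125.
    deltafreq = fmax / nFFTPoints
    freqrange = deltafreq * (numpy.arange(nFFTPoints) - nFFTPoints / 2.) - deltafreq / 2
    print freqrange    # -70.3 ... +39.1 Hz in 15.625 Hz steps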
455 468 def getNPairs(self):
456 469
457 470 return len(self.pairsList)
458 471
459 472 def getPairsIndexList(self):
460 473
461 474 return range(self.nPairs)
462 475
463 476 nPairs = property(getNPairs, "I'm the 'nPairs' property.")
464 477 pairsIndexList = property(getPairsIndexList, "I'm the 'pairsIndexList' property.")
465 478
466 479 class SpectraHeis(JROData):
467 480
468 481 data_spc = None
469 482
470 483 data_cspc = None
471 484
472 485 data_dc = None
473 486
474 487 nFFTPoints = None
475 488
476 489 nPairs = None
477 490
478 491 pairsList = None
479 492
480 493 nIncohInt = None
481 494
482 495 def __init__(self):
483 496
484 497 self.radarControllerHeaderObj = RadarControllerHeader()
485 498
486 499 self.systemHeaderObj = SystemHeader()
487 500
488 501 self.type = "SpectraHeis"
489 502
490 503 self.dtype = None
491 504
492 505 # self.nChannels = 0
493 506
494 507 # self.nHeights = 0
495 508
496 509 self.nProfiles = None
497 510
498 511 self.heightList = None
499 512
500 513 self.channelList = None
501 514
502 515 # self.channelIndexList = None
503 516
504 517 self.flagNoData = True
505 518
506 519 self.flagTimeBlock = False
507 520
508 521 self.nPairs = 0
509 522
510 523 self.utctime = None
511 524
512 525 self.blocksize = None
@@ -1,2547 +1,2560
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JRODataIO.py 169 2012-11-19 21:57:03Z murco $
5 5 '''
6 6
7 7 import os, sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import time, datetime
13 13
14 14 from jrodata import *
15 15 from jroheaderIO import *
16 16 from jroprocessing import *
17 17
18 18 def isNumber(str):
19 19 """
20 20 Checks whether the characters of a string can be converted to a number.
21 21
22 22 Exceptions:
23 23 If a given string cannot be converted to a number
24 24 Input:
25 25 str, string to analyze to determine whether it can be converted to a number
26 26
27 27 Return:
28 28 True : if the string is numeric
29 29 False : if the string is not numeric
30 30 """
31 31 try:
32 32 float( str )
33 33 return True
34 34 except:
35 35 return False
36 36
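Quick check of isNumber behaviour (the strings are illustrative):

    print isNumber("3.14")    # True
    print isNumber("10a")     # False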
37 37 def isThisFileinRange(filename, startUTSeconds, endUTSeconds):
38 38 """
39 39 Determines whether a data file falls within the specified date range.
40 40
41 41 Inputs:
42 42 filename : full name of the data file in Jicamarca format (.r)
43 43
44 44 startUTSeconds : start date of the selected range, given in
45 45 seconds counted from 01/01/1970.
46 46 endUTSeconds : end date of the selected range, given in
47 47 seconds counted from 01/01/1970.
48 48
49 49 Return:
50 50 Boolean : returns True if the data file contains data within the
51 51 specified date range, otherwise returns False.
52 52
53 53 Exceptions:
54 54 If the file does not exist or cannot be opened
55 55 If the header cannot be read.
56 56
57 57 """
58 58 basicHeaderObj = BasicHeader()
59 59
60 60 try:
61 61 fp = open(filename,'rb')
62 62 except:
63 63 raise IOError, "The file %s can't be opened" %(filename)
64 64
65 65 sts = basicHeaderObj.read(fp)
66 66 fp.close()
67 67
68 68 if not(sts):
69 69 print "Skipping the file %s because it does not have a valid header" %(filename)
70 70 return 0
71 71
72 72 if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
73 73 return 0
74 74
75 75 return 1
76 76
77 77 def isFileinThisTime(filename, startTime, endTime):
78 78 """
79 79 Returns 1 if the data file falls within the specified time range.
80 80
81 81 Inputs:
82 82 filename : full name of the data file in Jicamarca format (.r)
83 83
84 84 startTime : start time of the selected range, as a datetime.time object
85 85
86 86 endTime : end time of the selected range, as a datetime.time object
87 87
88 88 Return:
89 89 Boolean : returns True if the data file contains data within the
90 90 specified time range, otherwise returns False.
91 91
92 92 Exceptions:
93 93 If the file does not exist or cannot be opened
94 94 If the header cannot be read.
95 95
96 96 """
97 97
98 98
99 99 try:
100 100 fp = open(filename,'rb')
101 101 except:
102 102 raise IOError, "The file %s can't be opened" %(filename)
103 103
104 104 basicHeaderObj = BasicHeader()
105 105 sts = basicHeaderObj.read(fp)
106 106 fp.close()
107 107
108 108 thisTime = basicHeaderObj.datatime.time()
109 109
110 110 if not(sts):
111 111 print "Skipping the file %s because it does not have a valid header" %(filename)
112 112 return 0
113 113
114 114 if not ((startTime <= thisTime) and (endTime > thisTime)):
115 115 return 0
116 116
117 117 return 1
118 118
119 119 def getlastFileFromPath(path, ext):
120 120 """
121 121 Filters fileList, keeping only files that match the "PYYYYDDDSSS.ext" format,
122 122 and returns the last file of the filtered list.
123 123
124 124 Input:
125 125 fileList : list containing every file (without path) in a given folder
126 126 ext : extension of the files contained in the folder
127 127
128 128 Return:
129 129 The last file of the given folder, without the path.
130 130 """
131 131 validFilelist = []
132 132 fileList = os.listdir(path)
133 133
134 134 # 0 1234 567 89A BCDE
135 135 # H YYYY DDD SSS .ext
136 136
137 137 for file in fileList:
138 138 try:
139 139 year = int(file[1:5])
140 140 doy = int(file[5:8])
141 141
142 142
143 143 except:
144 144 continue
145 145
146 146 if (os.path.splitext(file)[-1].lower() != ext.lower()):
147 147 continue
148 148
149 149 validFilelist.append(file)
150 150
151 151 if validFilelist:
152 152 validFilelist = sorted( validFilelist, key=str.lower )
153 153 return validFilelist[-1]
154 154
155 155 return None
156 156
157 157 def checkForRealPath(path, year, doy, set, ext):
158 158 """
159 159 Because Linux is case sensitive, checkForRealPath finds the correct name of a path.
160 160 It tries several upper/lower-case name combinations to determine the exact
161 161 path of a given file.
162 162
163 163 Example :
164 164 the correct file name is .../.../D2009307/P2009307367.ext
165 165
166 166 The function then tries the following combinations
167 167 .../.../y2009307367.ext
168 168 .../.../Y2009307367.ext
169 169 .../.../x2009307/y2009307367.ext
170 170 .../.../x2009307/Y2009307367.ext
171 171 .../.../X2009307/y2009307367.ext
172 172 .../.../X2009307/Y2009307367.ext
173 173 where, in this case, the last letter combination is identical to the file searched for
174 174
175 175 Return:
176 176 If it finds the right combination it returns the full path and the file name;
177 177 otherwise it returns None as the path and the last upper-case name combination
178 178 as the filename
179 179 """
180 180 fullfilename = None
181 181 find_flag = False
182 182 filename = None
183 183
184 184 prefixDirList = [None,'d','D']
185 185 if ext.lower() == ".r": #voltage
186 186 prefixFileList = ['d','D']
187 187 elif ext.lower() == ".pdata": #spectra
188 188 prefixFileList = ['p','P']
189 189 else:
190 190 return None, filename
191 191
192 192 #sweep over the possible combinations
193 193 for prefixDir in prefixDirList:
194 194 thispath = path
195 195 if prefixDir != None:
196 196 #build the directory name xYYYYDDD (x=d or x=D)
197 197 thispath = os.path.join(path, "%s%04d%03d" % ( prefixDir, year, doy ))
198 198
199 199 for prefixFile in prefixFileList: #sweep over the two possible "D" combinations
200 200 filename = "%s%04d%03d%03d%s" % ( prefixFile, year, doy, set, ext ) #build the file name xYYYYDDDSSS.ext
201 201 fullfilename = os.path.join( thispath, filename ) #build the full path
202 202
203 203 if os.path.exists( fullfilename ): #verify that it exists
204 204 find_flag = True
205 205 break
206 206 if find_flag:
207 207 break
208 208
209 209 if not(find_flag):
210 210 return None, filename
211 211
212 212 return fullfilename, filename
213 213
214 214 class JRODataIO:
215 215
216 216 c = 3E8
217 217
218 218 isConfig = False
219 219
220 220 basicHeaderObj = BasicHeader()
221 221
222 222 systemHeaderObj = SystemHeader()
223 223
224 224 radarControllerHeaderObj = RadarControllerHeader()
225 225
226 226 processingHeaderObj = ProcessingHeader()
227 227
228 228 online = 0
229 229
230 230 dtype = None
231 231
232 232 pathList = []
233 233
234 234 filenameList = []
235 235
236 236 filename = None
237 237
238 238 ext = None
239 239
240 240 flagIsNewFile = 1
241 241
242 242 flagTimeBlock = 0
243 243
244 244 flagIsNewBlock = 0
245 245
246 246 fp = None
247 247
248 248 firstHeaderSize = 0
249 249
250 250 basicHeaderSize = 24
251 251
252 252 versionFile = 1103
253 253
254 254 fileSize = None
255 255
256 256 ippSeconds = None
257 257
258 258 fileSizeByHeader = None
259 259
260 260 fileIndex = None
261 261
262 262 profileIndex = None
263 263
264 264 blockIndex = None
265 265
266 266 nTotalBlocks = None
267 267
268 268 maxTimeStep = 30
269 269
270 270 lastUTTime = None
271 271
272 272 datablock = None
273 273
274 274 dataOut = None
275 275
276 276 blocksize = None
277 277
278 278 def __init__(self):
279 279
280 280 raise ValueError, "Not implemented"
281 281
282 282 def run(self):
283 283
284 284 raise ValueError, "Not implemented"
285 285
286 286 def getOutput(self):
287 287
288 288 return self.dataOut
289 289
290 290 class JRODataReader(JRODataIO, ProcessingUnit):
291 291
292 292 nReadBlocks = 0
293 293
294 294 delay = 10 #number of seconds waiting a new file
295 295
296 296 nTries = 3 #quantity tries
297 297
298 298 nFiles = 3 #number of files for searching
299 299
300 300 flagNoMoreFiles = 0
301 301
302 302 def __init__(self):
303 303
304 304 """
305 305
306 306 """
307 307
308 308 raise ValueError, "This method has not been implemented"
309 309
310 310
311 311 def createObjByDefault(self):
312 312 """
313 313
314 314 """
315 315 raise ValueError, "This method has not been implemented"
316 316
317 317 def getBlockDimension(self):
318 318
319 319 raise ValueError, "No implemented"
320 320
321 321 def __searchFilesOffLine(self,
322 322 path,
323 323 startDate,
324 324 endDate,
325 325 startTime=datetime.time(0,0,0),
326 326 endTime=datetime.time(23,59,59),
327 327 set=None,
328 328 expLabel='',
329 329 ext='.r',
330 330 walk=True):
331 331
332 332 pathList = []
333 333
334 334 if not walk:
335 335 pathList.append(path)
336 336
337 337 else:
338 338 dirList = []
339 339 for thisPath in os.listdir(path):
340 340 if os.path.isdir(os.path.join(path,thisPath)):
341 341 dirList.append(thisPath)
342 342
343 343 if not(dirList):
344 344 return None, None
345 345
346 346 thisDate = startDate
347 347
348 348 while(thisDate <= endDate):
349 349 year = thisDate.timetuple().tm_year
350 350 doy = thisDate.timetuple().tm_yday
351 351
352 352 match = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy))
353 353 if len(match) == 0:
354 354 thisDate += datetime.timedelta(1)
355 355 continue
356 356
357 357 pathList.append(os.path.join(path,match[0],expLabel))
358 358 thisDate += datetime.timedelta(1)
359 359
360 360 if pathList == []:
361 361 print "No folders found in date range %s-%s" %(startDate, endDate)
362 362 return None, None
363 363
364 364 print "%d folder(s) found [%s, ...]" %(len(pathList), pathList[0])
365 365
366 366 filenameList = []
367 367 for thisPath in pathList:
368 368
369 369 fileList = glob.glob1(thisPath, "*%s" %ext)
370 370 fileList.sort()
371 371
372 372 for file in fileList:
373 373
374 374 filename = os.path.join(thisPath,file)
375 375
376 376 if isFileinThisTime(filename, startTime, endTime):
377 377 filenameList.append(filename)
378 378
379 379 if not(filenameList):
380 380 print "No files found in time range %s-%s" %(startTime, endTime)
381 381 return None, None
382 382
383 383 self.filenameList = filenameList
384 384
385 385 return pathList, filenameList
386 386
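A small sketch of the ?YYYYDDD day-folder matching used above (the directory names are illustrative):

    import fnmatch
    dirList = ['D2009306', 'd2009307', 'logs']
    print fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (2009, 307))    # ['d2009307']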
387 387 def __searchFilesOnLine(self, path, expLabel = "", ext = None, walk=True):
388 388
389 389 """
390 390 Searches for the last file of the last folder (whether or not determined by startDateTime)
391 391 and returns the file found along with other data.
392 392
393 393 Input:
394 394 path : folder containing the data files
395 395
396 396 expLabel : name of the sub-experiment (subfolder)
397 397
398 398 ext : file extension
399 399
400 400 walk : if enabled, the search descends into the day subdirectories (doypath)
401 401
402 402 Return:
403 403 directory : the directory where the file was found
404 404 filename : the last file of the given folder
405 405 year : the year
406 406 doy : the day of year
407 407 set : the file set number
408 408
409 409
410 410 """
411 411 dirList = []
412 412
413 413 if walk:
414 414
415 415 #keep only the directories
416 416 for thisPath in os.listdir(path):
417 417 if os.path.isdir(os.path.join(path, thisPath)):
418 418 dirList.append(thisPath)
419 419
420 420 if not(dirList):
421 421 return None, None, None, None, None
422 422
423 423 dirList = sorted( dirList, key=str.lower )
424 424
425 425 doypath = dirList[-1]
426 426 fullpath = os.path.join(path, doypath, expLabel)
427 427
428 428 else:
429 429 fullpath = path
430 430
431 431 filename = getlastFileFromPath(fullpath, ext)
432 432
433 433 if not(filename):
434 434 return None, None, None, None, None
435 435
436 436 if not(self.__verifyFile(os.path.join(fullpath, filename))):
437 437 return None, None, None, None, None
438 438
439 439 year = int( filename[1:5] )
440 440 doy = int( filename[5:8] )
441 441 set = int( filename[8:11] )
442 442
443 443 return fullpath, filename, year, doy, set
444 444
445 445
446 446
447 447 def __setNextFileOffline(self):
448 448
449 449 idFile = self.fileIndex
450 450
451 451 while (True):
452 452 idFile += 1
453 453 if not(idFile < len(self.filenameList)):
454 454 self.flagNoMoreFiles = 1
455 455 print "No more Files"
456 456 return 0
457 457
458 458 filename = self.filenameList[idFile]
459 459
460 460 if not(self.__verifyFile(filename)):
461 461 continue
462 462
463 463 fileSize = os.path.getsize(filename)
464 464 fp = open(filename,'rb')
465 465 break
466 466
467 467 self.flagIsNewFile = 1
468 468 self.fileIndex = idFile
469 469 self.filename = filename
470 470 self.fileSize = fileSize
471 471 self.fp = fp
472 472
473 473 print "Setting the file: %s"%self.filename
474 474
475 475 return 1
476 476
477 477 def __setNextFileOnline(self):
478 478 """
479 479 Searches for the next file with enough data to be read within a specific folder; if
480 480 no valid file is found it waits a given time and then looks in the next n possible
481 481 files.
482 482
483 483 Affected:
484 484 self.flagIsNewFile
485 485 self.filename
486 486 self.fileSize
487 487 self.fp
488 488 self.set
489 489 self.flagNoMoreFiles
490 490
491 491 Return:
492 492 0 : if, after searching, the next valid file could not be found
493 493 1 : if the file was opened successfully and is ready to be read
494 494
495 495 Exceptions:
496 496 If a given file cannot be opened
497 497 """
498 498 nFiles = 0
499 499 fileOk_flag = False
500 500 firstTime_flag = True
501 501
502 502 self.set += 1
503 503
504 504 #look for the first available file
505 505 fullfilename, filename = checkForRealPath( self.path, self.year, self.doy, self.set, self.ext )
506 506 if fullfilename:
507 507 if self.__verifyFile(fullfilename, False):
508 508 fileOk_flag = True
509 509
510 510 #if no file is found, wait and search again
511 511 if not(fileOk_flag):
512 512 for nFiles in range(self.nFiles+1): #look in the next self.nFiles+1 possible files
513 513
514 514 if firstTime_flag: #on the first pass, loop self.nTries times
515 515 tries = self.nTries
516 516 else:
517 517 tries = 1 #after the first pass, try only once
518 518
519 519 for nTries in range( tries ):
520 520 if firstTime_flag:
521 521 print "\tWaiting %0.2f sec for the file \"%s\" , try %03d ..." % ( self.delay, filename, nTries+1 )
522 522 time.sleep( self.delay )
523 523 else:
524 524 print "\tSearching next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext)
525 525
526 526 fullfilename, filename = checkForRealPath( self.path, self.year, self.doy, self.set, self.ext )
527 527 if fullfilename:
528 528 if self.__verifyFile(fullfilename):
529 529 fileOk_flag = True
530 530 break
531 531
532 532 if fileOk_flag:
533 533 break
534 534
535 535 firstTime_flag = False
536 536
537 537 print "\tSkipping the file \"%s\" because it doesn't exist" % filename
538 538 self.set += 1
539 539
540 540 if nFiles == (self.nFiles-1): #if the file is not found, switch folders and search in the next one
541 541 self.set = 0
542 542 self.doy += 1
543 543
544 544 if fileOk_flag:
545 545 self.fileSize = os.path.getsize( fullfilename )
546 546 self.filename = fullfilename
547 547 self.flagIsNewFile = 1
548 548 if self.fp != None: self.fp.close()
549 549 self.fp = open(fullfilename, 'rb')
550 550 self.flagNoMoreFiles = 0
551 551 print 'Setting the file: %s' % fullfilename
552 552 else:
553 553 self.fileSize = 0
554 554 self.filename = None
555 555 self.flagIsNewFile = 0
556 556 self.fp = None
557 557 self.flagNoMoreFiles = 1
558 558 print 'No more Files'
559 559
560 560 return fileOk_flag
561 561
562 562
563 563 def setNextFile(self):
564 564 if self.fp != None:
565 565 self.fp.close()
566 566
567 567 if self.online:
568 568 newFile = self.__setNextFileOnline()
569 569 else:
570 570 newFile = self.__setNextFileOffline()
571 571
572 572 if not(newFile):
573 573 return 0
574 574
575 575 self.__readFirstHeader()
576 576 self.nReadBlocks = 0
577 577 return 1
578 578
579 579 def __waitNewBlock(self):
580 580 """
581 581 Returns 1 if a new data block was found, 0 otherwise.
582 582
583 583 If the reading mode is offline it always returns 0.
584 584 """
585 585 if not self.online:
586 586 return 0
587 587
588 588 if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
589 589 return 0
590 590
591 591 currentPointer = self.fp.tell()
592 592
593 593 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
594 594
595 595 for nTries in range( self.nTries ):
596 596
597 597 self.fp.close()
598 598 self.fp = open( self.filename, 'rb' )
599 599 self.fp.seek( currentPointer )
600 600
601 601 self.fileSize = os.path.getsize( self.filename )
602 602 currentSize = self.fileSize - currentPointer
603 603
604 604 if ( currentSize >= neededSize ):
605 605 self.__rdBasicHeader()
606 606 return 1
607 607
608 608 print "\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
609 609 time.sleep( self.delay )
610 610
611 611
612 612 return 0
613 613
614 614 def __setNewBlock(self):
615 615
616 616 if self.fp == None:
617 617 return 0
618 618
619 619 if self.flagIsNewFile:
620 620 return 1
621 621
622 622 self.lastUTTime = self.basicHeaderObj.utc
623 623 currentSize = self.fileSize - self.fp.tell()
624 624 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
625 625
626 626 if (currentSize >= neededSize):
627 627 self.__rdBasicHeader()
628 628 return 1
629 629
630 630 if self.__waitNewBlock():
631 631 return 1
632 632
633 633 if not(self.setNextFile()):
634 634 return 0
635 635
636 636 deltaTime = self.basicHeaderObj.utc - self.lastUTTime #
637 637
638 638 self.flagTimeBlock = 0
639 639
640 640 if deltaTime > self.maxTimeStep:
641 641 self.flagTimeBlock = 1
642 642
643 643 return 1
644 644
645 645
646 646 def readNextBlock(self):
647 647 if not(self.__setNewBlock()):
648 648 return 0
649 649
650 650 if not(self.readBlock()):
651 651 return 0
652 652
653 653 return 1
654 654
655 655 def __rdProcessingHeader(self, fp=None):
656 656 if fp == None:
657 657 fp = self.fp
658 658
659 659 self.processingHeaderObj.read(fp)
660 660
661 661 def __rdRadarControllerHeader(self, fp=None):
662 662 if fp == None:
663 663 fp = self.fp
664 664
665 665 self.radarControllerHeaderObj.read(fp)
666 666
667 667 def __rdSystemHeader(self, fp=None):
668 668 if fp == None:
669 669 fp = self.fp
670 670
671 671 self.systemHeaderObj.read(fp)
672 672
673 673 def __rdBasicHeader(self, fp=None):
674 674 if fp == None:
675 675 fp = self.fp
676 676
677 677 self.basicHeaderObj.read(fp)
678 678
679 679
680 680 def __readFirstHeader(self):
681 681 self.__rdBasicHeader()
682 682 self.__rdSystemHeader()
683 683 self.__rdRadarControllerHeader()
684 684 self.__rdProcessingHeader()
685 685
686 686 self.firstHeaderSize = self.basicHeaderObj.size
687 687
688 688 datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
689 689 if datatype == 0:
690 690 datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
691 691 elif datatype == 1:
692 692 datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')])
693 693 elif datatype == 2:
694 694 datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')])
695 695 elif datatype == 3:
696 696 datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')])
697 697 elif datatype == 4:
698 698 datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')])
699 699 elif datatype == 5:
700 700 datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')])
701 701 else:
702 702 raise ValueError, 'Data type was not defined'
703 703
704 704 self.dtype = datatype_str
705 705 self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
706 706 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1)
707 707 # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
708 708 # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
709 709 self.getBlockDimension()
710 710
711 711
712 712 def __verifyFile(self, filename, msgFlag=True):
713 713 msg = None
714 714 try:
715 715 fp = open(filename, 'rb')
716 716 currentPosition = fp.tell()
717 717 except:
718 718 if msgFlag:
719 719 print "The file %s can't be opened" % (filename)
720 720 return False
721 721
722 722 neededSize = self.processingHeaderObj.blockSize + self.firstHeaderSize
723 723
724 724 if neededSize == 0:
725 725 basicHeaderObj = BasicHeader()
726 726 systemHeaderObj = SystemHeader()
727 727 radarControllerHeaderObj = RadarControllerHeader()
728 728 processingHeaderObj = ProcessingHeader()
729 729
730 730 try:
731 731 if not( basicHeaderObj.read(fp) ): raise IOError
732 732 if not( systemHeaderObj.read(fp) ): raise IOError
733 733 if not( radarControllerHeaderObj.read(fp) ): raise IOError
734 734 if not( processingHeaderObj.read(fp) ): raise IOError
735 735 data_type = int(numpy.log2((processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
736 736
737 737 neededSize = processingHeaderObj.blockSize + basicHeaderObj.size
738 738
739 739 except:
740 740 if msgFlag:
741 741 print "\tThe file %s is empty or it doesn't have enough data" % filename
742 742
743 743 fp.close()
744 744 return False
745 745 else:
746 746 msg = "\tSkipping the file %s because it doesn't have enough data" %filename
747 747
748 748 fp.close()
749 749 fileSize = os.path.getsize(filename)
750 750 currentSize = fileSize - currentPosition
751 751 if currentSize < neededSize:
752 752 if msgFlag and (msg != None):
753 753 print msg #print"\tSkipping the file %s due to it hasn't enough data" %filename
754 754 return False
755 755
756 756 return True
757 757
758 758 def setup(self,
759 759 path=None,
760 760 startDate=None,
761 761 endDate=None,
762 762 startTime=datetime.time(0,0,0),
763 763 endTime=datetime.time(23,59,59),
764 764 set=0,
765 765 expLabel = "",
766 766 ext = None,
767 767 online = False,
768 768 delay = 60,
769 769 walk = True):
770 770
771 771 if path == None:
772 772 raise ValueError, "The path is not valid"
773 773
774 774 if ext == None:
775 775 ext = self.ext
776 776
777 777 if online:
778 778 print "Searching files in online mode..."
779 779
780 780 for nTries in range( self.nTries ):
781 781 fullpath, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext, walk=walk)
782 782
783 783 if fullpath:
784 784 break
785 785
786 786 print '\tWaiting %0.2f sec for a valid file in %s: try %02d ...' % (self.delay, path, nTries+1)
787 787 time.sleep( self.delay )
788 788
789 789 if not(fullpath):
790 790 print "There aren't valid files in %s" % path
791 791 return None
792 792
793 793 self.year = year
794 794 self.doy = doy
795 795 self.set = set - 1
796 796 self.path = path
797 797
798 798 else:
799 799 print "Searching files in offline mode ..."
800 800 pathList, filenameList = self.__searchFilesOffLine(path, startDate=startDate, endDate=endDate,
801 801 startTime=startTime, endTime=endTime,
802 802 set=set, expLabel=expLabel, ext=ext,
803 803 walk=walk)
804 804
805 805 if not(pathList):
806 806 print "No *%s files in the folder %s \nfor the range: %s - %s"%(ext, path,
807 807 datetime.datetime.combine(startDate,startTime).ctime(),
808 808 datetime.datetime.combine(endDate,endTime).ctime())
809 809
810 810 sys.exit(-1)
811 811
812 812
813 813 self.fileIndex = -1
814 814 self.pathList = pathList
815 815 self.filenameList = filenameList
816 816
817 817 self.online = online
818 818 self.delay = delay
819 819 ext = ext.lower()
820 820 self.ext = ext
821 821
822 822 if not(self.setNextFile()):
823 823 if (startDate!=None) and (endDate!=None):
824 824 print "No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
825 825 elif startDate != None:
826 826 print "No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
827 827 else:
828 828 print "No files"
829 829
830 830 sys.exit(-1)
831 831
832 832 # self.updateDataHeader()
833 833
834 834 return self.dataOut
835 835
836 836 def getData():
837 837
838 838 raise ValueError, "This method has not been implemented"
839 839
840 840 def hasNotDataInBuffer():
841 841
842 842 raise ValueError, "This method has not been implemented"
843 843
844 844 def readBlock():
845 845
846 846 raise ValueError, "This method has not been implemented"
847 847
848 848 def isEndProcess(self):
849 849
850 850 return self.flagNoMoreFiles
851 851
852 852 def printReadBlocks(self):
853 853
854 854 print "Number of blocks read per file: %04d" %self.nReadBlocks
855 855
856 856 def printTotalBlocks(self):
857 857
858 858 print "Total number of blocks read: %04d" %self.nTotalBlocks
859 859
860 860 def printInfo(self):
861 861
862 862 print self.basicHeaderObj.printInfo()
863 863 print self.systemHeaderObj.printInfo()
864 864 print self.radarControllerHeaderObj.printInfo()
865 865 print self.processingHeaderObj.printInfo()
866 866
867 867
868 868 def run(self, **kwargs):
869 869
870 870 if not(self.isConfig):
871 871
872 872 # self.dataOut = dataOut
873 873 self.setup(**kwargs)
874 874 self.isConfig = True
875 875
876 876 self.getData()
877 877
878 878 class JRODataWriter(JRODataIO, Operation):
879 879
880 880 """
881 881 This class writes data to processed files (.r or .pdata). Data is always
882 882 written in blocks.
883 883 """
884 884
885 885 blockIndex = 0
886 886
887 887 path = None
888 888
889 889 setFile = None
890 890
891 891 profilesPerBlock = None
892 892
893 893 blocksPerFile = None
894 894
895 895 nWriteBlocks = 0
896 896
897 897 def __init__(self, dataOut=None):
898 898 raise ValueError, "Not implemented"
899 899
900 900
901 901 def hasAllDataInBuffer(self):
902 902 raise ValueError, "Not implemented"
903 903
904 904
905 905 def setBlockDimension(self):
906 906 raise ValueError, "Not implemented"
907 907
908 908
909 909 def writeBlock(self):
910 910 raise ValueError, "No implemented"
911 911
912 912
913 913 def putData(self):
914 914 raise ValueError, "No implemented"
915 915
916 916 def getDataHeader(self):
917 917 """
918 918 Gets a copy of the First Header
919 919
920 920 Affected:
921 921
922 922 self.basicHeaderObj
923 923 self.systemHeaderObj
924 924 self.radarControllerHeaderObj
925 925 self.processingHeaderObj
926 926
927 927 Return:
928 928 None
929 929 """
930 930
931 931 raise ValueError, "No implemented"
932 932
933 933 def getBasicHeader(self):
934 934
935 935 self.basicHeaderObj.size = self.basicHeaderSize #bytes
936 936 self.basicHeaderObj.version = self.versionFile
937 937 self.basicHeaderObj.dataBlock = self.nTotalBlocks
938 938
939 939 utc = numpy.floor(self.dataOut.utctime)
940 940 milisecond = (self.dataOut.utctime - utc)* 1000.0
941 941
942 942 self.basicHeaderObj.utc = utc
943 943 self.basicHeaderObj.miliSecond = milisecond
944 944 self.basicHeaderObj.timeZone = 0
945 945 self.basicHeaderObj.dstFlag = 0
946 946 self.basicHeaderObj.errorCount = 0
947 947
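A sketch of the utc/millisecond split performed by getBasicHeader (the timestamp is illustrative):

    import numpy
    utctime = 1353423981.75
    utc = numpy.floor(utctime)
    milisecond = (utctime - utc) * 1000.0
    print utc, milisecond    # 1353423981.0 750.0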
948 948 def __writeFirstHeader(self):
949 949 """
950 950 Writes the first header of the file, i.e. the Basic header and the Long header (SystemHeader, RadarControllerHeader, ProcessingHeader)
951 951
952 952 Affected:
953 953 __dataType
954 954
955 955 Return:
956 956 None
957 957 """
958 958
959 959 # COMPUTE PARAMETERS
960 960
961 961 sizeLongHeader = self.systemHeaderObj.size + self.radarControllerHeaderObj.size + self.processingHeaderObj.size
962 962 self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader
963 963
964 964 self.basicHeaderObj.write(self.fp)
965 965 self.systemHeaderObj.write(self.fp)
966 966 self.radarControllerHeaderObj.write(self.fp)
967 967 self.processingHeaderObj.write(self.fp)
968 968
969 969 self.dtype = self.dataOut.dtype
970 970
971 971 def __setNewBlock(self):
972 972 """
973 973 If it is a new file this writes the First Header; otherwise it writes only the Basic Header
974 974
975 975 Return:
976 976 0 : if nothing could be written
977 977 1 : if the Basic or the First Header was written
978 978 """
979 979 if self.fp == None:
980 980 self.setNextFile()
981 981
982 982 if self.flagIsNewFile:
983 983 return 1
984 984
985 985 if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
986 986 self.basicHeaderObj.write(self.fp)
987 987 return 1
988 988
989 989 if not( self.setNextFile() ):
990 990 return 0
991 991
992 992 return 1
993 993
994 994
995 995 def writeNextBlock(self):
996 996 """
997 997 Selects the next data block and writes it to a file
998 998
999 999 Return:
1000 1000 0 : if the data block could not be written
1001 1001 1 : if the data block was written
1002 1002 """
1003 1003 if not( self.__setNewBlock() ):
1004 1004 return 0
1005 1005
1006 1006 self.writeBlock()
1007 1007
1008 1008 return 1
1009 1009
1010 1010 def setNextFile(self):
1011 1011 """
1012 1012 Determines the next file to be written
1013 1013
1014 1014 Affected:
1015 1015 self.filename
1016 1016 self.subfolder
1017 1017 self.fp
1018 1018 self.setFile
1019 1019 self.flagIsNewFile
1020 1020
1021 1021 Return:
1022 1022 0 : if the file cannot be written
1023 1023 1 : if the file is ready to be written
1024 1024 """
1025 1025 ext = self.ext
1026 1026 path = self.path
1027 1027
1028 1028 if self.fp != None:
1029 1029 self.fp.close()
1030 1030
1031 1031 timeTuple = time.localtime( self.dataOut.dataUtcTime)
1032 1032 subfolder = 'D%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
1033 1033
1034 1034 fullpath = os.path.join( path, subfolder )
1035 1035 if not( os.path.exists(fullpath) ):
1036 1036 os.mkdir(fullpath)
1037 1037 self.setFile = -1 #initialize the set counter
1038 1038 else:
1039 1039 filesList = os.listdir( fullpath )
1040 1040 if len( filesList ) > 0:
1041 1041 filesList = sorted( filesList, key=str.lower )
1042 1042 filen = filesList[-1]
1043 1043 # the filename must have the following format
1044 1044 # 0 1234 567 89A BCDE (hex)
1045 1045 # x YYYY DDD SSS .ext
1046 1046 if isNumber( filen[8:11] ):
1047 1047 self.setFile = int( filen[8:11] ) #initialize the set counter to the set of the last file
1048 1048 else:
1049 1049 self.setFile = -1
1050 1050 else:
1051 1051 self.setFile = -1 #initialize the set counter
1052 1052
1053 1053 setFile = self.setFile
1054 1054 setFile += 1
1055 1055
1056 1056 file = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
1057 1057 timeTuple.tm_year,
1058 1058 timeTuple.tm_yday,
1059 1059 setFile,
1060 1060 ext )
1061 1061
1062 1062 filename = os.path.join( path, subfolder, file )
1063 1063
1064 1064 fp = open( filename,'wb' )
1065 1065
1066 1066 self.blockIndex = 0
1067 1067
1068 1068 #save attributes
1069 1069 self.filename = filename
1070 1070 self.subfolder = subfolder
1071 1071 self.fp = fp
1072 1072 self.setFile = setFile
1073 1073 self.flagIsNewFile = 1
1074 1074
1075 1075 self.getDataHeader()
1076 1076
1077 1077 print 'Writing the file: %s'%self.filename
1078 1078
1079 1079 self.__writeFirstHeader()
1080 1080
1081 1081 return 1
1082 1082
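A sketch of the DYYYYDDD subfolder and xYYYYDDDSSS.ext filename convention built by setNextFile; the timestamp, optchar and set number are illustrative:

    import time
    timeTuple = time.localtime(1353423981)
    subfolder = 'D%4.4d%3.3d' % (timeTuple.tm_year, timeTuple.tm_yday)
    filename = '%s%4.4d%3.3d%3.3d%s' % ('D', timeTuple.tm_year, timeTuple.tm_yday, 0, '.r')
    print subfolder, filename    # e.g. D2012325 D2012325000.r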
1083 1083 def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=None, set=0, ext=None):
1084 1084 """
1085 1085 Sets the format in which the data will be saved and writes the First Header
1086 1086
1087 1087 Inputs:
1088 1088 path : destination path where the created files will be written
1089 1089 format : format in which a file will be saved
1090 1090 set : the file set number
1091 1091
1092 1092 Return:
1093 1093 0 : if the setup was not successful
1094 1094 1 : if the setup was successful
1095 1095 """
1096 1096
1097 1097 if ext == None:
1098 1098 ext = self.ext
1099 1099
1100 1100 ext = ext.lower()
1101 1101
1102 1102 self.ext = ext
1103 1103
1104 1104 self.path = path
1105 1105
1106 1106 self.setFile = set - 1
1107 1107
1108 1108 self.blocksPerFile = blocksPerFile
1109 1109
1110 1110 self.profilesPerBlock = profilesPerBlock
1111 1111
1112 1112 self.dataOut = dataOut
1113 1113
1114 1114 if not(self.setNextFile()):
1115 1115 print "There isn't a next file"
1116 1116 return 0
1117 1117
1118 1118 self.setBlockDimension()
1119 1119
1120 1120 return 1
1121 1121
1122 1122 def run(self, dataOut, **kwargs):
1123 1123
1124 1124 if not(self.isConfig):
1125 1125
1126 1126 self.setup(dataOut, **kwargs)
1127 1127 self.isConfig = True
1128 1128
1129 1129 self.putData()
1130 1130
1131 1131 class VoltageReader(JRODataReader):
1132 1132 """
1133 1133 This class reads voltage data from files in rawdata format (.r). Data is always
1134 1134 read in blocks. The data read (a 3-dimensional array:
1135 1135 profiles*heights*channels) is stored in the "buffer" variable.
1136 1136
1137 1137 profiles * heights * channels
1138 1138
1139 1139 This class contains instances (objects) of the BasicHeader, SystemHeader,
1140 1140 RadarControllerHeader and Voltage classes. The first three are used to store data
1141 1141 header information (metadata), and the fourth (Voltage) to obtain and store a data
1142 1142 profile from the "buffer" every time the "getData" method is called.
1143 1143
1144 1144 Example:
1145 1145
1146 1146 dpath = "/home/myuser/data"
1147 1147
1148 1148 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
1149 1149
1150 1150 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
1151 1151
1152 1152 readerObj = VoltageReader()
1153 1153
1154 1154 readerObj.setup(dpath, startTime, endTime)
1155 1155
1156 1156 while(True):
1157 1157
1158 1158 #to get one profile
1159 1159 profile = readerObj.getData()
1160 1160
1161 1161 #print the profile
1162 1162 print profile
1163 1163
1164 1164 #If you want to see all datablock
1165 1165 print readerObj.datablock
1166 1166
1167 1167 if readerObj.flagNoMoreFiles:
1168 1168 break
1169 1169
1170 1170 """
1171 1171
1172 1172 ext = ".r"
1173 1173
1174 1174 optchar = "D"
1175 1175 dataOut = None
1176 1176
1177 1177
1178 1178 def __init__(self):
1179 1179 """
1180 1180 Initializer of the VoltageReader class for reading voltage data.
1181 1181
1182 1182 Input:
1183 1183 dataOut : Voltage class object. This object will be used to
1184 1184 store a data profile each time a request is made
1185 1185 (getData). The profile will be obtained from the data buffer;
1186 1186 if the buffer is empty, a new data block
1187 1187 will be read.
1188 1188 If this parameter is not given, one is created internally.
1189 1189
1190 1190 Affected variables:
1191 1191 self.dataOut
1192 1192
1193 1193 Return:
1194 1194 None
1195 1195 """
1196 1196
1197 1197 self.isConfig = False
1198 1198
1199 1199 self.datablock = None
1200 1200
1201 1201 self.utc = 0
1202 1202
1203 1203 self.ext = ".r"
1204 1204
1205 1205 self.optchar = "D"
1206 1206
1207 1207 self.basicHeaderObj = BasicHeader()
1208 1208
1209 1209 self.systemHeaderObj = SystemHeader()
1210 1210
1211 1211 self.radarControllerHeaderObj = RadarControllerHeader()
1212 1212
1213 1213 self.processingHeaderObj = ProcessingHeader()
1214 1214
1215 1215 self.online = 0
1216 1216
1217 1217 self.fp = None
1218 1218
1219 1219 self.idFile = None
1220 1220
1221 1221 self.dtype = None
1222 1222
1223 1223 self.fileSizeByHeader = None
1224 1224
1225 1225 self.filenameList = []
1226 1226
1227 1227 self.filename = None
1228 1228
1229 1229 self.fileSize = None
1230 1230
1231 1231 self.firstHeaderSize = 0
1232 1232
1233 1233 self.basicHeaderSize = 24
1234 1234
1235 1235 self.pathList = []
1236 1236
1237 1237 self.filenameList = []
1238 1238
1239 1239 self.lastUTTime = 0
1240 1240
1241 1241 self.maxTimeStep = 30
1242 1242
1243 1243 self.flagNoMoreFiles = 0
1244 1244
1245 1245 self.set = 0
1246 1246
1247 1247 self.path = None
1248 1248
1249 1249 self.profileIndex = 9999
1250 1250
1251 1251 self.delay = 3 #seconds
1252 1252
1253 1253 self.nTries = 3 #quantity tries
1254 1254
1255 1255 self.nFiles = 3 #number of files for searching
1256 1256
1257 1257 self.nReadBlocks = 0
1258 1258
1259 1259 self.flagIsNewFile = 1
1260 1260
1261 1261 self.ippSeconds = 0
1262 1262
1263 1263 self.flagTimeBlock = 0
1264 1264
1265 1265 self.flagIsNewBlock = 0
1266 1266
1267 1267 self.nTotalBlocks = 0
1268 1268
1269 1269 self.blocksize = 0
1270 1270
1271 1271 self.dataOut = self.createObjByDefault()
1272 1272
1273 1273 def createObjByDefault(self):
1274 1274
1275 1275 dataObj = Voltage()
1276 1276
1277 1277 return dataObj
1278 1278
1279 1279 def __hasNotDataInBuffer(self):
1280 1280 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
1281 1281 return 1
1282 1282 return 0
1283 1283
1284 1284
1285 1285 def getBlockDimension(self):
1286 1286 """
1287 1287 Gets the number of points to read per data block
1288 1288
1289 1289 Affected:
1290 1290 self.blocksize
1291 1291
1292 1292 Return:
1293 1293 None
1294 1294 """
1295 1295 pts2read = self.processingHeaderObj.profilesPerBlock * self.processingHeaderObj.nHeights * self.systemHeaderObj.nChannels
1296 1296 self.blocksize = pts2read
1297 1297
1298 1298
1299 1299 def readBlock(self):
1300 1300 """
1301 1301 readBlock reads the data block from the current position of the file pointer
1302 1302 (self.fp) and updates all the parameters related to the data block
1303 1303 (metadata + data). The data read is stored in the buffer and the buffer
1304 1304 counter is reset to 0
1305 1305
1306 1306 Inputs:
1307 1307 None
1308 1308
1309 1309 Return:
1310 1310 None
1311 1311
1312 1312 Affected:
1313 1313 self.profileIndex
1314 1314 self.datablock
1315 1315 self.flagIsNewFile
1316 1316 self.flagIsNewBlock
1317 1317 self.nTotalBlocks
1318 1318
1319 1319 Exceptions:
1320 1320 If a read block is not a valid block
1321 1321 """
1322 1322
1323 1323 junk = numpy.fromfile( self.fp, self.dtype, self.blocksize )
1324 1324
1325 1325 try:
1326 1326 junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
1327 1327 except:
1328 1328 print "The read block (%3d) doesn't have enough data" %self.nReadBlocks
1329 1329 return 0
1330 1330
1331 1331 junk = numpy.transpose(junk, (2,0,1))
1332 1332 self.datablock = junk['real'] + junk['imag']*1j
1333 1333
1334 1334 self.profileIndex = 0
1335 1335
1336 1336 self.flagIsNewFile = 0
1337 1337 self.flagIsNewBlock = 1
1338 1338
1339 1339 self.nTotalBlocks += 1
1340 1340 self.nReadBlocks += 1
1341 1341
1342 1342 return 1
1343 1343
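A self-contained sketch of the sample layout handled by readBlock: interleaved real/imag pairs reshaped to (profiles, heights, channels) and then transposed to (channels, profiles, heights); the dtype and dimensions are illustrative:

    import numpy
    dt = numpy.dtype([('real', '<i2'), ('imag', '<i2')])
    profiles, heights, channels = 4, 8, 2
    junk = numpy.zeros(profiles * heights * channels, dtype=dt)
    junk = junk.reshape((profiles, heights, channels))
    junk = numpy.transpose(junk, (2, 0, 1))
    datablock = junk['real'] + junk['imag'] * 1j
    print datablock.shape    # (2, 4, 8)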
1344 1344
1345 1345 def getData(self):
1346 1346 """
1347 1347 getData obtains one unit of data from the read buffer and copies it to the "Voltage"
1348 1348 class with all its associated parameters (metadata). When there is no data in the read
1349 1349 buffer, a new read of the data blocks is needed using "readNextBlock"
1350 1350
1351 1351 It also increments the buffer counter by 1.
1352 1352
1353 1353 Return:
1354 1354 data : returns a profile of voltages (heights * channels) copied from the
1355 1355 buffer. If there are no more files to read it returns None.
1356 1356
1357 1357 Affected variables:
1358 1358 self.dataOut
1359 1359 self.profileIndex
1360 1360
1361 1361 Affected:
1362 1362 self.dataOut
1363 1363 self.profileIndex
1364 1364 self.flagTimeBlock
1365 1365 self.flagIsNewBlock
1366 1366 """
1367 1367
1368 1368 if self.flagNoMoreFiles:
1369 1369 self.dataOut.flagNoData = True
1370 1370 print 'Process finished'
1371 1371 return 0
1372 1372
1373 1373 self.flagTimeBlock = 0
1374 1374 self.flagIsNewBlock = 0
1375 1375
1376 1376 if self.__hasNotDataInBuffer():
1377 1377
1378 1378 if not( self.readNextBlock() ):
1379 1379 return 0
1380 1380
1381 1381 self.dataOut.dtype = self.dtype
1382 1382
1383 1383 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
1384 1384
1385 1385 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
1386 1386
1387 1387 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
1388 1388
1389 1389 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
1390 1390
1391 1391 self.dataOut.flagTimeBlock = self.flagTimeBlock
1392 1392
1393 1393 self.dataOut.ippSeconds = self.ippSeconds
1394 1394
1395 1395 self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt
1396 1396
1397 1397 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
1398 1398
1399 1399 self.dataOut.flagShiftFFT = False
1400 1400
1401 1401 if self.processingHeaderObj.code != None:
1402
1402 1403 self.dataOut.nCode = self.processingHeaderObj.nCode
1403 1404
1404 1405 self.dataOut.nBaud = self.processingHeaderObj.nBaud
1405 1406
1406 1407 self.dataOut.code = self.processingHeaderObj.code
1407 1408
1408 1409 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
1409 1410
1410 1411 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
1412
1413 self.dataOut.flagDecodeData = False #assume the data is not decoded
1414
1415 self.dataOut.flagDeflipData = False #assume the data has not been de-flipped
1416
1417 self.dataOut.flagShiftFFT = False
1418
1411 1419
1412 1420 # self.updateDataHeader()
1413 1421
1414 1422 #data is a 3-dimensional numpy array (profiles, heights and channels)
1415 1423
1416 1424 if self.datablock == None:
1417 1425 self.dataOut.flagNoData = True
1418 1426 return 0
1419 1427
1420 1428 self.dataOut.data = self.datablock[:,self.profileIndex,:]
1421 1429
1422 1430 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000. + self.profileIndex * self.ippSeconds
1423 1431
1424 1432 self.profileIndex += 1
1425 1433
1426 1434 self.dataOut.flagNoData = False
1427 1435
1428 1436 # print self.profileIndex, self.dataOut.utctime
1429 1437 # if self.profileIndex == 800:
1430 1438 # a=1
1431 1439
1432 1440
1433 1441 return self.dataOut.data
1434 1442
1435 1443
1436 1444 class VoltageWriter(JRODataWriter):
1437 1445 """
1438 1446 This class writes voltage data to processed files (.r). Data is always
1439 1447 written in blocks.
1440 1448 """
1441 1449
1442 1450 ext = ".r"
1443 1451
1444 1452 optchar = "D"
1445 1453
1446 1454 shapeBuffer = None
1447 1455
1448 1456
1449 1457 def __init__(self):
1450 1458 """
1451 1459 Initializer of the VoltageWriter class for writing voltage data.
1452 1460
1453 1461 Affected:
1454 1462 self.dataOut
1455 1463
1456 1464 Return: None
1457 1465 """
1458 1466
1459 1467 self.nTotalBlocks = 0
1460 1468
1461 1469 self.profileIndex = 0
1462 1470
1463 1471 self.isConfig = False
1464 1472
1465 1473 self.fp = None
1466 1474
1467 1475 self.flagIsNewFile = 1
1468 1476
1469 1477 self.nTotalBlocks = 0
1470 1478
1471 1479 self.flagIsNewBlock = 0
1472 1480
1473 1481 self.setFile = None
1474 1482
1475 1483 self.dtype = None
1476 1484
1477 1485 self.path = None
1478 1486
1479 1487 self.filename = None
1480 1488
1481 1489 self.basicHeaderObj = BasicHeader()
1482 1490
1483 1491 self.systemHeaderObj = SystemHeader()
1484 1492
1485 1493 self.radarControllerHeaderObj = RadarControllerHeader()
1486 1494
1487 1495 self.processingHeaderObj = ProcessingHeader()
1488 1496
1489 1497 def hasAllDataInBuffer(self):
1490 1498 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
1491 1499 return 1
1492 1500 return 0
1493 1501
1494 1502
1495 1503 def setBlockDimension(self):
1496 1504 """
1497 1505 Sets the dimensional shapes of the data sub-blocks that make up a block
1498 1506
1499 1507 Affected:
1500 1508 self.shapeBuffer
1501 1509 self.datablock
1502 1510
1503 1511
1504 1512 Return: None
1505 1513 """
1506 1514 self.shapeBuffer = (self.processingHeaderObj.profilesPerBlock,
1507 1515 self.processingHeaderObj.nHeights,
1508 1516 self.systemHeaderObj.nChannels)
1509 1517
1510 1518 self.datablock = numpy.zeros((self.systemHeaderObj.nChannels,
1511 1519 self.processingHeaderObj.profilesPerBlock,
1512 1520 self.processingHeaderObj.nHeights),
1513 1521 dtype=numpy.dtype('complex'))
1514 1522
1515 1523
1516 1524 def writeBlock(self):
1517 1525 """
1518 1526 Writes the buffer to the designated file
1519 1527
1520 1528 Affected:
1521 1529 self.profileIndex
1522 1530 self.flagIsNewFile
1523 1531 self.flagIsNewBlock
1524 1532 self.nTotalBlocks
1525 1533 self.blockIndex
1526 1534
1527 1535 Return: None
1528 1536 """
1529 1537 data = numpy.zeros( self.shapeBuffer, self.dtype )
1530 1538
1531 1539 junk = numpy.transpose(self.datablock, (1,2,0))
1532 1540
1533 1541 data['real'] = junk.real
1534 1542 data['imag'] = junk.imag
1535 1543
1536 1544 data = data.reshape( (-1) )
1537 1545
1538 1546 data.tofile( self.fp )
1539 1547
1540 1548 self.datablock.fill(0)
1541 1549
1542 1550 self.profileIndex = 0
1543 1551 self.flagIsNewFile = 0
1544 1552 self.flagIsNewBlock = 1
1545 1553
1546 1554 self.blockIndex += 1
1547 1555 self.nTotalBlocks += 1
1548 1556
1549 1557 def putData(self):
1550 1558 """
1551 1559 Buffers a block of data and then writes it to a file
1552 1560
1553 1561 Affected:
1554 1562 self.flagIsNewBlock
1555 1563 self.profileIndex
1556 1564
1557 1565 Return:
1558 1566 0 : if there is no data or no more files can be written
1559 1567 1 : if a block of data was written to a file
1560 1568 """
1561 1569 if self.dataOut.flagNoData:
1562 1570 return 0
1563 1571
1564 1572 self.flagIsNewBlock = 0
1565 1573
1566 1574 if self.dataOut.flagTimeBlock:
1567 1575
1568 1576 self.datablock.fill(0)
1569 1577 self.profileIndex = 0
1570 1578 self.setNextFile()
1571 1579
1572 1580 if self.profileIndex == 0:
1573 1581 self.getBasicHeader()
1574 1582
1575 1583 self.datablock[:,self.profileIndex,:] = self.dataOut.data
1576 1584
1577 1585 self.profileIndex += 1
1578 1586
1579 1587 if self.hasAllDataInBuffer():
1580 1588 #if self.flagIsNewFile:
1581 1589 self.writeNextBlock()
1582 1590 # self.getDataHeader()
1583 1591
1584 1592 return 1
1585 1593
1586 1594 def __getProcessFlags(self):
1587 1595
1588 1596 processFlags = 0
1589 1597
1590 1598 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
1591 1599 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
1592 1600 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
1593 1601 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
1594 1602 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
1595 1603 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
1596 1604
1597 1605 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
1598 1606
1599 1607
1600 1608
1601 1609 datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
1602 1610 PROCFLAG.DATATYPE_SHORT,
1603 1611 PROCFLAG.DATATYPE_LONG,
1604 1612 PROCFLAG.DATATYPE_INT64,
1605 1613 PROCFLAG.DATATYPE_FLOAT,
1606 1614 PROCFLAG.DATATYPE_DOUBLE]
1607 1615
1608 1616
1609 1617 for index in range(len(dtypeList)):
1610 1618 if self.dataOut.dtype == dtypeList[index]:
1611 1619 dtypeValue = datatypeValueList[index]
1612 1620 break
1613 1621
1614 1622 processFlags += dtypeValue
1615 1623
1616 1624 if self.dataOut.flagDecodeData:
1617 1625 processFlags += PROCFLAG.DECODE_DATA
1618 1626
1619 1627 if self.dataOut.flagDeflipData:
1620 1628 processFlags += PROCFLAG.DEFLIP_DATA
1621 1629
1622 1630 if self.dataOut.code != None:
1623 1631 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
1624 1632
1625 1633 if self.dataOut.nCohInt > 1:
1626 1634 processFlags += PROCFLAG.COHERENT_INTEGRATION
1627 1635
1628 1636 return processFlags
1629 1637
1630 1638
1631 1639 def __getBlockSize(self):
1632 1640 '''
1633 1641 Determines the number of bytes in a Voltage data block
1634 1642 '''
1635 1643
1636 1644 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
1637 1645 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
1638 1646 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
1639 1647 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
1640 1648 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
1641 1649 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
1642 1650
1643 1651 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
1644 1652 datatypeValueList = [1,2,4,8,4,8]
1645 1653 for index in range(len(dtypeList)):
1646 1654 if self.dataOut.dtype == dtypeList[index]:
1647 1655 datatypeValue = datatypeValueList[index]
1648 1656 break
1649 1657
1650 1658 blocksize = int(self.dataOut.nHeights * self.dataOut.nChannels * self.dataOut.nProfiles * datatypeValue * 2)
1651 1659
1652 1660 return blocksize
1653 1661
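    # Worked example (hypothetical values, illustration only): for a complex
    # short-integer datatype ('<i2', i.e. 2 bytes per component) with 2 channels,
    # 128 profiles per block and 1000 heights, __getBlockSize() returns
    #       1000 * 2 * 128 * 2 * 2 = 1024000 bytes.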
1654 1662 def getDataHeader(self):
1655 1663
1656 1664 """
1657 1665 Gets a copy of the First Header
1658 1666
1659 1667 Affected:
1660 1668 self.systemHeaderObj
1661 1669 self.radarControllerHeaderObj
1662 1670 self.dtype
1663 1671
1664 1672 Return:
1665 1673 None
1666 1674 """
1667 1675
1668 1676 self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
1669 1677 self.systemHeaderObj.nChannels = self.dataOut.nChannels
1670 1678 self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()
1671 1679
1672 1680 self.getBasicHeader()
1673 1681
1674 1682 processingHeaderSize = 40 # bytes
1675 1683 self.processingHeaderObj.dtype = 0 # Voltage
1676 1684 self.processingHeaderObj.blockSize = self.__getBlockSize()
1677 1685 self.processingHeaderObj.profilesPerBlock = self.profilesPerBlock
1678 1686 self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
1679 1687 self.processingHeaderObj.nWindows = 1 #could be 1 or self.dataOut.processingHeaderObj.nWindows
1680 1688 self.processingHeaderObj.processFlags = self.__getProcessFlags()
1681 1689 self.processingHeaderObj.nCohInt = self.dataOut.nCohInt
1682 1690 self.processingHeaderObj.nIncohInt = 1 # when the source data is of Voltage type
1683 1691 self.processingHeaderObj.totalSpectra = 0 # when the source data is of Voltage type
1684 1692
1685 1693 if self.dataOut.code != None:
1686 1694 self.processingHeaderObj.code = self.dataOut.code
1687 1695 self.processingHeaderObj.nCode = self.dataOut.nCode
1688 1696 self.processingHeaderObj.nBaud = self.dataOut.nBaud
1689 1697 codesize = int(8 + 4 * self.dataOut.nCode * self.dataOut.nBaud)
1690 1698 processingHeaderSize += codesize
1691 1699
1692 1700 if self.processingHeaderObj.nWindows != 0:
1693 1701 self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
1694 1702 self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
1695 1703 self.processingHeaderObj.nHeights = self.dataOut.nHeights
1696 1704 self.processingHeaderObj.samplesWin = self.dataOut.nHeights
1697 1705 processingHeaderSize += 12
1698 1706
1699 1707 self.processingHeaderObj.size = processingHeaderSize
1700 1708
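# --- Illustrative sketch (not part of the original module) --------------------
# The writers in this file encode the processing state as a bitmask built from
# the PROCFLAG constants defined in jroheaderIO (assumed to be imported at the
# top of this module, as the writer code above already relies on). A small,
# hypothetical helper like the one below can decode that mask when inspecting a
# processing header by hand.

def describeProcessFlags(processFlags):

    labels = []

    if processFlags & PROCFLAG.DECODE_DATA:
        labels.append('decoded data')

    if processFlags & PROCFLAG.DEFLIP_DATA:
        labels.append('deflipped data')

    if processFlags & PROCFLAG.DEFINE_PROCESS_CODE:
        labels.append('code stored in the header')

    if processFlags & PROCFLAG.COHERENT_INTEGRATION:
        labels.append('coherent integration')

    if processFlags & PROCFLAG.INCOHERENT_INTEGRATION:
        labels.append('incoherent integration')

    labels.append('datatype bits: 0x%08X' % (processFlags & PROCFLAG.DATATYPE_MASK))

    return labels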
1701 1709 class SpectraReader(JRODataReader):
1702 1710 """
1703 1711 This class reads spectra data from processed files (.pdata). Data is always
1704 1712 read in blocks. The data read (3-dimensional arrays) are stored in three
1705 1713 buffers: Self Spectra, Cross Spectra and DC Channels.
1706 1714
1707 1715 equalChannelPairs * heights * profiles (Self Spectra)
1708 1716 differentChannelPairs * heights * profiles (Cross Spectra)
1709 1717 channels * heights (DC Channels)
1710 1718
1711 1719 This class contains instances (objects) of the BasicHeader, SystemHeader,
1712 1720 RadarControllerHeader and Spectra classes. The first three store the data
1713 1721 header information (metadata), and the fourth (Spectra) is used to obtain and
1714 1722 store a block of data from the buffer each time the "getData" method is called.
1715 1723
1716 1724 Example:
1717 1725 dpath = "/home/myuser/data"
1718 1726
1719 1727 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
1720 1728
1721 1729 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
1722 1730
1723 1731 readerObj = SpectraReader()
1724 1732
1725 1733 readerObj.setup(dpath, startTime, endTime)
1726 1734
1727 1735 while(True):
1728 1736
1729 1737 readerObj.getData()
1730 1738
1731 1739 print readerObj.data_spc
1732 1740
1733 1741 print readerObj.data_cspc
1734 1742
1735 1743 print readerObj.data_dc
1736 1744
1737 1745 if readerObj.flagNoMoreFiles:
1738 1746 break
1739 1747
1740 1748 """
1741 1749
1742 1750 pts2read_SelfSpectra = 0
1743 1751
1744 1752 pts2read_CrossSpectra = 0
1745 1753
1746 1754 pts2read_DCchannels = 0
1747 1755
1748 1756 ext = ".pdata"
1749 1757
1750 1758 optchar = "P"
1751 1759
1752 1760 dataOut = None
1753 1761
1754 1762 nRdChannels = None
1755 1763
1756 1764 nRdPairs = None
1757 1765
1758 1766 rdPairList = []
1759 1767
1760 1768
1761 1769 def __init__(self):
1762 1770 """
1763 1771 Initializes the SpectraReader class for reading spectra data.
1764 1772
1765 1773 Inputs:
1766 1774 dataOut : Object of the Spectra class. This object will be used to
1767 1775 store a profile of data each time a request is made
1768 1776 (getData). The profile is obtained from the data buffer;
1769 1777 if the buffer is empty, a new read of a data block is
1770 1778 performed.
1771 1779 If this parameter is not passed, one is created internally.
1772 1780
1773 1781 Affected:
1774 1782 self.dataOut
1775 1783
1776 1784 Return : None
1777 1785 """
1778 1786
1779 1787 self.isConfig = False
1780 1788
1781 1789 self.pts2read_SelfSpectra = 0
1782 1790
1783 1791 self.pts2read_CrossSpectra = 0
1784 1792
1785 1793 self.pts2read_DCchannels = 0
1786 1794
1787 1795 self.datablock = None
1788 1796
1789 1797 self.utc = None
1790 1798
1791 1799 self.ext = ".pdata"
1792 1800
1793 1801 self.optchar = "P"
1794 1802
1795 1803 self.basicHeaderObj = BasicHeader()
1796 1804
1797 1805 self.systemHeaderObj = SystemHeader()
1798 1806
1799 1807 self.radarControllerHeaderObj = RadarControllerHeader()
1800 1808
1801 1809 self.processingHeaderObj = ProcessingHeader()
1802 1810
1803 1811 self.online = 0
1804 1812
1805 1813 self.fp = None
1806 1814
1807 1815 self.idFile = None
1808 1816
1809 1817 self.dtype = None
1810 1818
1811 1819 self.fileSizeByHeader = None
1812 1820
1813 1821 self.filenameList = []
1814 1822
1815 1823 self.filename = None
1816 1824
1817 1825 self.fileSize = None
1818 1826
1819 1827 self.firstHeaderSize = 0
1820 1828
1821 1829 self.basicHeaderSize = 24
1822 1830
1823 1831 self.pathList = []
1824 1832
1825 1833 self.lastUTTime = 0
1826 1834
1827 1835 self.maxTimeStep = 30
1828 1836
1829 1837 self.flagNoMoreFiles = 0
1830 1838
1831 1839 self.set = 0
1832 1840
1833 1841 self.path = None
1834 1842
1835 1843 self.delay = 3 #seconds
1836 1844
1837 1845 self.nTries = 3 #number of retries
1838 1846
1839 1847 self.nFiles = 3 #number of files for searching
1840 1848
1841 1849 self.nReadBlocks = 0
1842 1850
1843 1851 self.flagIsNewFile = 1
1844 1852
1845 1853 self.ippSeconds = 0
1846 1854
1847 1855 self.flagTimeBlock = 0
1848 1856
1849 1857 self.flagIsNewBlock = 0
1850 1858
1851 1859 self.nTotalBlocks = 0
1852 1860
1853 1861 self.blocksize = 0
1854 1862
1855 1863 self.dataOut = self.createObjByDefault()
1856 1864
1857 1865
1858 1866 def createObjByDefault(self):
1859 1867
1860 1868 dataObj = Spectra()
1861 1869
1862 1870 return dataObj
1863 1871
1864 1872 def __hasNotDataInBuffer(self):
1865 1873 return 1
1866 1874
1867 1875
1868 1876 def getBlockDimension(self):
1869 1877 """
1870 1878 Computes the number of points to read for each data block
1871 1879
1872 1880 Affected:
1873 1881 self.nRdChannels
1874 1882 self.nRdPairs
1875 1883 self.pts2read_SelfSpectra
1876 1884 self.pts2read_CrossSpectra
1877 1885 self.pts2read_DCchannels
1878 1886 self.blocksize
1879 1887 self.dataOut.nChannels
1880 1888 self.dataOut.nPairs
1881 1889
1882 1890 Return:
1883 1891 None
1884 1892 """
1885 1893 self.nRdChannels = 0
1886 1894 self.nRdPairs = 0
1887 1895 self.rdPairList = []
1888 1896
1889 1897 for i in range(0, self.processingHeaderObj.totalSpectra*2, 2):
1890 1898 if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i+1]:
1891 1899 self.nRdChannels = self.nRdChannels + 1 #pair of equal channels (self spectrum)
1892 1900 else:
1893 1901 self.nRdPairs = self.nRdPairs + 1 #pair of different channels (cross spectrum)
1894 1902 self.rdPairList.append((self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1]))
1895 1903
1896 1904 pts2read = self.processingHeaderObj.nHeights * self.processingHeaderObj.profilesPerBlock
1897 1905
1898 1906 self.pts2read_SelfSpectra = int(self.nRdChannels * pts2read)
1899 1907 self.blocksize = self.pts2read_SelfSpectra
1900 1908
1901 1909 if self.processingHeaderObj.flag_cspc:
1902 1910 self.pts2read_CrossSpectra = int(self.nRdPairs * pts2read)
1903 1911 self.blocksize += self.pts2read_CrossSpectra
1904 1912
1905 1913 if self.processingHeaderObj.flag_dc:
1906 1914 self.pts2read_DCchannels = int(self.systemHeaderObj.nChannels * self.processingHeaderObj.nHeights)
1907 1915 self.blocksize += self.pts2read_DCchannels
1908 1916
1909 1917 # self.blocksize = self.pts2read_SelfSpectra + self.pts2read_CrossSpectra + self.pts2read_DCchannels
1910 1918
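    # Block layout note (illustration only, hypothetical numbers): a .pdata block
    # is consumed in three consecutive chunks whose sizes come from the headers:
    #     self spectra : nRdChannels * nHeights * profilesPerBlock real values
    #     cross spectra: nRdPairs * nHeights * profilesPerBlock complex values (if flag_cspc)
    #     dc channels  : nChannels * nHeights complex values (if flag_dc)
    # e.g. 2 self channels, 1 pair, 100 heights and 128 profiles per block give
    # pts2read_SelfSpectra = 2*100*128 = 25600 and pts2read_CrossSpectra = 12800.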
1911 1919
1912 1920 def readBlock(self):
1913 1921 """
1914 1922 Reads the data block at the current position of the file pointer
1915 1923 (self.fp) and updates all the parameters related to that data block
1916 1924 (metadata + data). The data read is stored in the buffer and the buffer
1917 1925 counter is reset to 0.
1918 1926
1919 1927 Return: None
1920 1928
1921 1929 Affected variables:
1922 1930
1923 1931 self.flagIsNewFile
1924 1932 self.flagIsNewBlock
1925 1933 self.nTotalBlocks
1926 1934 self.data_spc
1927 1935 self.data_cspc
1928 1936 self.data_dc
1929 1937
1930 1938 Exceptions:
1931 1939 If a block that was read is not a valid block
1932 1940 """
1933 1941 blockOk_flag = False
1934 1942 fpointer = self.fp.tell()
1935 1943
1936 1944 spc = numpy.fromfile( self.fp, self.dtype[0], self.pts2read_SelfSpectra )
1937 1945 spc = spc.reshape( (self.nRdChannels, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #reshape into a 3D array
1938 1946
1939 1947 if self.processingHeaderObj.flag_cspc:
1940 1948 cspc = numpy.fromfile( self.fp, self.dtype, self.pts2read_CrossSpectra )
1941 1949 cspc = cspc.reshape( (self.nRdPairs, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #reshape into a 3D array
1942 1950
1943 1951 if self.processingHeaderObj.flag_dc:
1944 1952 dc = numpy.fromfile( self.fp, self.dtype, self.pts2read_DCchannels ) #int(self.processingHeaderObj.nHeights*self.systemHeaderObj.nChannels) )
1945 1953 dc = dc.reshape( (self.systemHeaderObj.nChannels, self.processingHeaderObj.nHeights) ) #reshape into a 2D array
1946 1954
1947 1955
1948 1956 if not(self.processingHeaderObj.shif_fft):
1949 1957 #shift right along axis 2 by a fixed number of positions
1950 1958 shift = int(self.processingHeaderObj.profilesPerBlock/2)
1951 1959 spc = numpy.roll( spc, shift , axis=2 )
1952 1960
1953 1961 if self.processingHeaderObj.flag_cspc:
1954 1962 #shift right along axis 2 by a fixed number of positions
1955 1963 cspc = numpy.roll( cspc, shift, axis=2 )
1956 1964
1957 1965
1958 1966 spc = numpy.transpose( spc, (0,2,1) )
1959 1967 self.data_spc = spc
1960 1968
1961 1969 if self.processingHeaderObj.flag_cspc:
1962 1970 cspc = numpy.transpose( cspc, (0,2,1) )
1963 1971 self.data_cspc = cspc['real'] + cspc['imag']*1j
1964 1972 else:
1965 1973 self.data_cspc = None
1966 1974
1967 1975 if self.processingHeaderObj.flag_dc:
1968 1976 self.data_dc = dc['real'] + dc['imag']*1j
1969 1977 else:
1970 1978 self.data_dc = None
1971 1979
1972 1980 self.flagIsNewFile = 0
1973 1981 self.flagIsNewBlock = 1
1974 1982
1975 1983 self.nTotalBlocks += 1
1976 1984 self.nReadBlocks += 1
1977 1985
1978 1986 return 1
1979 1987
1980 1988
1981 1989 def getData(self):
1982 1990 """
1983 1991 Copies the read buffer into the "Spectra" class, together with all the
1984 1992 parameters (metadata) associated with it. When there is no data in the read
1985 1993 buffer, a new read of the data blocks is done using "readNextBlock"
1986 1994
1987 1995 Return:
1988 1996 0 : if there are no more files available
1989 1997 1 : if the buffer was copied successfully
1990 1998
1991 1999 Affected:
1992 2000 self.dataOut
1993 2001
1994 2002 self.flagTimeBlock
1995 2003 self.flagIsNewBlock
1996 2004 """
1997 2005
1998 2006 if self.flagNoMoreFiles:
1999 2007 self.dataOut.flagNoData = True
2000 2008 print 'Process finished'
2001 2009 return 0
2002 2010
2003 2011 self.flagTimeBlock = 0
2004 2012 self.flagIsNewBlock = 0
2005 2013
2006 2014 if self.__hasNotDataInBuffer():
2007 2015
2008 2016 if not( self.readNextBlock() ):
2009 2017 self.dataOut.flagNoData = True
2010 2018 return 0
2011 2019
2012 2020 # self.updateDataHeader()
2013 2021
2014 2022 #data is a 3-dimensional numpy array (profiles, heights and channels)
2015 2023
2016 2024 if self.data_dc == None:
2017 2025 self.dataOut.flagNoData = True
2018 2026 return 0
2019 2027
2020 2028 self.dataOut.data_spc = self.data_spc
2021 2029
2022 2030 self.dataOut.data_cspc = self.data_cspc
2023 2031
2024 2032 self.dataOut.data_dc = self.data_dc
2025 2033
2026 2034 self.dataOut.flagTimeBlock = self.flagTimeBlock
2027 2035
2028 2036 self.dataOut.flagNoData = False
2029 2037
2030 2038 self.dataOut.dtype = self.dtype
2031 2039
2032 2040 # self.dataOut.nChannels = self.nRdChannels
2033 2041
2034 2042 self.dataOut.nPairs = self.nRdPairs
2035 2043
2036 2044 self.dataOut.pairsList = self.rdPairList
2037 2045
2038 2046 # self.dataOut.nHeights = self.processingHeaderObj.nHeights
2039 2047
2040 2048 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
2041 2049
2042 2050 self.dataOut.nFFTPoints = self.processingHeaderObj.profilesPerBlock
2043 2051
2044 2052 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
2045 2053
2046 2054 self.dataOut.nIncohInt = self.processingHeaderObj.nIncohInt
2047 2055
2048 2056 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
2049 2057
2050 2058 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
2051 2059
2052 2060 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
2053 2061
2054 2062 # self.dataOut.channelIndexList = range(self.systemHeaderObj.nChannels)
2055 2063
2056 2064 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000.#+ self.profileIndex * self.ippSeconds
2057 2065
2058 2066 self.dataOut.ippSeconds = self.ippSeconds
2059 2067
2060 2068 self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt * self.processingHeaderObj.nIncohInt * self.dataOut.nFFTPoints
2061 2069
2062 self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft
2063
2064 2070 # self.profileIndex += 1
2065 2071
2066 2072 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
2067 2073
2068 2074 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
2075
2076 self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft
2077
2078 self.dataOut.flagDecodeData = True #assume the data has already been decoded
2079
2080 self.dataOut.flagDeflipData = True #assume the data has already been deflipped
2081
2069 2082
2070 2083 return self.dataOut.data_spc
2071 2084
2072 2085
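# --- Worked example (hypothetical values, illustration only) ------------------
# For spectra, getData() above computes the time span covered by one block as
#       timeInterval = ippSeconds * nCohInt * nIncohInt * nFFTPoints
# e.g. ippSeconds = 0.0004 s, nCohInt = 4, nIncohInt = 8 and nFFTPoints = 128
# give timeInterval = 0.0004 * 4 * 8 * 128 = 1.6384 s.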
2073 2086 class SpectraWriter(JRODataWriter):
2074 2087
2075 2088 """
2076 2089 This class writes spectra data to processed files (.pdata). Data is always
2077 2090 written in blocks.
2078 2091 """
2079 2092
2080 2093 ext = ".pdata"
2081 2094
2082 2095 optchar = "P"
2083 2096
2084 2097 shape_spc_Buffer = None
2085 2098
2086 2099 shape_cspc_Buffer = None
2087 2100
2088 2101 shape_dc_Buffer = None
2089 2102
2090 2103 data_spc = None
2091 2104
2092 2105 data_cspc = None
2093 2106
2094 2107 data_dc = None
2095 2108
2096 2109 # dataOut = None
2097 2110
2098 2111 def __init__(self):
2099 2112 """
2100 2113 Initializes the SpectraWriter class for writing spectra data.
2101 2114
2102 2115 Affected:
2103 2116 self.dataOut
2104 2117 self.basicHeaderObj
2105 2118 self.systemHeaderObj
2106 2119 self.radarControllerHeaderObj
2107 2120 self.processingHeaderObj
2108 2121
2109 2122 Return: None
2110 2123 """
2111 2124
2112 2125 self.isConfig = False
2113 2126
2114 2127 self.nTotalBlocks = 0
2115 2128
2116 2129 self.data_spc = None
2117 2130
2118 2131 self.data_cspc = None
2119 2132
2120 2133 self.data_dc = None
2121 2134
2122 2135 self.fp = None
2123 2136
2124 2137 self.flagIsNewFile = 1
2125 2138
2126 2139 self.nTotalBlocks = 0
2127 2140
2128 2141 self.flagIsNewBlock = 0
2129 2142
2130 2143 self.setFile = None
2131 2144
2132 2145 self.dtype = None
2133 2146
2134 2147 self.path = None
2135 2148
2136 2149 self.noMoreFiles = 0
2137 2150
2138 2151 self.filename = None
2139 2152
2140 2153 self.basicHeaderObj = BasicHeader()
2141 2154
2142 2155 self.systemHeaderObj = SystemHeader()
2143 2156
2144 2157 self.radarControllerHeaderObj = RadarControllerHeader()
2145 2158
2146 2159 self.processingHeaderObj = ProcessingHeader()
2147 2160
2148 2161
2149 2162 def hasAllDataInBuffer(self):
2150 2163 return 1
2151 2164
2152 2165
2153 2166 def setBlockDimension(self):
2154 2167 """
2155 2168 Sets the dimensional shapes of the data sub-blocks that make up a block
2156 2169
2157 2170 Affected:
2158 2171 self.shape_spc_Buffer
2159 2172 self.shape_cspc_Buffer
2160 2173 self.shape_dc_Buffer
2161 2174
2162 2175 Return: None
2163 2176 """
2164 2177 self.shape_spc_Buffer = (self.dataOut.nChannels,
2165 2178 self.processingHeaderObj.nHeights,
2166 2179 self.processingHeaderObj.profilesPerBlock)
2167 2180
2168 2181 self.shape_cspc_Buffer = (self.dataOut.nPairs,
2169 2182 self.processingHeaderObj.nHeights,
2170 2183 self.processingHeaderObj.profilesPerBlock)
2171 2184
2172 2185 self.shape_dc_Buffer = (self.dataOut.nChannels,
2173 2186 self.processingHeaderObj.nHeights)
2174 2187
2175 2188
2176 2189 def writeBlock(self):
2177 2190 """
2178 2191 Writes the buffer to the designated file
2179 2192
2180 2193 Affected:
2181 2194 self.data_spc
2182 2195 self.data_cspc
2183 2196 self.data_dc
2184 2197 self.flagIsNewFile
2185 2198 self.flagIsNewBlock
2186 2199 self.nTotalBlocks
2187 2200 self.nWriteBlocks
2188 2201
2189 2202 Return: None
2190 2203 """
2191 2204
2192 2205 spc = numpy.transpose( self.data_spc, (0,2,1) )
2193 2206 if not( self.processingHeaderObj.shif_fft ):
2194 2207 spc = numpy.roll( spc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #shift right along axis 2 by a fixed number of positions
2195 2208 data = spc.reshape((-1))
2196 2209 data.tofile(self.fp)
2197 2210
2198 2211 if self.data_cspc != None:
2199 2212 data = numpy.zeros( self.shape_cspc_Buffer, self.dtype )
2200 2213 cspc = numpy.transpose( self.data_cspc, (0,2,1) )
2201 2214 if not( self.processingHeaderObj.shif_fft ):
2202 2215 cspc = numpy.roll( cspc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #shift right along axis 2 by a fixed number of positions
2203 2216 data['real'] = cspc.real
2204 2217 data['imag'] = cspc.imag
2205 2218 data = data.reshape((-1))
2206 2219 data.tofile(self.fp)
2207 2220
2208 2221 if self.data_dc != None:
2209 2222 data = numpy.zeros( self.shape_dc_Buffer, self.dtype )
2210 2223 dc = self.data_dc
2211 2224 data['real'] = dc.real
2212 2225 data['imag'] = dc.imag
2213 2226 data = data.reshape((-1))
2214 2227 data.tofile(self.fp)
2215 2228
2216 2229 self.data_spc.fill(0)
2217 2230 self.data_dc.fill(0)
2218 2231 if self.data_cspc != None:
2219 2232 self.data_cspc.fill(0)
2220 2233
2221 2234 self.flagIsNewFile = 0
2222 2235 self.flagIsNewBlock = 1
2223 2236 self.nTotalBlocks += 1
2224 2237 self.nWriteBlocks += 1
2225 2238 self.blockIndex += 1
2226 2239
2227 2240
2228 2241 def putData(self):
2229 2242 """
2230 2243 Buffers a block of data and then writes it to a file
2231 2244
2232 2245 Affected:
2233 2246 self.data_spc
2234 2247 self.data_cspc
2235 2248 self.data_dc
2236 2249
2237 2250 Return:
2238 2251 0 : if there is no data or no more files can be written
2239 2252 1 : if a block of data was written to a file
2240 2253 """
2241 2254
2242 2255 if self.dataOut.flagNoData:
2243 2256 return 0
2244 2257
2245 2258 self.flagIsNewBlock = 0
2246 2259
2247 2260 if self.dataOut.flagTimeBlock:
2248 2261 self.data_spc.fill(0)
2249 2262 self.data_cspc.fill(0)
2250 2263 self.data_dc.fill(0)
2251 2264 self.setNextFile()
2252 2265
2253 2266 if self.flagIsNewFile == 0:
2254 2267 self.getBasicHeader()
2255 2268
2256 2269 self.data_spc = self.dataOut.data_spc
2257 2270 self.data_cspc = self.dataOut.data_cspc
2258 2271 self.data_dc = self.dataOut.data_dc
2259 2272
2260 2273 # #self.processingHeaderObj.dataBlocksPerFile)
2261 2274 if self.hasAllDataInBuffer():
2262 2275 # self.getDataHeader()
2263 2276 self.writeNextBlock()
2264 2277
2265 2278 return 1
2266 2279
2267 2280
2268 2281 def __getProcessFlags(self):
2269 2282
2270 2283 processFlags = 0
2271 2284
2272 2285 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
2273 2286 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
2274 2287 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
2275 2288 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
2276 2289 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
2277 2290 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
2278 2291
2279 2292 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
2280 2293
2281 2294
2282 2295
2283 2296 datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
2284 2297 PROCFLAG.DATATYPE_SHORT,
2285 2298 PROCFLAG.DATATYPE_LONG,
2286 2299 PROCFLAG.DATATYPE_INT64,
2287 2300 PROCFLAG.DATATYPE_FLOAT,
2288 2301 PROCFLAG.DATATYPE_DOUBLE]
2289 2302
2290 2303
2291 2304 for index in range(len(dtypeList)):
2292 2305 if self.dataOut.dtype == dtypeList[index]:
2293 2306 dtypeValue = datatypeValueList[index]
2294 2307 break
2295 2308
2296 2309 processFlags += dtypeValue
2297 2310
2298 2311 if self.dataOut.flagDecodeData:
2299 2312 processFlags += PROCFLAG.DECODE_DATA
2300 2313
2301 2314 if self.dataOut.flagDeflipData:
2302 2315 processFlags += PROCFLAG.DEFLIP_DATA
2303 2316
2304 2317 if self.dataOut.code != None:
2305 2318 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
2306 2319
2307 2320 if self.dataOut.nIncohInt > 1:
2308 2321 processFlags += PROCFLAG.INCOHERENT_INTEGRATION
2309 2322
2310 2323 if self.dataOut.data_dc != None:
2311 2324 processFlags += PROCFLAG.SAVE_CHANNELS_DC
2312 2325
2313 2326 return processFlags
2314 2327
2315 2328
2316 2329 def __getBlockSize(self):
2317 2330 '''
2318 2331 Determines the number of bytes in a Spectra data block
2319 2332 '''
2320 2333
2321 2334 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
2322 2335 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
2323 2336 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
2324 2337 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
2325 2338 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
2326 2339 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
2327 2340
2328 2341 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
2329 2342 datatypeValueList = [1,2,4,8,4,8]
2330 2343 for index in range(len(dtypeList)):
2331 2344 if self.dataOut.dtype == dtypeList[index]:
2332 2345 datatypeValue = datatypeValueList[index]
2333 2346 break
2334 2347
2335 2348
2336 2349 pts2write = self.dataOut.nHeights * self.dataOut.nFFTPoints
2337 2350
2338 2351 pts2write_SelfSpectra = int(self.dataOut.nChannels * pts2write)
2339 2352 blocksize = (pts2write_SelfSpectra*datatypeValue)
2340 2353
2341 2354 if self.dataOut.data_cspc != None:
2342 2355 pts2write_CrossSpectra = int(self.dataOut.nPairs * pts2write)
2343 2356 blocksize += (pts2write_CrossSpectra*datatypeValue*2)
2344 2357
2345 2358 if self.dataOut.data_dc != None:
2346 2359 pts2write_DCchannels = int(self.dataOut.nChannels * self.dataOut.nHeights)
2347 2360 blocksize += (pts2write_DCchannels*datatypeValue*2)
2348 2361
2349 2362 blocksize = blocksize #* datatypeValue * 2 #FIX THIS
2350 2363
2351 2364 return blocksize
2352 2365
2353 2366 def getDataHeader(self):
2354 2367
2355 2368 """
2356 2369 Gets a copy of the First Header
2357 2370
2358 2371 Affected:
2359 2372 self.systemHeaderObj
2360 2373 self.radarControllerHeaderObj
2361 2374 self.dtype
2362 2375
2363 2376 Return:
2364 2377 None
2365 2378 """
2366 2379
2367 2380 self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
2368 2381 self.systemHeaderObj.nChannels = self.dataOut.nChannels
2369 2382 self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()
2370 2383
2371 2384 self.getBasicHeader()
2372 2385
2373 2386 processingHeaderSize = 40 # bytes
2374 2387 self.processingHeaderObj.dtype = 0 # Voltage
2375 2388 self.processingHeaderObj.blockSize = self.__getBlockSize()
2376 2389 self.processingHeaderObj.profilesPerBlock = self.dataOut.nFFTPoints
2377 2390 self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
2378 2391 self.processingHeaderObj.nWindows = 1 #could be 1 or self.dataOut.processingHeaderObj.nWindows
2379 2392 self.processingHeaderObj.processFlags = self.__getProcessFlags()
2380 2393 self.processingHeaderObj.nCohInt = self.dataOut.nCohInt # required to determine the value of timeInterval
2381 2394 self.processingHeaderObj.nIncohInt = self.dataOut.nIncohInt
2382 2395 self.processingHeaderObj.totalSpectra = self.dataOut.nPairs + self.dataOut.nChannels
2383 2396
2384 2397 if self.processingHeaderObj.totalSpectra > 0:
2385 2398 channelList = []
2386 2399 for channel in range(self.dataOut.nChannels):
2387 2400 channelList.append(channel)
2388 2401 channelList.append(channel)
2389 2402
2390 2403 pairsList = []
2391 2404 for pair in self.dataOut.pairsList:
2392 2405 pairsList.append(pair[0])
2393 2406 pairsList.append(pair[1])
2394 2407 spectraComb = channelList + pairsList
2395 2408 spectraComb = numpy.array(spectraComb,dtype="u1")
2396 2409 self.processingHeaderObj.spectraComb = spectraComb
2397 2410 sizeOfSpcComb = len(spectraComb)
2398 2411 processingHeaderSize += sizeOfSpcComb
2399 2412
2400 2413 if self.dataOut.code != None:
2401 2414 self.processingHeaderObj.code = self.dataOut.code
2402 2415 self.processingHeaderObj.nCode = self.dataOut.nCode
2403 2416 self.processingHeaderObj.nBaud = self.dataOut.nBaud
2404 2417 nCodeSize = 4 # bytes
2405 2418 nBaudSize = 4 # bytes
2406 2419 codeSize = 4 # bytes
2407 2420 sizeOfCode = int(nCodeSize + nBaudSize + codeSize * self.dataOut.nCode * self.dataOut.nBaud)
2408 2421 processingHeaderSize += sizeOfCode
2409 2422
2410 2423 if self.processingHeaderObj.nWindows != 0:
2411 2424 self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
2412 2425 self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
2413 2426 self.processingHeaderObj.nHeights = self.dataOut.nHeights
2414 2427 self.processingHeaderObj.samplesWin = self.dataOut.nHeights
2415 2428 sizeOfFirstHeight = 4
2416 2429 sizeOfdeltaHeight = 4
2417 2430 sizeOfnHeights = 4
2418 2431 sizeOfWindows = (sizeOfFirstHeight + sizeOfdeltaHeight + sizeOfnHeights)*self.processingHeaderObj.nWindows
2419 2432 processingHeaderSize += sizeOfWindows
2420 2433
2421 2434 self.processingHeaderObj.size = processingHeaderSize
2422 2435
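# --- Worked example (hypothetical values, illustration only) ------------------
# SpectraWriter.getDataHeader() above flattens channels and channel pairs into
# spectraComb, two entries per spectrum: a self spectrum is encoded as a
# repeated channel number and a cross spectrum as its two channel numbers.
# For 3 channels and pairsList = [(0, 1), (0, 2)]:
#       channels -> [0, 0, 1, 1, 2, 2]
#       pairs    -> [0, 1, 0, 2]
#       spectraComb = [0, 0, 1, 1, 2, 2, 0, 1, 0, 2]    # 2 * totalSpectra entries
# which is exactly the layout that ProcessingHeader.read() and
# SpectraReader.getBlockDimension() walk through to rebuild the channel and
# pair lists on the reading side.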
2423 2436 class SpectraHeisWriter():
2424 2437
2425 2438 i=0
2426 2439
2427 2440 def __init__(self, dataOut):
2428 2441
2429 2442 self.wrObj = FITS()
2430 2443 self.dataOut = dataOut
2431 2444
2432 2445 def isNumber(self, str):
2433 2446 """
2434 2447 Checks whether the characters that make up a string can be converted to a number.
2435 2448
2436 2449 Exceptions:
2437 2450 If a given string cannot be converted to a number
2438 2451 Input:
2439 2452 str, the string analyzed to determine whether it can be converted to a number
2440 2453
2441 2454 Return:
2442 2455 True : if the string is numeric
2443 2456 False : if the string is not numeric
2444 2457 """
2445 2458 try:
2446 2459 float( str )
2447 2460 return True
2448 2461 except:
2449 2462 return False
2450 2463
2451 2464 def setup(self, wrpath,):
2452 2465
2453 2466 if not(os.path.exists(wrpath)):
2454 2467 os.mkdir(wrpath)
2455 2468
2456 2469 self.wrpath = wrpath
2457 2470 self.setFile = 0
2458 2471
2459 2472 def putData(self):
2460 2473 # self.wrObj.writeHeader(nChannels=self.dataOut.nChannels, nFFTPoints=self.dataOut.nFFTPoints)
2461 2474 #name = self.dataOut.utctime
2462 2475 name= time.localtime( self.dataOut.utctime)
2463 2476 ext=".fits"
2464 2477 #folder='D%4.4d%3.3d'%(name.tm_year,name.tm_yday)
2465 2478 subfolder = 'D%4.4d%3.3d' % (name.tm_year,name.tm_yday)
2466 2479
2467 2480 fullpath = os.path.join( self.wrpath, subfolder )
2468 2481 if not( os.path.exists(fullpath) ):
2469 2482 os.mkdir(fullpath)
2470 2483 self.setFile += 1
2471 2484 file = 'D%4.4d%3.3d%3.3d%s' % (name.tm_year,name.tm_yday,self.setFile,ext)
2472 2485
2473 2486 filename = os.path.join(self.wrpath,subfolder, file)
2474 2487
2475 2488 # print self.dataOut.ippSeconds
2476 2489 freq=numpy.arange(-1*self.dataOut.nHeights/2.,self.dataOut.nHeights/2.)/(2*self.dataOut.ippSeconds)
2477 2490
2478 2491 col1=self.wrObj.setColF(name="freq", format=str(self.dataOut.nFFTPoints)+'E', array=freq)
2479 2492 col2=self.wrObj.writeData(name="P_Ch1",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[0,:]))
2480 2493 col3=self.wrObj.writeData(name="P_Ch2",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[1,:]))
2481 2494 col4=self.wrObj.writeData(name="P_Ch3",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[2,:]))
2482 2495 col5=self.wrObj.writeData(name="P_Ch4",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[3,:]))
2483 2496 col6=self.wrObj.writeData(name="P_Ch5",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[4,:]))
2484 2497 col7=self.wrObj.writeData(name="P_Ch6",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[5,:]))
2485 2498 col8=self.wrObj.writeData(name="P_Ch7",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[6,:]))
2486 2499 col9=self.wrObj.writeData(name="P_Ch8",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[7,:]))
2487 2500 #n=numpy.arange((100))
2488 2501 n=self.dataOut.data_spc[6,:]
2489 2502 a=self.wrObj.cFImage(n)
2490 2503 b=self.wrObj.Ctable(col1,col2,col3,col4,col5,col6,col7,col8,col9)
2491 2504 self.wrObj.CFile(a,b)
2492 2505 self.wrObj.wFile(filename)
2493 2506 return 1
2494 2507
2495 2508 class FITS:
2496 2509
2497 2510 name=None
2498 2511 format=None
2499 2512 array =None
2500 2513 data =None
2501 2514 thdulist=None
2502 2515
2503 2516 def __init__(self):
2504 2517
2505 2518 pass
2506 2519
2507 2520 def setColF(self,name,format,array):
2508 2521 self.name=name
2509 2522 self.format=format
2510 2523 self.array=array
2511 2524 a1=numpy.array([self.array],dtype=numpy.float32)
2512 2525 self.col1 = pyfits.Column(name=self.name, format=self.format, array=a1)
2513 2526 return self.col1
2514 2527
2515 2528 # def setColP(self,name,format,data):
2516 2529 # self.name=name
2517 2530 # self.format=format
2518 2531 # self.data=data
2519 2532 # a2=numpy.array([self.data],dtype=numpy.float32)
2520 2533 # self.col2 = pyfits.Column(name=self.name, format=self.format, array=a2)
2521 2534 # return self.col2
2522 2535
2523 2536 def writeHeader(self,):
2524 2537 pass
2525 2538
2526 2539 def writeData(self,name,format,data):
2527 2540 self.name=name
2528 2541 self.format=format
2529 2542 self.data=data
2530 2543 a2=numpy.array([self.data],dtype=numpy.float32)
2531 2544 self.col2 = pyfits.Column(name=self.name, format=self.format, array=a2)
2532 2545 return self.col2
2533 2546
2534 2547 def cFImage(self,n):
2535 2548 self.hdu= pyfits.PrimaryHDU(n)
2536 2549 return self.hdu
2537 2550
2538 2551 def Ctable(self,col1,col2,col3,col4,col5,col6,col7,col8,col9):
2539 2552 self.cols=pyfits.ColDefs( [col1,col2,col3,col4,col5,col6,col7,col8,col9])
2540 2553 self.tbhdu = pyfits.new_table(self.cols)
2541 2554 return self.tbhdu
2542 2555
2543 2556 def CFile(self,hdu,tbhdu):
2544 2557 self.thdulist=pyfits.HDUList([hdu,tbhdu])
2545 2558
2546 2559 def wFile(self,filename):
2547 2560 self.thdulist.writeto(filename) No newline at end of file
@@ -1,519 +1,519
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JROHeaderIO.py 151 2012-10-31 19:00:51Z murco $
5 5 '''
6 6 import sys
7 7 import numpy
8 8 import copy
9 9 import datetime
10 10
11 11 class Header:
12 12
13 13 def __init__(self):
14 14 raise
15 15
16 16 def copy(self):
17 17 return copy.deepcopy(self)
18 18
19 19 def read():
20 20 pass
21 21
22 22 def write():
23 23 pass
24 24
25 25 def printInfo(self):
26 26
27 27 for key in self.__dict__.keys():
28 28 print "%s = %s" %(key, self.__dict__[key])
29 29
30 30 class BasicHeader(Header):
31 31
32 32 size = None
33 33 version = None
34 34 dataBlock = None
35 35 utc = None
36 36 miliSecond = None
37 37 timeZone = None
38 38 dstFlag = None
39 39 errorCount = None
40 40 struct = None
41 41 datatime = None
42 42
43 43 def __init__(self):
44 44
45 45 self.size = 0
46 46 self.version = 0
47 47 self.dataBlock = 0
48 48 self.utc = 0
49 49 self.miliSecond = 0
50 50 self.timeZone = 0
51 51 self.dstFlag = 0
52 52 self.errorCount = 0
53 53 self.struct = numpy.dtype([
54 54 ('nSize','<u4'),
55 55 ('nVersion','<u2'),
56 56 ('nDataBlockId','<u4'),
57 57 ('nUtime','<u4'),
58 58 ('nMilsec','<u2'),
59 59 ('nTimezone','<i2'),
60 60 ('nDstflag','<i2'),
61 61 ('nErrorCount','<u4')
62 62 ])
63 63
64 64
65 65 def read(self, fp):
66 66 try:
67 67 header = numpy.fromfile(fp, self.struct,1)
68 68 self.size = int(header['nSize'][0])
69 69 self.version = int(header['nVersion'][0])
70 70 self.dataBlock = int(header['nDataBlockId'][0])
71 71 self.utc = int(header['nUtime'][0])
72 72 self.miliSecond = int(header['nMilsec'][0])
73 73 self.timeZone = int(header['nTimezone'][0])
74 74 self.dstFlag = int(header['nDstflag'][0])
75 75 self.errorCount = int(header['nErrorCount'][0])
76 76
77 77 self.datatime = datetime.datetime.utcfromtimestamp(self.utc)
78 78 except Exception, e:
79 79 print "BasicHeader: " + e
80 80 return 0
81 81
82 82 return 1
83 83
84 84 def write(self, fp):
85 85 headerTuple = (self.size,self.version,self.dataBlock,self.utc,self.miliSecond,self.timeZone,self.dstFlag,self.errorCount)
86 86 header = numpy.array(headerTuple,self.struct)
87 87 header.tofile(fp)
88 88
89 89 return 1
90 90
91 91 class SystemHeader(Header):
92 92
93 93 size = None
94 94 nSamples = None
95 95 nProfiles = None
96 96 nChannels = None
97 97 adcResolution = None
98 98 pciDioBusWidth = None
99 99 struct = None
100 100
101 101 def __init__(self):
102 102 self.size = 0
103 103 self.nSamples = 0
104 104 self.nProfiles = 0
105 105 self.nChannels = 0
106 106 self.adcResolution = 0
107 107 self.pciDioBusWidth = 0
108 108 self.struct = numpy.dtype([
109 109 ('nSize','<u4'),
110 110 ('nNumSamples','<u4'),
111 111 ('nNumProfiles','<u4'),
112 112 ('nNumChannels','<u4'),
113 113 ('nADCResolution','<u4'),
114 114 ('nPCDIOBusWidth','<u4'),
115 115 ])
116 116
117 117
118 118 def read(self, fp):
119 119 try:
120 120 header = numpy.fromfile(fp,self.struct,1)
121 121 self.size = header['nSize'][0]
122 122 self.nSamples = header['nNumSamples'][0]
123 123 self.nProfiles = header['nNumProfiles'][0]
124 124 self.nChannels = header['nNumChannels'][0]
125 125 self.adcResolution = header['nADCResolution'][0]
126 126 self.pciDioBusWidth = header['nPCDIOBusWidth'][0]
127 127
128 128 except Exception, e:
129 129 print "SystemHeader: " + e
130 130 return 0
131 131
132 132 return 1
133 133
134 134 def write(self, fp):
135 135 headerTuple = (self.size,self.nSamples,self.nProfiles,self.nChannels,self.adcResolution,self.pciDioBusWidth)
136 136 header = numpy.array(headerTuple,self.struct)
137 137 header.tofile(fp)
138 138
139 139 return 1
140 140
141 141 class RadarControllerHeader(Header):
142 142
143 143 size = None
144 144 expType = None
145 145 nTx = None
146 146 ipp = None
147 147 txA = None
148 148 txB = None
149 149 nWindows = None
150 150 numTaus = None
151 151 codeType = None
152 152 line6Function = None
153 153 line5Function = None
154 154 fClock = None
155 155 prePulseBefore = None
156 156 prePulserAfter = None
157 157 rangeIpp = None
158 158 rangeTxA = None
159 159 rangeTxB = None
160 160 struct = None
161 161
162 162 def __init__(self):
163 163 self.size = 0
164 164 self.expType = 0
165 165 self.nTx = 0
166 166 self.ipp = 0
167 167 self.txA = 0
168 168 self.txB = 0
169 169 self.nWindows = 0
170 170 self.numTaus = 0
171 171 self.codeType = 0
172 172 self.line6Function = 0
173 173 self.line5Function = 0
174 174 self.fClock = 0
175 175 self.prePulseBefore = 0
176 176 self.prePulserAfter = 0
177 177 self.rangeIpp = 0
178 178 self.rangeTxA = 0
179 179 self.rangeTxB = 0
180 180 self.struct = numpy.dtype([
181 181 ('nSize','<u4'),
182 182 ('nExpType','<u4'),
183 183 ('nNTx','<u4'),
184 184 ('fIpp','<f4'),
185 185 ('fTxA','<f4'),
186 186 ('fTxB','<f4'),
187 187 ('nNumWindows','<u4'),
188 188 ('nNumTaus','<u4'),
189 189 ('nCodeType','<u4'),
190 190 ('nLine6Function','<u4'),
191 191 ('nLine5Function','<u4'),
192 192 ('fClock','<f4'),
193 193 ('nPrePulseBefore','<u4'),
194 194 ('nPrePulseAfter','<u4'),
195 195 ('sRangeIPP','<a20'),
196 196 ('sRangeTxA','<a20'),
197 197 ('sRangeTxB','<a20'),
198 198 ])
199 199
200 200 self.samplingWindowStruct = numpy.dtype([('h0','<f4'),('dh','<f4'),('nsa','<u4')])
201 201
202 202 self.samplingWindow = None
203 203 self.nHeights = None
204 204 self.firstHeight = None
205 205 self.deltaHeight = None
206 206 self.samplesWin = None
207 207
208 208 self.nCode = None
209 209 self.nBaud = None
210 210 self.code = None
211 211 self.flip1 = None
212 212 self.flip2 = None
213 213
214 214 self.dynamic = numpy.array([],numpy.dtype('byte'))
215 215
216 216
217 217 def read(self, fp):
218 218 try:
219 219 startFp = fp.tell()
220 220 header = numpy.fromfile(fp,self.struct,1)
221 221 self.size = int(header['nSize'][0])
222 222 self.expType = int(header['nExpType'][0])
223 223 self.nTx = int(header['nNTx'][0])
224 224 self.ipp = float(header['fIpp'][0])
225 225 self.txA = float(header['fTxA'][0])
226 226 self.txB = float(header['fTxB'][0])
227 227 self.nWindows = int(header['nNumWindows'][0])
228 228 self.numTaus = int(header['nNumTaus'][0])
229 229 self.codeType = int(header['nCodeType'][0])
230 230 self.line6Function = int(header['nLine6Function'][0])
231 231 self.line5Function = int(header['nLine5Function'][0])
232 232 self.fClock = float(header['fClock'][0])
233 233 self.prePulseBefore = int(header['nPrePulseBefore'][0])
234 234 self.prePulserAfter = int(header['nPrePulseAfter'][0])
235 235 self.rangeIpp = header['sRangeIPP'][0]
236 236 self.rangeTxA = header['sRangeTxA'][0]
237 237 self.rangeTxB = header['sRangeTxB'][0]
238 238 # jump Dynamic Radar Controller Header
239 239 jumpFp = self.size - 116
240 240 self.dynamic = numpy.fromfile(fp,numpy.dtype('byte'),jumpFp)
241 241 #pointer backward to dynamic header and read
242 242 backFp = fp.tell() - jumpFp
243 243 fp.seek(backFp)
244 244
245 245 self.samplingWindow = numpy.fromfile(fp,self.samplingWindowStruct,self.nWindows)
246 246 self.nHeights = int(numpy.sum(self.samplingWindow['nsa']))
247 247 self.firstHeight = self.samplingWindow['h0']
248 248 self.deltaHeight = self.samplingWindow['dh']
249 249 self.samplesWin = self.samplingWindow['nsa']
250 250
251 251 self.Taus = numpy.fromfile(fp,'<f4',self.numTaus)
252 252
253 253 if self.codeType != 0:
254 254 self.nCode = int(numpy.fromfile(fp,'<u4',1))
255 255 self.nBaud = int(numpy.fromfile(fp,'<u4',1))
256 256 self.code = numpy.empty([self.nCode,self.nBaud],dtype='u1')
257 257 tempList = []
258 258 for ic in range(self.nCode):
259 259 temp = numpy.fromfile(fp,'u1',4*int(numpy.ceil(self.nBaud/32.)))
260 260 tempList.append(temp)
261 261 self.code[ic] = numpy.unpackbits(temp[::-1])[-1*self.nBaud:]
262 262 self.code = 2.0*self.code - 1.0
263 263
264 264 if self.line5Function == RCfunction.FLIP:
265 265 self.flip1 = numpy.fromfile(fp,'<u4',1)
266 266
267 267 if self.line6Function == RCfunction.FLIP:
268 268 self.flip2 = numpy.fromfile(fp,'<u4',1)
269 269
270 270 endFp = self.size + startFp
271 271 jumpFp = endFp - fp.tell()
272 272 if jumpFp > 0:
273 273 fp.seek(jumpFp)
274 274
275 275 except Exception, e:
276 276 print "RadarControllerHeader: " + e
277 277 return 0
278 278
279 279 return 1
280 280
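    # Decoding note (illustration only): each code word is stored in
    # 4*ceil(nBaud/32) bytes. read() above unpacks the bits of the reversed
    # byte sequence, keeps the last nBaud bits and maps {0, 1} to {-1, +1}
    # via 2*code - 1. e.g. for nBaud = 13 one 4-byte word is read, unpackbits()
    # yields 32 bits and only the trailing 13 are kept as the code sequence.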
281 281 def write(self, fp):
282 282 headerTuple = (self.size,
283 283 self.expType,
284 284 self.nTx,
285 285 self.ipp,
286 286 self.txA,
287 287 self.txB,
288 288 self.nWindows,
289 289 self.numTaus,
290 290 self.codeType,
291 291 self.line6Function,
292 292 self.line5Function,
293 293 self.fClock,
294 294 self.prePulseBefore,
295 295 self.prePulserAfter,
296 296 self.rangeIpp,
297 297 self.rangeTxA,
298 298 self.rangeTxB)
299 299
300 300 header = numpy.array(headerTuple,self.struct)
301 301 header.tofile(fp)
302 302
303 303 dynamic = self.dynamic
304 304 dynamic.tofile(fp)
305 305
306 306 return 1
307 307
308 308
309 309
310 310 class ProcessingHeader(Header):
311 311
312 312 size = None
313 313 dtype = None
314 314 blockSize = None
315 315 profilesPerBlock = None
316 316 dataBlocksPerFile = None
317 317 nWindows = None
318 318 processFlags = None
319 319 nCohInt = None
320 320 nIncohInt = None
321 321 totalSpectra = None
322 322 struct = None
323 323 flag_dc = None
324 324 flag_cspc = None
325 325
326 326 def __init__(self):
327 327 self.size = 0
328 328 self.dtype = 0
329 329 self.blockSize = 0
330 330 self.profilesPerBlock = 0
331 331 self.dataBlocksPerFile = 0
332 332 self.nWindows = 0
333 333 self.processFlags = 0
334 334 self.nCohInt = 0
335 335 self.nIncohInt = 0
336 336 self.totalSpectra = 0
337 337 self.struct = numpy.dtype([
338 338 ('nSize','<u4'),
339 339 ('nDataType','<u4'),
340 340 ('nSizeOfDataBlock','<u4'),
341 341 ('nProfilesperBlock','<u4'),
342 342 ('nDataBlocksperFile','<u4'),
343 343 ('nNumWindows','<u4'),
344 344 ('nProcessFlags','<u4'),
345 345 ('nCoherentIntegrations','<u4'),
346 346 ('nIncoherentIntegrations','<u4'),
347 347 ('nTotalSpectra','<u4')
348 348 ])
349 349 self.samplingWindow = 0
350 350 self.structSamplingWindow = numpy.dtype([('h0','<f4'),('dh','<f4'),('nsa','<u4')])
351 351 self.nHeights = 0
352 352 self.firstHeight = 0
353 353 self.deltaHeight = 0
354 354 self.samplesWin = 0
355 355 self.spectraComb = 0
356 356 self.nCode = None
357 357 self.code = None
358 358 self.nBaud = None
359 359 self.shif_fft = False
360 360 self.flag_dc = False
361 361 self.flag_cspc = False
362 362
363 363 def read(self, fp):
364 364 try:
365 365 header = numpy.fromfile(fp,self.struct,1)
366 366 self.size = int(header['nSize'][0])
367 367 self.dtype = int(header['nDataType'][0])
368 368 self.blockSize = int(header['nSizeOfDataBlock'][0])
369 369 self.profilesPerBlock = int(header['nProfilesperBlock'][0])
370 370 self.dataBlocksPerFile = int(header['nDataBlocksperFile'][0])
371 371 self.nWindows = int(header['nNumWindows'][0])
372 self.processFlags = int(header['nProcessFlags'])
372 self.processFlags = header['nProcessFlags']
373 373 self.nCohInt = int(header['nCoherentIntegrations'][0])
374 374 self.nIncohInt = int(header['nIncoherentIntegrations'][0])
375 375 self.totalSpectra = int(header['nTotalSpectra'][0])
376 376 self.samplingWindow = numpy.fromfile(fp,self.structSamplingWindow,self.nWindows)
377 377 self.nHeights = int(numpy.sum(self.samplingWindow['nsa']))
378 378 self.firstHeight = float(self.samplingWindow['h0'][0])
379 379 self.deltaHeight = float(self.samplingWindow['dh'][0])
380 380 self.samplesWin = self.samplingWindow['nsa']
381 381 self.spectraComb = numpy.fromfile(fp,'u1',2*self.totalSpectra)
382 382
383 383 if ((self.processFlags & PROCFLAG.DEFINE_PROCESS_CODE) == PROCFLAG.DEFINE_PROCESS_CODE):
384 384 self.nCode = int(numpy.fromfile(fp,'<u4',1))
385 385 self.nBaud = int(numpy.fromfile(fp,'<u4',1))
386 386 self.code = numpy.fromfile(fp,'<f4',self.nCode*self.nBaud).reshape(self.nBaud,self.nCode)
387 387
388 388 if ((self.processFlags & PROCFLAG.SHIFT_FFT_DATA) == PROCFLAG.SHIFT_FFT_DATA):
389 389 self.shif_fft = True
390 390 else:
391 391 self.shif_fft = False
392 392
393 393 if ((self.processFlags & PROCFLAG.SAVE_CHANNELS_DC) == PROCFLAG.SAVE_CHANNELS_DC):
394 394 self.flag_dc = True
395 395
396 396 nChannels = 0
397 397 nPairs = 0
398 398 pairList = []
399 399
400 400 for i in range( 0, self.totalSpectra*2, 2 ):
401 401 if self.spectraComb[i] == self.spectraComb[i+1]:
402 402 nChannels = nChannels + 1 #pair of equal channels (self spectrum)
403 403 else:
404 404 nPairs = nPairs + 1 #pair of different channels (cross spectrum)
405 405 pairList.append( (self.spectraComb[i], self.spectraComb[i+1]) )
406 406
407 407 self.flag_cspc = False
408 408 if nPairs > 0:
409 409 self.flag_cspc = True
410 410
411 411 except Exception, e:
412 412 print "ProcessingHeader: " + e
413 413 return 0
414 414
415 415 return 1
416 416
417 417 def write(self, fp):
418 418 headerTuple = (self.size,
419 419 self.dtype,
420 420 self.blockSize,
421 421 self.profilesPerBlock,
422 422 self.dataBlocksPerFile,
423 423 self.nWindows,
424 424 self.processFlags,
425 425 self.nCohInt,
426 426 self.nIncohInt,
427 427 self.totalSpectra)
428 428
429 429 header = numpy.array(headerTuple,self.struct)
430 430 header.tofile(fp)
431 431
432 432 if self.nWindows != 0:
433 433 sampleWindowTuple = (self.firstHeight,self.deltaHeight,self.samplesWin)
434 434 samplingWindow = numpy.array(sampleWindowTuple,self.structSamplingWindow)
435 435 samplingWindow.tofile(fp)
436 436
437 437
438 438 if self.totalSpectra != 0:
439 439 spectraComb = numpy.array([],numpy.dtype('u1'))
440 440 spectraComb = self.spectraComb
441 441 spectraComb.tofile(fp)
442 442
443 443
444 444 if self.processFlags & PROCFLAG.DEFINE_PROCESS_CODE == PROCFLAG.DEFINE_PROCESS_CODE:
445 445 nCode = numpy.array(self.nCode, '<u4') #test with data that actually stores a code; this path has not been exercised yet
446 446 nCode.tofile(fp)
447 447
448 448 nBaud = numpy.array(self.nBaud, '<u4')
449 449 nBaud.tofile(fp)
450 450
451 451 code = self.code.reshape(self.nCode*self.nBaud)
452 452 code.tofile(fp)
453 453
454 454 return 1
455 455
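# --- Illustrative sketch (not part of the original module) --------------------
# The header classes above are meant to be read back to back from the start of
# a raw file. The hypothetical helper below shows that sequence, assuming the
# usual on-disk order (basic, system, radar controller, processing headers)
# consumed by the readers in jrodataIO; the file name is a placeholder.

def readFirstHeaders(filename):

    fp = open(filename, 'rb')

    basicHeaderObj = BasicHeader()
    systemHeaderObj = SystemHeader()
    radarControllerHeaderObj = RadarControllerHeader()
    processingHeaderObj = ProcessingHeader()

    for headerObj in (basicHeaderObj, systemHeaderObj, radarControllerHeaderObj, processingHeaderObj):
        if not headerObj.read(fp):
            fp.close()
            return None

    fp.close()

    return basicHeaderObj, systemHeaderObj, radarControllerHeaderObj, processingHeaderObj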
456 456 class RCfunction:
457 457 NONE=0
458 458 FLIP=1
459 459 CODE=2
460 460 SAMPLING=3
461 461 LIN6DIV256=4
462 462 SYNCHRO=5
463 463
464 464 class nCodeType:
465 465 NONE=0
466 466 USERDEFINE=1
467 467 BARKER2=2
468 468 BARKER3=3
469 469 BARKER4=4
470 470 BARKER5=5
471 471 BARKER7=6
472 472 BARKER11=7
473 473 BARKER13=8
474 474 AC128=9
475 475 COMPLEMENTARYCODE2=10
476 476 COMPLEMENTARYCODE4=11
477 477 COMPLEMENTARYCODE8=12
478 478 COMPLEMENTARYCODE16=13
479 479 COMPLEMENTARYCODE32=14
480 480 COMPLEMENTARYCODE64=15
481 481 COMPLEMENTARYCODE128=16
482 482 CODE_BINARY28=17
483 483
484 484 class PROCFLAG:
485 485 COHERENT_INTEGRATION = numpy.uint32(0x00000001)
486 486 DECODE_DATA = numpy.uint32(0x00000002)
487 487 SPECTRA_CALC = numpy.uint32(0x00000004)
488 488 INCOHERENT_INTEGRATION = numpy.uint32(0x00000008)
489 489 POST_COHERENT_INTEGRATION = numpy.uint32(0x00000010)
490 490 SHIFT_FFT_DATA = numpy.uint32(0x00000020)
491 491
492 492 DATATYPE_CHAR = numpy.uint32(0x00000040)
493 493 DATATYPE_SHORT = numpy.uint32(0x00000080)
494 494 DATATYPE_LONG = numpy.uint32(0x00000100)
495 495 DATATYPE_INT64 = numpy.uint32(0x00000200)
496 496 DATATYPE_FLOAT = numpy.uint32(0x00000400)
497 497 DATATYPE_DOUBLE = numpy.uint32(0x00000800)
498 498
499 499 DATAARRANGE_CONTIGUOUS_CH = numpy.uint32(0x00001000)
500 500 DATAARRANGE_CONTIGUOUS_H = numpy.uint32(0x00002000)
501 501 DATAARRANGE_CONTIGUOUS_P = numpy.uint32(0x00004000)
502 502
503 503 SAVE_CHANNELS_DC = numpy.uint32(0x00008000)
504 504 DEFLIP_DATA = numpy.uint32(0x00010000)
505 505 DEFINE_PROCESS_CODE = numpy.uint32(0x00020000)
506 506
507 507 ACQ_SYS_NATALIA = numpy.uint32(0x00040000)
508 508 ACQ_SYS_ECHOTEK = numpy.uint32(0x00080000)
509 509 ACQ_SYS_ADRXD = numpy.uint32(0x000C0000)
510 510 ACQ_SYS_JULIA = numpy.uint32(0x00100000)
511 511 ACQ_SYS_XXXXXX = numpy.uint32(0x00140000)
512 512
513 513 EXP_NAME_ESP = numpy.uint32(0x00200000)
514 514 CHANNEL_NAMES_ESP = numpy.uint32(0x00400000)
515 515
516 516 OPERATION_MASK = numpy.uint32(0x0000003F)
517 517 DATATYPE_MASK = numpy.uint32(0x00000FC0)
518 518 DATAARRANGE_MASK = numpy.uint32(0x00007000)
519 519 ACQ_SYS_MASK = numpy.uint32(0x001C0000) No newline at end of file
@@ -1,818 +1,823
1 1 import numpy
2 2 import time, datetime
3 3 from graphics.figure import *
4 4
5 5 class CrossSpectraPlot(Figure):
6 6
7 7 __isConfig = None
8 8 __nsubplots = None
9 9
10 10 WIDTHPROF = None
11 11 HEIGHTPROF = None
12 12 PREFIX = 'cspc'
13 13
14 14 def __init__(self):
15 15
16 16 self.__isConfig = False
17 17 self.__nsubplots = 4
18 18
19 19 self.WIDTH = 300
20 20 self.HEIGHT = 400
21 21 self.WIDTHPROF = 0
22 22 self.HEIGHTPROF = 0
23 23
24 24 def getSubplots(self):
25 25
26 26 ncol = 4
27 27 nrow = self.nplots
28 28
29 29 return nrow, ncol
30 30
31 31 def setup(self, idfigure, nplots, wintitle, showprofile=True):
32 32
33 33 self.__showprofile = showprofile
34 34 self.nplots = nplots
35 35
36 36 ncolspan = 1
37 37 colspan = 1
38 38
39 39 self.createFigure(idfigure = idfigure,
40 40 wintitle = wintitle,
41 41 widthplot = self.WIDTH + self.WIDTHPROF,
42 42 heightplot = self.HEIGHT + self.HEIGHTPROF)
43 43
44 44 nrow, ncol = self.getSubplots()
45 45
46 46 counter = 0
47 47 for y in range(nrow):
48 48 for x in range(ncol):
49 49 self.addAxes(nrow, ncol*ncolspan, y, x*ncolspan, colspan, 1)
50 50
51 51 counter += 1
52 52
53 53 def run(self, dataOut, idfigure, wintitle="", pairsList=None, showprofile='True',
54 54 xmin=None, xmax=None, ymin=None, ymax=None, zmin=None, zmax=None,
55 55 save=False, figpath='./', figfile=None):
56 56
57 57 """
58 58
59 59 Input:
60 60 dataOut :
61 61 idfigure :
62 62 wintitle :
63 63 channelList :
64 64 showProfile :
65 65 xmin : None,
66 66 xmax : None,
67 67 ymin : None,
68 68 ymax : None,
69 69 zmin : None,
70 70 zmax : None
71 71 """
72 72
73 73 if pairsList == None:
74 74 pairsIndexList = dataOut.pairsIndexList
75 75 else:
76 76 pairsIndexList = []
77 77 for pair in pairsList:
78 78 if pair not in dataOut.pairsList:
79 79 raise ValueError, "Pair %s is not in dataOut.pairsList" %(pair)
80 80 pairsIndexList.append(dataOut.pairsList.index(pair))
81 81
82 82 if pairsIndexList == []:
83 83 return
84 84
85 85 if len(pairsIndexList) > 4:
86 86 pairsIndexList = pairsIndexList[0:4]
87 87
88 88 x = dataOut.getFreqRange(1)
89 89 y = dataOut.getHeiRange()
90 90 z = 10.*numpy.log10(dataOut.data_spc[:,:,:])
91 z = numpy.where(numpy.isfinite(z), z, numpy.NAN)
91 92 avg = numpy.average(numpy.abs(z), axis=1)
92 93
93 94 noise = dataOut.getNoise()
94 95
95 96 if not self.__isConfig:
96 97
97 98 nplots = len(pairsIndexList)
98 99
99 100 self.setup(idfigure=idfigure,
100 101 nplots=nplots,
101 102 wintitle=wintitle,
102 103 showprofile=showprofile)
103 104
104 105 if xmin == None: xmin = numpy.nanmin(x)
105 106 if xmax == None: xmax = numpy.nanmax(x)
106 107 if ymin == None: ymin = numpy.nanmin(y)
107 108 if ymax == None: ymax = numpy.nanmax(y)
108 109 if zmin == None: zmin = numpy.nanmin(avg)*0.9
109 110 if zmax == None: zmax = numpy.nanmax(avg)*0.9
110 111
111 112 self.__isConfig = True
112 113
113 114 thisDatetime = dataOut.datatime
114 115 title = "Cross-Spectra: %s" %(thisDatetime.strftime("%d-%b-%Y %H:%M:%S"))
115 116 xlabel = "Velocity (m/s)"
116 117 ylabel = "Range (Km)"
117 118
118 119 self.setWinTitle(title)
119 120
120 121 for i in range(self.nplots):
121 122 pair = dataOut.pairsList[pairsIndexList[i]]
122 123
123 124 title = "Channel %d: %4.2fdB" %(pair[0], noise[pair[0]])
124 125 z = 10.*numpy.log10(dataOut.data_spc[pair[0],:,:])
125 126 axes0 = self.axesList[i*self.__nsubplots]
126 127 axes0.pcolor(x, y, z,
127 128 xmin=xmin, xmax=xmax, ymin=ymin, ymax=ymax, zmin=zmin, zmax=zmax,
128 129 xlabel=xlabel, ylabel=ylabel, title=title,
129 130 ticksize=9, cblabel='')
130 131
131 132 title = "Channel %d: %4.2fdB" %(pair[1], noise[pair[1]])
132 133 z = 10.*numpy.log10(dataOut.data_spc[pair[1],:,:])
133 134 axes0 = self.axesList[i*self.__nsubplots+1]
134 135 axes0.pcolor(x, y, z,
135 136 xmin=xmin, xmax=xmax, ymin=ymin, ymax=ymax, zmin=zmin, zmax=zmax,
136 137 xlabel=xlabel, ylabel=ylabel, title=title,
137 138 ticksize=9, cblabel='')
138 139
139 140 coherenceComplex = dataOut.data_cspc[pairsIndexList[i],:,:]/numpy.sqrt(dataOut.data_spc[pair[0],:,:]*dataOut.data_spc[pair[1],:,:])
140 141 coherence = numpy.abs(coherenceComplex)
141 142 phase = numpy.arctan(-1*coherenceComplex.imag/coherenceComplex.real)*180/numpy.pi
142 143
143 144
144 145 title = "Coherence %d%d" %(pair[0], pair[1])
145 146 axes0 = self.axesList[i*self.__nsubplots+2]
146 147 axes0.pcolor(x, y, coherence,
147 148 xmin=xmin, xmax=xmax, ymin=ymin, ymax=ymax, zmin=0, zmax=1,
148 149 xlabel=xlabel, ylabel=ylabel, title=title,
149 150 ticksize=9, cblabel='')
150 151
151 152 title = "Phase %d%d" %(pair[0], pair[1])
152 153 axes0 = self.axesList[i*self.__nsubplots+3]
153 154 axes0.pcolor(x, y, phase,
154 155 xmin=xmin, xmax=xmax, ymin=ymin, ymax=ymax, zmin=-180, zmax=180,
155 156 xlabel=xlabel, ylabel=ylabel, title=title,
156 157 ticksize=9, cblabel='', colormap='RdBu')
157 158
158 159
159 160
160 161 self.draw()
161 162
162 163 if save:
163 164 date = thisDatetime.strftime("%Y%m%d")
164 165 if figfile == None:
165 166 figfile = self.getFilename(name = date)
166 167
167 168 self.saveFigure(figpath, figfile)
168 169
169 170
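For reference, the coherence and phase panels above come from the cross-spectrum normalized by the two self-spectra. A minimal standalone sketch of that computation (array names and shapes are illustrative, not the class attributes; note that numpy.arctan2 keeps the full -180..180 degree range, whereas the arctan(-imag/real) form used above folds the phase into -90..90):

import numpy

def coherence_and_phase(spc, cspc, pairIndex, pair):
    # spc: (nChannels, nFFTPoints, nHeights), cspc: (nPairs, nFFTPoints, nHeights)
    c = cspc[pairIndex] / numpy.sqrt(spc[pair[0]] * spc[pair[1]])
    coherence = numpy.abs(c)                               # magnitude, in [0, 1]
    phase = numpy.arctan2(-c.imag, c.real) * 180. / numpy.pi
    return coherence, phase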
170 171 class RTIPlot(Figure):
171 172
172 173 __isConfig = None
173 174 __nsubplots = None
174 175
175 176 WIDTHPROF = None
176 177 HEIGHTPROF = None
177 178 PREFIX = 'rti'
178 179
179 180 def __init__(self):
180 181
181 182 self.timerange = 24*60*60
182 183 self.__isConfig = False
183 184 self.__nsubplots = 1
184 185
185 186 self.WIDTH = 800
186 187 self.HEIGHT = 200
187 188 self.WIDTHPROF = 120
188 189 self.HEIGHTPROF = 0
189 190
190 191 def getSubplots(self):
191 192
192 193 ncol = 1
193 194 nrow = self.nplots
194 195
195 196 return nrow, ncol
196 197
197 198 def setup(self, idfigure, nplots, wintitle, showprofile=True):
198 199
199 200 self.__showprofile = showprofile
200 201 self.nplots = nplots
201 202
202 203 ncolspan = 1
203 204 colspan = 1
204 205 if showprofile:
205 206 ncolspan = 7
206 207 colspan = 6
207 208 self.__nsubplots = 2
208 209
209 210 self.createFigure(idfigure = idfigure,
210 211 wintitle = wintitle,
211 212 widthplot = self.WIDTH + self.WIDTHPROF,
212 213 heightplot = self.HEIGHT + self.HEIGHTPROF)
213 214
214 215 nrow, ncol = self.getSubplots()
215 216
216 217 counter = 0
217 218 for y in range(nrow):
218 219 for x in range(ncol):
219 220
220 221 if counter >= self.nplots:
221 222 break
222 223
223 224 self.addAxes(nrow, ncol*ncolspan, y, x*ncolspan, colspan, 1)
224 225
225 226 if showprofile:
226 227 self.addAxes(nrow, ncol*ncolspan, y, x*ncolspan+colspan, 1, 1)
227 228
228 229 counter += 1
229 230
230 231 def run(self, dataOut, idfigure, wintitle="", channelList=None, showprofile='True',
231 232 xmin=None, xmax=None, ymin=None, ymax=None, zmin=None, zmax=None,
232 233 timerange=None,
233 234 save=False, figpath='./', figfile=None):
234 235
235 236 """
236 237
237 238 Input:
238 239 dataOut :
239 240 idfigure :
240 241 wintitle :
241 242 channelList :
242 243 showProfile :
243 244 xmin : None,
244 245 xmax : None,
245 246 ymin : None,
246 247 ymax : None,
247 248 zmin : None,
248 249 zmax : None
249 250 """
250 251
251 252 if channelList == None:
252 253 channelIndexList = dataOut.channelIndexList
253 254 else:
254 255 channelIndexList = []
255 256 for channel in channelList:
256 257 if channel not in dataOut.channelList:
257 258 raise ValueError, "Channel %d is not in dataOut.channelList" %channel
258 259 channelIndexList.append(dataOut.channelList.index(channel))
259 260
260 261 if timerange != None:
261 262 self.timerange = timerange
262 263
263 264 tmin = None
264 265 tmax = None
265 266 x = dataOut.getTimeRange()
266 267 y = dataOut.getHeiRange()
267 268 z = 10.*numpy.log10(dataOut.data_spc[channelIndexList,:,:])
269 z = numpy.where(numpy.isfinite(z), z, numpy.NAN)
268 270 avg = numpy.average(z, axis=1)
269 271
270 272 noise = dataOut.getNoise()
271
273
272 274 if not self.__isConfig:
273 275
274 276 nplots = len(channelIndexList)
275 277
276 278 self.setup(idfigure=idfigure,
277 279 nplots=nplots,
278 280 wintitle=wintitle,
279 281 showprofile=showprofile)
280 282
281 283 tmin, tmax = self.getTimeLim(x, xmin, xmax)
282 284 if ymin == None: ymin = numpy.nanmin(y)
283 285 if ymax == None: ymax = numpy.nanmax(y)
284 286 if zmin == None: zmin = numpy.nanmin(avg)*0.9
285 287 if zmax == None: zmax = numpy.nanmax(avg)*0.9
286 288
289 self.name = dataOut.datatime.strftime("%Y%m%d_%H%M%S")
287 290 self.__isConfig = True
288 291
289 292 thisDatetime = dataOut.datatime
290 293 title = "RTI: %s" %(thisDatetime.strftime("%d-%b-%Y"))
291 294 xlabel = ""
292 295 ylabel = "Range (Km)"
293 296
294 297 self.setWinTitle(title)
295 298
296 299 for i in range(self.nplots):
297 300 title = "Channel %d: %s" %(dataOut.channelList[i], thisDatetime.strftime("%d-%b-%Y %H:%M:%S"))
298 301 axes = self.axesList[i*self.__nsubplots]
299 302 z = avg[i].reshape((1,-1))
300 303 axes.pcolor(x, y, z,
301 304 xmin=tmin, xmax=tmax, ymin=ymin, ymax=ymax, zmin=zmin, zmax=zmax,
302 305 xlabel=xlabel, ylabel=ylabel, title=title, rti=True, XAxisAsTime=True,
303 306 ticksize=9, cblabel='', cbsize="1%")
304 307
305 308 if self.__showprofile:
306 309 axes = self.axesList[i*self.__nsubplots +1]
307 310 axes.pline(avg[i], y,
308 311 xmin=zmin, xmax=zmax, ymin=ymin, ymax=ymax,
309 312 xlabel='dB', ylabel='', title='',
310 313 ytick_visible=False,
311 314 grid='x')
312 315
313 316 self.draw()
314 317
315 318 if save:
316 date = thisDatetime.strftime("%Y%m%d")
319
317 320 if figfile == None:
318 figfile = self.getFilename(name = date)
321 figfile = self.getFilename(name = self.name)
319 322
320 323 self.saveFigure(figpath, figfile)
321 324
322 325 if x[1] + (x[1]-x[0]) >= self.axesList[0].xmax:
323 326 self.__isConfig = False
324 327
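The RTI figure above appends one time column per call and starts a new figure once the next block would fall outside the configured window; a small sketch of that end-of-window test in isolation (x is the two-element time range returned by getTimeRange and xmax the right edge of the axis, both assumed to be in the same time units):

def window_is_full(x, xmax):
    # True when the next block, assumed to have the same width as the
    # current one, would extend past the right edge of the time axis
    block_width = x[1] - x[0]
    return x[1] + block_width >= xmax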
325 328 class SpectraPlot(Figure):
326 329
327 330 __isConfig = None
328 331 __nsubplots = None
329 332
330 333 WIDTHPROF = None
331 334 HEIGHTPROF = None
332 335 PREFIX = 'spc'
333 336
334 337 def __init__(self):
335 338
336 339 self.__isConfig = False
337 340 self.__nsubplots = 1
338 341
339 342 self.WIDTH = 300
340 343 self.HEIGHT = 400
341 344 self.WIDTHPROF = 120
342 345 self.HEIGHTPROF = 0
343 346
344 347 def getSubplots(self):
345 348
346 349 ncol = int(numpy.sqrt(self.nplots)+0.9)
347 350 nrow = int(self.nplots*1./ncol + 0.9)
348 351
349 352 return nrow, ncol
350 353
351 354 def setup(self, idfigure, nplots, wintitle, showprofile=True):
352 355
353 356 self.__showprofile = showprofile
354 357 self.nplots = nplots
355 358
356 359 ncolspan = 1
357 360 colspan = 1
358 361 if showprofile:
359 362 ncolspan = 3
360 363 colspan = 2
361 364 self.__nsubplots = 2
362 365
363 366 self.createFigure(idfigure = idfigure,
364 367 wintitle = wintitle,
365 368 widthplot = self.WIDTH + self.WIDTHPROF,
366 369 heightplot = self.HEIGHT + self.HEIGHTPROF)
367 370
368 371 nrow, ncol = self.getSubplots()
369 372
370 373 counter = 0
371 374 for y in range(nrow):
372 375 for x in range(ncol):
373 376
374 377 if counter >= self.nplots:
375 378 break
376 379
377 380 self.addAxes(nrow, ncol*ncolspan, y, x*ncolspan, colspan, 1)
378 381
379 382 if showprofile:
380 383 self.addAxes(nrow, ncol*ncolspan, y, x*ncolspan+colspan, 1, 1)
381 384
382 385 counter += 1
383 386
384 387 def run(self, dataOut, idfigure, wintitle="", channelList=None, showprofile='True',
385 388 xmin=None, xmax=None, ymin=None, ymax=None, zmin=None, zmax=None,
386 389 save=False, figpath='./', figfile=None):
387 390
388 391 """
389 392
390 393 Input:
391 394 dataOut :
392 395 idfigure :
393 396 wintitle :
394 397 channelList :
395 398 showProfile :
396 399 xmin : None,
397 400 xmax : None,
398 401 ymin : None,
399 402 ymax : None,
400 403 zmin : None,
401 404 zmax : None
402 405 """
403 406
404 407 if channelList == None:
405 408 channelIndexList = dataOut.channelIndexList
406 409 else:
407 410 channelIndexList = []
408 411 for channel in channelList:
409 412 if channel not in dataOut.channelList:
410 413 raise ValueError, "Channel %d is not in dataOut.channelList" %channel
411 414 channelIndexList.append(dataOut.channelList.index(channel))
412 415
413 416 x = dataOut.getVelRange(1)
414 417 y = dataOut.getHeiRange()
418
415 419 z = 10.*numpy.log10(dataOut.data_spc[channelIndexList,:,:])
420 z = numpy.where(numpy.isfinite(z), z, numpy.NAN)
416 421 avg = numpy.average(z, axis=1)
417 422
418 423 noise = dataOut.getNoise()
419 424
420 425 if not self.__isConfig:
421 426
422 427 nplots = len(channelIndexList)
423 428
424 429 self.setup(idfigure=idfigure,
425 430 nplots=nplots,
426 431 wintitle=wintitle,
427 432 showprofile=showprofile)
428 433
429 434 if xmin == None: xmin = numpy.nanmin(x)
430 435 if xmax == None: xmax = numpy.nanmax(x)
431 436 if ymin == None: ymin = numpy.nanmin(y)
432 437 if ymax == None: ymax = numpy.nanmax(y)
433 438 if zmin == None: zmin = numpy.nanmin(avg)*0.9
434 439 if zmax == None: zmax = numpy.nanmax(avg)*0.9
435 440
436 441 self.__isConfig = True
437 442
438 443 thisDatetime = dataOut.datatime
439 444 title = "Spectra: %s" %(thisDatetime.strftime("%d-%b-%Y %H:%M:%S"))
440 445 xlabel = "Velocity (m/s)"
441 446 ylabel = "Range (Km)"
442 447
443 448 self.setWinTitle(title)
444 449
445 450 for i in range(self.nplots):
446 451 title = "Channel %d: %4.2fdB" %(dataOut.channelList[i], noise[i])
447 452 axes = self.axesList[i*self.__nsubplots]
448 453 axes.pcolor(x, y, z[i,:,:],
449 454 xmin=xmin, xmax=xmax, ymin=ymin, ymax=ymax, zmin=zmin, zmax=zmax,
450 455 xlabel=xlabel, ylabel=ylabel, title=title,
451 456 ticksize=9, cblabel='')
452 457
453 458 if self.__showprofile:
454 459 axes = self.axesList[i*self.__nsubplots +1]
455 460 axes.pline(avg[i], y,
456 461 xmin=zmin, xmax=zmax, ymin=ymin, ymax=ymax,
457 462 xlabel='dB', ylabel='', title='',
458 463 ytick_visible=False,
459 464 grid='x')
460 465
461 466 self.draw()
462 467
463 468 if save:
464 469 date = thisDatetime.strftime("%Y%m%d")
465 470 if figfile == None:
466 471 figfile = self.getFilename(name = date)
467 472
468 473 self.saveFigure(figpath, figfile)
469 474
470 475 class Scope(Figure):
471 476
472 477 __isConfig = None
473 478
474 479 def __init__(self):
475 480
476 481 self.__isConfig = False
477 482 self.WIDTH = 600
478 483 self.HEIGHT = 200
479 484
480 485 def getSubplots(self):
481 486
482 487 nrow = self.nplots
483 488 ncol = 3
484 489 return nrow, ncol
485 490
486 491 def setup(self, idfigure, nplots, wintitle):
487 492
493 self.nplots = nplots
494
488 495 self.createFigure(idfigure, wintitle)
489 496
490 497 nrow,ncol = self.getSubplots()
491 498 colspan = 3
492 499 rowspan = 1
493 500
494 501 for i in range(nplots):
495 502 self.addAxes(nrow, ncol, i, 0, colspan, rowspan)
496 503
497 self.nplots = nplots
504
498 505
499 506 def run(self, dataOut, idfigure, wintitle="", channelList=None,
500 507 xmin=None, xmax=None, ymin=None, ymax=None, save=False, filename=None):
501 508
502 509 """
503 510
504 511 Input:
505 512 dataOut :
506 513 idfigure :
507 514 wintitle :
508 515 channelList :
509 516 xmin : None,
510 517 xmax : None,
511 518 ymin : None,
512 519 ymax : None,
513 520 """
514 521
515 522 if channelList == None:
516 523 channelIndexList = dataOut.channelIndexList
517 524 else:
518 525 channelIndexList = []
519 526 for channel in channelList:
520 527 if channel not in dataOut.channelList:
521 528 raise ValueError, "Channel %d is not in dataOut.channelList" %channel
522 529 channelIndexList.append(dataOut.channelList.index(channel))
523 530
524 531 x = dataOut.heightList
525 y = dataOut.data[channelList,:] * numpy.conjugate(dataOut.data[channelList,:])
532 y = dataOut.data[channelIndexList,:] * numpy.conjugate(dataOut.data[channelIndexList,:])
526 533 y = y.real
527 534
528 noise = dataOut.getNoise()
529
530 535 if not self.__isConfig:
531 nplots = len(channelList)
536 nplots = len(channelIndexList)
532 537
533 538 self.setup(idfigure=idfigure,
534 539 nplots=nplots,
535 540 wintitle=wintitle)
536 541
537 542 if xmin == None: xmin = numpy.nanmin(x)
538 543 if xmax == None: xmax = numpy.nanmax(x)
539 544 if ymin == None: ymin = numpy.nanmin(y)
540 545 if ymax == None: ymax = numpy.nanmax(y)
541 546
542 547 self.__isConfig = True
543 548
544 549
545 550 thisDatetime = dataOut.datatime
546 551 title = "Scope: %s" %(thisDatetime.strftime("%d-%b-%Y %H:%M:%S"))
547 552 xlabel = "Range (Km)"
548 553 ylabel = "Intensity"
549 554
550 555 self.setWinTitle(title)
551 556
552 557 for i in range(len(self.axesList)):
553 title = "Channel %d: %4.2fdB" %(i, noise[i])
558 title = "Channel %d" %(i)
554 559 axes = self.axesList[i]
555 560 ychannel = y[i,:]
556 561 axes.pline(x, ychannel,
557 562 xmin=xmin, xmax=xmax, ymin=ymin, ymax=ymax,
558 563 xlabel=xlabel, ylabel=ylabel, title=title)
559 564
560 565 self.draw()
561 566
562 567 if save:
563 568 self.saveFigure(filename)
564 569
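The Scope intensity plotted above is simply the per-sample power of the complex voltage; as a one-line equivalent (illustrative helper, not part of the class):

import numpy

def voltage_power(data):
    # data: complex array (nChannels, nHeights); power = v * conj(v), kept real
    return (data * numpy.conjugate(data)).real   # identical to numpy.abs(data)**2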
565 570 class ProfilePlot(Figure):
566 571 __isConfig = None
567 572 __nsubplots = None
568 573
569 574 WIDTHPROF = None
570 575 HEIGHTPROF = None
571 576 PREFIX = 'spcprofile'
572 577
573 578 def __init__(self):
574 579 self.__isConfig = False
575 580 self.__nsubplots = 1
576 581
577 582 self.WIDTH = 300
578 583 self.HEIGHT = 500
579 584
580 585 def getSubplots(self):
581 586 ncol = 1
582 587 nrow = 1
583 588
584 589 return nrow, ncol
585 590
586 591 def setup(self, idfigure, nplots, wintitle):
587 592
588 593 self.nplots = nplots
589 594
590 595 ncolspan = 1
591 596 colspan = 1
592 597
593 598 self.createFigure(idfigure = idfigure,
594 599 wintitle = wintitle,
595 600 widthplot = self.WIDTH,
596 601 heightplot = self.HEIGHT)
597 602
598 603 nrow, ncol = self.getSubplots()
599 604
600 605 counter = 0
601 606 for y in range(nrow):
602 607 for x in range(ncol):
603 608 self.addAxes(nrow, ncol*ncolspan, y, x*ncolspan, colspan, 1)
604 609
605 610 def run(self, dataOut, idfigure, wintitle="", channelList=None,
606 611 xmin=None, xmax=None, ymin=None, ymax=None,
607 612 save=False, figpath='./', figfile=None):
608 613
609 614 if channelList == None:
610 615 channelIndexList = dataOut.channelIndexList
611 616 channelList = dataOut.channelList
612 617 else:
613 618 channelIndexList = []
614 619 for channel in channelList:
615 620 if channel not in dataOut.channelList:
616 621 raise ValueError, "Channel %d is not in dataOut.channelList" %channel
617 622 channelIndexList.append(dataOut.channelList.index(channel))
618 623
619 624
620 625 y = dataOut.getHeiRange()
621 626 x = 10.*numpy.log10(dataOut.data_spc[channelIndexList,:,:])
622 627 avg = numpy.average(x, axis=1)
623 628
624 629
625 630 if not self.__isConfig:
626 631
627 632 nplots = 1
628 633
629 634 self.setup(idfigure=idfigure,
630 635 nplots=nplots,
631 636 wintitle=wintitle)
632 637
633 638 if ymin == None: ymin = numpy.nanmin(y)
634 639 if ymax == None: ymax = numpy.nanmax(y)
635 640 if xmin == None: xmin = numpy.nanmin(avg)*0.9
636 641 if xmax == None: xmax = numpy.nanmax(avg)*0.9
637 642
638 643 self.__isConfig = True
639 644
640 645 thisDatetime = dataOut.datatime
641 646 title = "Power Profile"
642 647 xlabel = "dB"
643 648 ylabel = "Range (Km)"
644 649
645 650 self.setWinTitle(title)
646 651
647 652
648 653 title = "Power Profile: %s" %(thisDatetime.strftime("%d-%b-%Y %H:%M:%S"))
649 654 axes = self.axesList[0]
650 655
651 656 legendlabels = ["channel %d"%x for x in channelList]
652 657 axes.pmultiline(avg, y,
653 658 xmin=xmin, xmax=xmax, ymin=ymin, ymax=ymax,
654 659 xlabel=xlabel, ylabel=ylabel, title=title, legendlabels=legendlabels,
655 660 ytick_visible=True, nxticks=5,
656 661 grid='x')
657 662
658 663 self.draw()
659 664
660 665 if save:
661 666 date = thisDatetime.strftime("%Y%m%d")
662 667 if figfile == None:
663 668 figfile = self.getFilename(name = date)
664 669
665 670 self.saveFigure(figpath, figfile)
666 671
667 672 class CoherencePlot(Figure):
668 673 __isConfig = None
669 674 __nsubplots = None
670 675
671 676 WIDTHPROF = None
672 677 HEIGHTPROF = None
673 678 PREFIX = 'coherencemap'
674 679
675 680 def __init__(self):
676 681 self.timerange = 24*60*60
677 682 self.__isConfig = False
678 683 self.__nsubplots = 1
679 684
680 685 self.WIDTH = 800
681 686 self.HEIGHT = 200
682 687 self.WIDTHPROF = 120
683 688 self.HEIGHTPROF = 0
684 689
685 690 def getSubplots(self):
686 691 ncol = 1
687 692 nrow = self.nplots*2
688 693
689 694 return nrow, ncol
690 695
691 696 def setup(self, idfigure, nplots, wintitle, showprofile=True):
692 697 self.__showprofile = showprofile
693 698 self.nplots = nplots
694 699
695 700 ncolspan = 1
696 701 colspan = 1
697 702 if showprofile:
698 703 ncolspan = 7
699 704 colspan = 6
700 705 self.__nsubplots = 2
701 706
702 707 self.createFigure(idfigure = idfigure,
703 708 wintitle = wintitle,
704 709 widthplot = self.WIDTH + self.WIDTHPROF,
705 710 heightplot = self.HEIGHT + self.HEIGHTPROF)
706 711
707 712 nrow, ncol = self.getSubplots()
708 713
709 714 for y in range(nrow):
710 715 for x in range(ncol):
711 716
712 717 self.addAxes(nrow, ncol*ncolspan, y, x*ncolspan, colspan, 1)
713 718
714 719 if showprofile:
715 720 self.addAxes(nrow, ncol*ncolspan, y, x*ncolspan+colspan, 1, 1)
716 721
717 722 def run(self, dataOut, idfigure, wintitle="", pairsList=None, showprofile='True',
718 723 xmin=None, xmax=None, ymin=None, ymax=None, zmin=None, zmax=None,
719 724 timerange=None,
720 725 save=False, figpath='./', figfile=None):
721 726
722 727 if pairsList == None:
723 728 pairsIndexList = dataOut.pairsIndexList
724 729 else:
725 730 pairsIndexList = []
726 731 for pair in pairsList:
727 732 if pair not in dataOut.pairsList:
728 733 raise ValueError, "Pair %s is not in dataOut.pairsList" %(pair)
729 734 pairsIndexList.append(dataOut.pairsList.index(pair))
730 735
731 736 if timerange != None:
732 737 self.timerange = timerange
733 738
734 739 tmin = None
735 740 tmax = None
736 741 x = dataOut.getTimeRange()
737 742 y = dataOut.getHeiRange()
738 743
739 744 if not self.__isConfig:
740 745 nplots = len(pairsIndexList)
741 746 self.setup(idfigure=idfigure,
742 747 nplots=nplots,
743 748 wintitle=wintitle,
744 749 showprofile=showprofile)
745 750
746 751 tmin, tmax = self.getTimeLim(x, xmin, xmax)
747 752 if ymin == None: ymin = numpy.nanmin(y)
748 753 if ymax == None: ymax = numpy.nanmax(y)
749 754
750 755 self.__isConfig = True
751 756
752 757 thisDatetime = dataOut.datatime
753 758 title = "CoherenceMap: %s" %(thisDatetime.strftime("%d-%b-%Y"))
754 759 xlabel = ""
755 760 ylabel = "Range (Km)"
756 761
757 762 self.setWinTitle(title)
758 763
759 764 for i in range(self.nplots):
760 765
761 766 pair = dataOut.pairsList[pairsIndexList[i]]
762 767 coherenceComplex = dataOut.data_cspc[pairsIndexList[i],:,:]/numpy.sqrt(dataOut.data_spc[pair[0],:,:]*dataOut.data_spc[pair[1],:,:])
763 768 coherence = numpy.abs(coherenceComplex)
764 769 avg = numpy.average(coherence, axis=0)
765 770 z = avg.reshape((1,-1))
766 771
767 772 counter = 0
768 773
769 774 title = "Coherence %d%d: %s" %(pair[0], pair[1], thisDatetime.strftime("%d-%b-%Y %H:%M:%S"))
770 775 axes = self.axesList[i*self.__nsubplots*2]
771 776 axes.pcolor(x, y, z,
772 777 xmin=tmin, xmax=tmax, ymin=ymin, ymax=ymax, zmin=0, zmax=1,
773 778 xlabel=xlabel, ylabel=ylabel, title=title, rti=True, XAxisAsTime=True,
774 779 ticksize=9, cblabel='', cbsize="1%")
775 780
776 781 if self.__showprofile:
777 782 counter += 1
778 783 axes = self.axesList[i*self.__nsubplots*2 + counter]
779 784 axes.pline(avg, y,
780 785 xmin=0, xmax=1, ymin=ymin, ymax=ymax,
781 786 xlabel='', ylabel='', title='', ticksize=7,
782 787 ytick_visible=False, nxticks=5,
783 788 grid='x')
784 789
785 790 counter += 1
786 791 phase = numpy.arctan(-1*coherenceComplex.imag/coherenceComplex.real)*180/numpy.pi
787 792 avg = numpy.average(phase, axis=0)
788 793 z = avg.reshape((1,-1))
789 794
790 795 title = "Phase %d%d: %s" %(pair[0], pair[1], thisDatetime.strftime("%d-%b-%Y %H:%M:%S"))
791 796 axes = self.axesList[i*self.__nsubplots*2 + counter]
792 797 axes.pcolor(x, y, z,
793 798 xmin=tmin, xmax=tmax, ymin=ymin, ymax=ymax, zmin=-180, zmax=180,
794 799 xlabel=xlabel, ylabel=ylabel, title=title, rti=True, XAxisAsTime=True,
795 800 ticksize=9, cblabel='', colormap='RdBu', cbsize="1%")
796 801
797 802 if self.__showprofile:
798 803 counter += 1
799 804 axes = self.axesList[i*self.__nsubplots*2 + counter]
800 805 axes.pline(avg, y,
801 806 xmin=-180, xmax=180, ymin=ymin, ymax=ymax,
802 807 xlabel='', ylabel='', title='', ticksize=7,
803 808 ytick_visible=False, nxticks=4,
804 809 grid='x')
805 810
806 811 self.draw()
807 812
808 813 if save:
809 814 date = thisDatetime.strftime("%Y%m%d")
810 815 if figfile == None:
811 816 figfile = self.getFilename(name = date)
812 817
813 818 self.saveFigure(figpath, figfile)
814 819
815 820 if x[1] + (x[1]-x[0]) >= self.axesList[0].xmax:
816 821 self.__isConfig = False
817 822
818 823 No newline at end of file
@@ -1,1145 +1,1074
1 1 '''
2 2
3 3 $Author: dsuarez $
4 4 $Id: Processor.py 1 2012-11-12 18:56:07Z dsuarez $
5 5 '''
6 6 import os
7 7 import numpy
8 8 import datetime
9 9 import time
10 10
11 11 from jrodata import *
12 12 from jrodataIO import *
13 13 from jroplot import *
14 14
15 15 class ProcessingUnit:
16 16
17 17 """
18 18 This is the base class for data processing.
19 19
20 20 It provides the "call" method to invoke operations. Operations can be of two kinds:
21 21 - Internal methods (callMethod)
22 22 - Objects of the Operation class (callObject). Before being called, these objects
23 23 must have been added with the "addOperation" method.
24 24
25 25 """
26 26 # input data object (Voltage, Spectra or Correlation)
27 27 dataIn = None
28 28
29 29 # output data object (Voltage, Spectra or Correlation)
30 30 dataOut = None
31 31
32 32
33 33 objectDict = None
34 34
35 35 def __init__(self):
36 36
37 37 self.objectDict = {}
38 38
39 39 def init(self):
40 40
41 41 raise ValueError, "Not implemented"
42 42
43 43 def addOperation(self, object, objId):
44 44
45 45 """
46 46 Adds the object "object" to the object dictionary "self.objectDict" and returns the
47 47 identifier associated with this object.
48 48
49 49 Input:
50 50
51 51 object : object of the "Operation" class
52 52
53 53 Return:
54 54
55 55 objId : object identifier, needed to execute the operation
56 56 """
57 57
58 58 self.objectDict[objId] = object
59 59
60 60 return objId
61 61
62 62 def operation(self, **kwargs):
63 63
64 64 """
65 Direct operation on the data (dataout.data). The values of the
65 Direct operation on the data (dataOut.data). The values of the
66 66 attributes of the dataOut object must be updated
67 67
68 68 Input:
69 69
70 70 **kwargs : Dictionary of arguments for the function to execute
71 71 """
72 72
73 73 raise ValueError, "Not implemented"
74 74
75 75 def callMethod(self, name, **kwargs):
76 76
77 77 """
78 78 Executes the method named "name" of this class with the arguments **kwargs.
79 79
80 80 Input:
81 81 name : name of the method to execute
82 82
83 83 **kwargs : dictionary with the argument names and values for the function to execute.
84 84
85 85 """
86 86 if name != 'run':
87 87
88 88 if name == 'init' and self.dataIn.isEmpty():
89 89 self.dataOut.flagNoData = True
90 90 return False
91 91
92 92 if name != 'init' and self.dataOut.isEmpty():
93 93 return False
94 94
95 95 methodToCall = getattr(self, name)
96 96
97 97 methodToCall(**kwargs)
98 98
99 99 if name != 'run':
100 100 return True
101 101
102 102 if self.dataOut.isEmpty():
103 103 return False
104 104
105 105 return True
106 106
107 107 def callObject(self, objId, **kwargs):
108 108
109 109 """
110 110 Executes the operation associated with the object identifier "objId"
111 111
112 112 Input:
113 113
114 114 objId : identifier of the object to execute
115 115
116 116 **kwargs : dictionary with the argument names and values for the function to execute.
117 117
118 118 Return:
119 119
120 120 None
121 121 """
122 122
123 123 if self.dataOut.isEmpty():
124 124 return False
125 125
126 126 object = self.objectDict[objId]
127 127
128 128 object.run(self.dataOut, **kwargs)
129 129
130 130 return True
131 131
132 132 def call(self, operationConf, **kwargs):
133 133
134 134 """
135 135 Returns True if the operation "operationConf.name" is executed with the
136 136 arguments "**kwargs", and False if the operation was not executed.
137 137 The operation can be of two types:
138 138
139 139 1. A method of this class itself:
140 140
141 141 operation.type = "self"
142 142
143 143 2. The "run" method of an Operation object, or of a class derived from it:
144 144 operation.type = "other".
145 145
146 146 This Operation object must have been added beforehand with the "addOperation"
147 147 method and is identified by operation.id, the id of the operation.
148 148
149 149
150 150
151 151
152 152 Input:
153 153
154 154 operationConf : operation configuration object with the attributes: name, type and id.
155 155
156 156 """
157 157
158 158 if operationConf.type == 'self':
159 159 sts = self.callMethod(operationConf.name, **kwargs)
160 160
161 161 if operationConf.type == 'other':
162 162 sts = self.callObject(operationConf.id, **kwargs)
163 163
164 164 return sts
165 165
166 166 def setInput(self, dataIn):
167 167
168 168 self.dataIn = dataIn
169 169
170 170 def getOutput(self):
171 171
172 172 return self.dataOut
173 173
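As a rough usage sketch of the dispatch described in call(): operationConf only needs to expose name, type and id, so a small stand-in object is enough (the stub and the commented usage below are illustrative assumptions, not part of this module's API):

class OperationConf:
    # minimal configuration stub; only the attributes read by call() matter
    def __init__(self, name, type, id=None):
        self.name = name
        self.type = type
        self.id = id

# procUnit = VoltageProc()
# procUnit.setInput(voltageObj)
# procUnit.call(OperationConf('init', 'self'))                  # internal method
# objId = procUnit.addOperation(CohInt(), 1)                    # external Operation
# procUnit.call(OperationConf('run', 'other', id=objId), n=4)   # runs CohInt.run(dataOut, n=4)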
174 174 class Operation():
175 175
176 176 """
177 177 Base class for defining additional operations that can be added to the ProcessingUnit class
178 178 and that need to accumulate prior information about the data to be processed. Preferably use an
179 179 accumulation buffer inside this class.
180 180
181 181 Example: coherent integration, which needs the prior information of the last n profiles (buffer)
182 182
183 183 """
184 184
185 185 __buffer = None
186 186 __isConfig = False
187 187
188 188 def __init__(self):
189 189
190 190 pass
191 191
192 192 def run(self, dataIn, **kwargs):
193 193
194 194 """
195 195 Performs the required operations on dataIn.data and updates the attributes of the dataIn object.
196 196
197 197 Input:
198 198
199 199 dataIn : object of the JROData class
200 200
201 201 Return:
202 202
203 203 None
204 204
205 205 Affected:
206 206 __buffer : data reception buffer.
207 207
208 208 """
209 209
210 210 raise ValueError, "Not implemented"
211 211
212 212 class VoltageProc(ProcessingUnit):
213 213
214 214
215 215 def __init__(self):
216 216
217 217 self.objectDict = {}
218 218 self.dataOut = Voltage()
219 219
220 220 def init(self):
221 221
222 222 self.dataOut.copy(self.dataIn)
223 223 # There is no need to copy the dataIn attributes on every init();
224 224 # the copy should be made once per new data block
225 225
226 226 def selectChannels(self, channelList):
227 227
228 228 channelIndexList = []
229 229
230 230 for channel in channelList:
231 231 index = self.dataOut.channelList.index(channel)
232 232 channelIndexList.append(index)
233 233
234 234 self.selectChannelsByIndex(channelIndexList)
235 235
236 236 def selectChannelsByIndex(self, channelIndexList):
237 237 """
238 238 Selects a block of data by channel according to channelIndexList
239 239
240 240 Input:
241 241 channelIndexList : plain list of channel indexes to select, e.g. [2,3,7]
242 242
243 243 Affected:
244 244 self.dataOut.data
245 245 self.dataOut.channelIndexList
246 246 self.dataOut.nChannels
247 247 self.dataOut.m_ProcessingHeader.totalSpectra
248 248 self.dataOut.systemHeaderObj.numChannels
249 249 self.dataOut.m_ProcessingHeader.blockSize
250 250
251 251 Return:
252 252 None
253 253 """
254 254
255 255 for channelIndex in channelIndexList:
256 256 if channelIndex not in self.dataOut.channelIndexList:
257 257 print channelIndexList
258 258 raise ValueError, "The value %d in channelIndexList is not valid" %channelIndex
259 259
260 260 nChannels = len(channelIndexList)
261 261
262 262 data = self.dataOut.data[channelIndexList,:]
263 263
264 264 self.dataOut.data = data
265 265 self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
266 266 # self.dataOut.nChannels = nChannels
267 267
268 268 return 1
269 269
270 270 def selectHeights(self, minHei, maxHei):
271 271 """
272 272 Selects a block of data by a range of height values such that
273 273 minHei <= height <= maxHei
274 274
275 275 Input:
276 276 minHei : minimum height value to consider
277 277 maxHei : maximum height value to consider
278 278
279 279 Affected:
280 280 Several values are changed indirectly through the selectHeightsByIndex method
281 281
282 282 Return:
283 283 1 if the method executed successfully, otherwise 0
284 284 """
285 285 if (minHei < self.dataOut.heightList[0]) or (minHei > maxHei):
286 286 raise ValueError, "some value in (%d,%d) is not valid" % (minHei, maxHei)
287 287
288 288 if (maxHei > self.dataOut.heightList[-1]):
289 289 maxHei = self.dataOut.heightList[-1]
290 290 # raise ValueError, "some value in (%d,%d) is not valid" % (minHei, maxHei)
291 291
292 292 minIndex = 0
293 293 maxIndex = 0
294 294 data = self.dataOut.heightList
295 295
296 296 for i,val in enumerate(data):
297 297 if val < minHei:
298 298 continue
299 299 else:
300 300 minIndex = i;
301 301 break
302 302
303 303 for i,val in enumerate(data):
304 304 if val <= maxHei:
305 305 maxIndex = i;
306 306 else:
307 307 break
308 308
309 309 self.selectHeightsByIndex(minIndex, maxIndex)
310 310
311 311 return 1
312 312
313 313
314 314 def selectHeightsByIndex(self, minIndex, maxIndex):
315 315 """
316 316 Selects a block of data by a range of height indexes such that
317 317 minIndex <= index <= maxIndex
318 318
319 319 Input:
320 320 minIndex : minimum height index to consider
321 321 maxIndex : maximum height index to consider
322 322
323 323 Affected:
324 324 self.dataOut.data
325 325 self.dataOut.heightList
326 326
327 327 Return:
328 328 1 if the method executed successfully, otherwise 0
329 329 """
330 330
331 331 if (minIndex < 0) or (minIndex > maxIndex):
332 332 raise ValueError, "some value in (%d,%d) is not valid" % (minIndex, maxIndex)
333 333
334 334 if (maxIndex >= self.dataOut.nHeights):
335 335 maxIndex = self.dataOut.nHeights-1
336 336 # raise ValueError, "some value in (%d,%d) is not valid" % (minIndex, maxIndex)
337 337
338 338 nHeights = maxIndex - minIndex + 1
339 339
340 340 #voltage
341 341 data = self.dataOut.data[:,minIndex:maxIndex+1]
342 342
343 343 firstHeight = self.dataOut.heightList[minIndex]
344 344
345 345 self.dataOut.data = data
346 346 self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex+1]
347 347
348 348 return 1
349 349
350
351 def filterByHeights(self, window):
352 deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
353
354 if window == None:
355 window = self.dataOut.radarControllerHeaderObj.txA / deltaHeight
356
357 newdelta = deltaHeight * window
358 r = self.dataOut.data.shape[1] % window
359 buffer = self.dataOut.data[:,0:self.dataOut.data.shape[1]-r]
360 buffer = buffer.reshape(self.dataOut.data.shape[0],self.dataOut.data.shape[1]/window,window)
361 buffer = numpy.sum(buffer,2)
362 self.dataOut.data = buffer
363 self.dataOut.heightList = numpy.arange(self.dataOut.heightList[0],newdelta*self.dataOut.nHeights/window,newdelta)
364
365 350
366 351 class CohInt(Operation):
367 352
353 __isConfig = False
354
368 355 __profIndex = 0
369 356 __withOverapping = False
370 357
371 358 __byTime = False
372 359 __initime = None
373 360 __lastdatatime = None
374 361 __integrationtime = None
375 362
376 363 __buffer = None
377 364
378 365 __dataReady = False
379 366
380 367 n = None
381 368
382 369
383 370 def __init__(self):
384 371
385 372 self.__isConfig = False
386 373
387 374 def setup(self, n=None, timeInterval=None, overlapping=False):
388 375 """
389 376 Set the parameters of the integration class.
390 377
391 378 Inputs:
392 379
393 380 n : Number of coherent integrations
394 381 timeInterval : Time of integration in minutes. If the parameter "n" is given, this one is ignored
395 382 overlapping :
396 383
397 384 """
398 385
399 386 self.__initime = None
400 387 self.__lastdatatime = 0
401 388 self.__buffer = None
402 389 self.__dataReady = False
403 390
404 391
405 392 if n == None and timeInterval == None:
406 393 raise ValueError, "n or timeInterval should be specified ..."
407 394
408 395 if n != None:
409 396 self.n = n
410 397 self.__byTime = False
411 398 else:
412 399 self.__integrationtime = timeInterval * 60. #if (type(timeInterval)!=integer) -> change this line
413 400 self.n = 9999
414 401 self.__byTime = True
415 402
416 403 if overlapping:
417 404 self.__withOverapping = True
418 405 self.__buffer = None
419 406 else:
420 407 self.__withOverapping = False
421 408 self.__buffer = 0
422 409
423 410 self.__profIndex = 0
424 411
425 412 def putData(self, data):
426 413
427 414 """
428 415 Add a profile to the __buffer and increase in one the __profileIndex
429 416
430 417 """
431 418
432 419 if not self.__withOverapping:
433 420 self.__buffer += data.copy()
434 421 self.__profIndex += 1
435 422 return
436 423
437 424 #Overlapping data
438 425 nChannels, nHeis = data.shape
439 426 data = numpy.reshape(data, (1, nChannels, nHeis))
440 427
441 428 #If the buffer is empty then it takes the data value
442 429 if self.__buffer == None:
443 430 self.__buffer = data
444 431 self.__profIndex += 1
445 432 return
446 433
447 434 #If the buffer length is lower than n then stack the data value
448 435 if self.__profIndex < self.n:
449 436 self.__buffer = numpy.vstack((self.__buffer, data))
450 437 self.__profIndex += 1
451 438 return
452 439
453 440 #If the buffer length is equal to n then replacing the last buffer value with the data value
454 441 self.__buffer = numpy.roll(self.__buffer, -1, axis=0)
455 442 self.__buffer[self.n-1] = data
456 443 self.__profIndex = self.n
457 444 return
458 445
459 446
460 447 def pushData(self):
461 448 """
462 449 Return the sum of the last profiles and the profiles used in the sum.
463 450
464 451 Affected:
465 452
466 453 self.__profileIndex
467 454
468 455 """
469 456
470 457 if not self.__withOverapping:
471 458 data = self.__buffer
472 459 n = self.__profIndex
473 460
474 461 self.__buffer = 0
475 462 self.__profIndex = 0
476 463
477 464 return data, n
478 465
479 466 #Integration with Overlapping
480 467 data = numpy.sum(self.__buffer, axis=0)
481 468 n = self.__profIndex
482 469
483 470 return data, n
484 471
485 472 def byProfiles(self, data):
486 473
487 474 self.__dataReady = False
488 475 avgdata = None
489 476 n = None
490 477
491 478 self.putData(data)
492 479
493 480 if self.__profIndex == self.n:
494 481
495 482 avgdata, n = self.pushData()
496 483 self.__dataReady = True
497 484
498 485 return avgdata
499 486
500 487 def byTime(self, data, datatime):
501 488
502 489 self.__dataReady = False
503 490 avgdata = None
504 491 n = None
505 492
506 493 self.putData(data)
507 494
508 495 if (datatime - self.__initime) >= self.__integrationtime:
509 496 avgdata, n = self.pushData()
510 497 self.n = n
511 498 self.__dataReady = True
512 499
513 500 return avgdata
514 501
515 502 def integrate(self, data, datatime=None):
516 503
517 504 if self.__initime == None:
518 505 self.__initime = datatime
519 506
520 507 if self.__byTime:
521 508 avgdata = self.byTime(data, datatime)
522 509 else:
523 510 avgdata = self.byProfiles(data)
524 511
525 512
526 513 self.__lastdatatime = datatime
527 514
528 515 if avgdata == None:
529 516 return None, None
530 517
531 518 avgdatatime = self.__initime
532 519
533 520 deltatime = datatime -self.__lastdatatime
534 521
535 522 if not self.__withOverapping:
536 523 self.__initime = datatime
537 524 else:
538 525 self.__initime += deltatime
539 526
540 527 return avgdata, avgdatatime
541 528
542 def run(self, dataOut, n=None, timeInterval=None, overlapping=False):
529 def run(self, dataOut, **kwargs):
543 530
544 531 if not self.__isConfig:
545 self.setup(n, timeInterval, overlapping)
532 self.setup(**kwargs)
546 533 self.__isConfig = True
547 534
548 535 avgdata, avgdatatime = self.integrate(dataOut.data, dataOut.utctime)
549 536
550 537 # dataOut.timeInterval *= n
551 538 dataOut.flagNoData = True
552 539
553 540 if self.__dataReady:
554 541 dataOut.data = avgdata
555 542 dataOut.nCohInt *= self.n
556 543 dataOut.utctime = avgdatatime
557 544 dataOut.timeInterval = dataOut.ippSeconds * dataOut.nCohInt
558 545 dataOut.flagNoData = False
546
547 class Decoder(Operation):
548
549 __isConfig = False
550 __profIndex = 0
551
552 code = None
553
554 nCode = None
555 nBaud = None
556
557 def __init__(self):
558
559 self.__isConfig = False
560
561 def setup(self, code):
562
563 self.__profIndex = 0
564
565 self.code = code
566
567 self.nCode = len(code)
568 self.nBaud = len(code[0])
569
570 def convolutionInFreq(self, data):
571
572 ndata = data.shape[1]
573 newcode = numpy.zeros(ndata)
574 newcode[0:self.nBaud] = self.code[self.__profIndex]
575
576 fft_data = numpy.fft.fft(data, axis=1)
577 fft_code = numpy.conj(numpy.fft.fft(newcode))
578 fft_code = fft_code.reshape(1,len(fft_code))
579
580 # conv = fft_data.copy()
581 # conv.fill(0)
582
583 conv = fft_data*fft_code
584
585 data = numpy.fft.ifft(conv,axis=1)
586
587 datadec = data[:,:-self.nBaud+1]
588 ndatadec = ndata - self.nBaud + 1
589
590 self.__profIndex += 1
591
592 if self.__profIndex == self.nCode:
593 self.__profIndex = 0
594
595 return ndatadec, datadec
596
597
598 def convolutionInTime(self, data):
599
600 nchannel, ndata = data.shape
601 newcode = self.code[self.__profIndex]
602
603 datadec = numpy.zeros((nchannel, ndata - self.nBaud + 1), dtype=data.dtype)
604
605 for i in range(nchannel):
606 datadec[i,:] = numpy.correlate(data[i,:], newcode)
607
608 ndatadec = ndata - self.nBaud + 1
609
610 self.__profIndex += 1
611
612 if self.__profIndex == self.nCode:
613 self.__profIndex = 0
614
615 return ndatadec, datadec
616
617 def run(self, dataOut, code=None, mode = 0):
618
619 if not self.__isConfig:
559 620
621 if code == None:
622 code = dataOut.code
623
624 self.setup(code)
625 self.__isConfig = True
626
627 if mode == 0:
628 ndatadec, datadec = self.convolutionInFreq(dataOut.data)
629
630 if mode == 1:
631 ndatadec, datadec = self.convolutionInTime(dataOut.data)
632
633 dataOut.data = datadec
634
635 dataOut.heightList = dataOut.heightList[0:ndatadec]
636
637 dataOut.flagDecodeData = True # the data has now been decoded
638
639 # dataOut.flagDeflipData = True # set once the data has been deflipped
640
641
560 642
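The two decode paths in the Decoder above are equivalent: correlating each profile with the code (time domain) matches multiplying by the conjugate of the zero-padded code's FFT (frequency domain) and keeping only the fully overlapped lags. A standalone sketch under those assumptions:

import numpy

def decode_profile(data, code):
    # data: complex samples (nChannels, nHeights); code: 1-D baud sequence
    nHeights = data.shape[1]
    nBaud = len(code)

    padded = numpy.zeros(nHeights)
    padded[:nBaud] = code
    fft_code = numpy.conj(numpy.fft.fft(padded)).reshape(1, -1)

    decoded = numpy.fft.ifft(numpy.fft.fft(data, axis=1) * fft_code, axis=1)

    # keep the fully overlapped lags, as numpy.correlate(..., 'valid') would
    return decoded[:, :nHeights - nBaud + 1]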
561 643 class SpectraProc(ProcessingUnit):
562 644
563 645 def __init__(self):
564 646
565 647 self.objectDict = {}
566 648 self.buffer = None
567 649 self.firstdatatime = None
568 650 self.profIndex = 0
569 651 self.dataOut = Spectra()
570 652
571 653 def __updateObjFromInput(self):
572 654
573 655 self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()
574 656 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()
575 657 self.dataOut.channelList = self.dataIn.channelList
576 658 self.dataOut.heightList = self.dataIn.heightList
577 659 self.dataOut.dtype = self.dataIn.dtype
578 660 # self.dataOut.nHeights = self.dataIn.nHeights
579 661 # self.dataOut.nChannels = self.dataIn.nChannels
580 662 self.dataOut.nBaud = self.dataIn.nBaud
581 663 self.dataOut.nCode = self.dataIn.nCode
582 664 self.dataOut.code = self.dataIn.code
583 665 self.dataOut.nProfiles = self.dataOut.nFFTPoints
584 666 # self.dataOut.channelIndexList = self.dataIn.channelIndexList
585 667 self.dataOut.flagTimeBlock = self.dataIn.flagTimeBlock
586 668 self.dataOut.utctime = self.firstdatatime
587 669 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData # assume the data is already decoded
588 670 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData # assume the data has no flip applied
589 671 self.dataOut.flagShiftFFT = self.dataIn.flagShiftFFT
590 672 self.dataOut.nCohInt = self.dataIn.nCohInt
591 673 self.dataOut.nIncohInt = 1
592 674 self.dataOut.ippSeconds = self.dataIn.ippSeconds
593 675
594 676 self.dataOut.timeInterval = self.dataIn.timeInterval*self.dataOut.nFFTPoints*self.dataOut.nIncohInt
595 677
596 678 def __getFft(self):
597 679 """
598 680 Converts Voltage values into Spectra
599 681
600 682 Affected:
601 683 self.dataOut.data_spc
602 684 self.dataOut.data_cspc
603 685 self.dataOut.data_dc
604 686 self.dataOut.heightList
605 687 self.profIndex
606 688 self.buffer
607 689 self.dataOut.flagNoData
608 690 """
609 fft_volt = numpy.fft.fft(self.buffer,axis=1)/numpy.sqrt(self.dataOut.nFFTPoints)
691 fft_volt = numpy.fft.fft(self.buffer,axis=1)
610 692 dc = fft_volt[:,0,:]
611 693
612 694 #compute the self-spectra
613 695 fft_volt = numpy.fft.fftshift(fft_volt,axes=(1,))
614 696 spc = fft_volt * numpy.conjugate(fft_volt)
615 697 spc = spc.real
616 698
617 699 blocksize = 0
618 700 blocksize += dc.size
619 701 blocksize += spc.size
620 702
621 703 cspc = None
622 704 pairIndex = 0
623 705 if self.dataOut.pairsList != None:
624 706 #compute the cross-spectra
625 707 cspc = numpy.zeros((self.dataOut.nPairs, self.dataOut.nFFTPoints, self.dataOut.nHeights), dtype='complex')
626 708 for pair in self.dataOut.pairsList:
627 709 cspc[pairIndex,:,:] = fft_volt[pair[0],:,:] * numpy.conjugate(fft_volt[pair[1],:,:])
628 710 pairIndex += 1
629 711 blocksize += cspc.size
630 712
631 713 self.dataOut.data_spc = spc
632 714 self.dataOut.data_cspc = cspc
633 715 self.dataOut.data_dc = dc
634 716 self.dataOut.blockSize = blocksize
635 717
636 718 def init(self, nFFTPoints=None, pairsList=None):
637 719
720 self.dataOut.flagNoData = True
721
638 722 if self.dataIn.type == "Spectra":
639 723 self.dataOut.copy(self.dataIn)
640 724 return
641 725
642 726 if self.dataIn.type == "Voltage":
643 727
644 728 if nFFTPoints == None:
645 729 raise ValueError, "This SpectraProc.init() need nFFTPoints input variable"
646 730
647 731 if pairsList == None:
648 732 nPairs = 0
649 733 else:
650 734 nPairs = len(pairsList)
651 735
652 736 self.dataOut.nFFTPoints = nFFTPoints
653 737 self.dataOut.pairsList = pairsList
654 738 self.dataOut.nPairs = nPairs
655 739
656 740 if self.buffer == None:
657 741 self.buffer = numpy.zeros((self.dataIn.nChannels,
658 742 self.dataOut.nFFTPoints,
659 743 self.dataIn.nHeights),
660 744 dtype='complex')
661 745
662 746
663 self.buffer[:,self.profIndex,:] = self.dataIn.data
747 self.buffer[:,self.profIndex,:] = self.dataIn.data.copy()
664 748 self.profIndex += 1
665 749
666 750 if self.firstdatatime == None:
667 751 self.firstdatatime = self.dataIn.utctime
668 752
669 753 if self.profIndex == self.dataOut.nFFTPoints:
670 754 self.__updateObjFromInput()
671 755 self.__getFft()
672 756
673 757 self.dataOut.flagNoData = False
674 758
675 759 self.buffer = None
676 760 self.firstdatatime = None
677 761 self.profIndex = 0
678 762
679 763 return
680 764
681 765 raise ValueError, "The object type %s is not valid"%(self.dataIn.type)
682 766
683 767 def selectChannels(self, channelList):
684 768
685 769 channelIndexList = []
686 770
687 771 for channel in channelList:
688 772 index = self.dataOut.channelList.index(channel)
689 773 channelIndexList.append(index)
690 774
691 775 self.selectChannelsByIndex(channelIndexList)
692 776
693 777 def selectChannelsByIndex(self, channelIndexList):
694 778 """
695 779 Selects a block of data by channel according to channelIndexList
696 780
697 781 Input:
698 782 channelIndexList : plain list of channel indexes to select, e.g. [2,3,7]
699 783
700 784 Affected:
701 785 self.dataOut.data_spc
702 786 self.dataOut.channelIndexList
703 787 self.dataOut.nChannels
704 788
705 789 Return:
706 790 None
707 791 """
708 792
709 793 for channelIndex in channelIndexList:
710 794 if channelIndex not in self.dataOut.channelIndexList:
711 795 print channelIndexList
712 796 raise ValueError, "The value %d in channelIndexList is not valid" %channelIndex
713 797
714 798 nChannels = len(channelIndexList)
715 799
716 800 data_spc = self.dataOut.data_spc[channelIndexList,:]
717 801
718 802 self.dataOut.data_spc = data_spc
719 803 self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
720 804 # self.dataOut.nChannels = nChannels
721 805
722 806 return 1
723 807
724 808
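A compact sketch of the voltage-to-spectra step performed by __getFft(), applied to a standalone buffer (shapes assumed; the DC channel and blocksize bookkeeping are left out):

import numpy

def voltage_to_spectra(buffer, pairsList=None):
    # buffer: complex array (nChannels, nFFTPoints, nHeights)
    fft_volt = numpy.fft.fftshift(numpy.fft.fft(buffer, axis=1), axes=(1,))
    spc = (fft_volt * numpy.conjugate(fft_volt)).real        # self-spectra
    cspc = None
    if pairsList:
        cspc = numpy.array([fft_volt[p0] * numpy.conjugate(fft_volt[p1])
                            for p0, p1 in pairsList])        # cross-spectra
    return spc, cspc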
725 809 class IncohInt(Operation):
726 810
727 811
728 812 __profIndex = 0
729 813 __withOverapping = False
730 814
731 815 __byTime = False
732 816 __initime = None
733 817 __lastdatatime = None
734 818 __integrationtime = None
735 819
736 820 __buffer_spc = None
737 821 __buffer_cspc = None
738 822 __buffer_dc = None
739 823
740 824 __dataReady = False
741 825
742 826 n = None
743 827
744 828
745 829 def __init__(self):
746 830
747 831 self.__isConfig = False
748 832
749 833 def setup(self, n=None, timeInterval=None, overlapping=False):
750 834 """
751 835 Set the parameters of the integration class.
752 836
753 837 Inputs:
754 838
755 839 n : Number of incoherent integrations
756 840 timeInterval : Time of integration in minutes. If the parameter "n" is given, this one is ignored
757 841 overlapping :
758 842
759 843 """
760 844
761 845 self.__initime = None
762 846 self.__lastdatatime = 0
763 847 self.__buffer_spc = None
764 848 self.__buffer_cspc = None
765 849 self.__buffer_dc = None
766 850 self.__dataReady = False
767 851
768 852
769 853 if n == None and timeInterval == None:
770 854 raise ValueError, "n or timeInterval should be specified ..."
771 855
772 856 if n != None:
773 857 self.n = n
774 858 self.__byTime = False
775 859 else:
776 860 self.__integrationtime = timeInterval * 60. #if (type(timeInterval)!=integer) -> change this line
777 861 self.n = 9999
778 862 self.__byTime = True
779 863
780 864 if overlapping:
781 865 self.__withOverapping = True
782 866 else:
783 867 self.__withOverapping = False
784 868 self.__buffer_spc = 0
785 869 self.__buffer_cspc = 0
786 870 self.__buffer_dc = 0
787 871
788 872 self.__profIndex = 0
789 873
790 874 def putData(self, data_spc, data_cspc, data_dc):
791 875
792 876 """
793 877 Add a profile to the __buffer_spc and increase the __profileIndex by one
794 878
795 879 """
796 880
797 881 if not self.__withOverapping:
798 882 self.__buffer_spc += data_spc
799 883
800 884 if data_cspc == None:
801 885 self.__buffer_cspc = None
802 886 else:
803 887 self.__buffer_cspc += data_cspc
804 888
805 889 if data_dc == None:
806 890 self.__buffer_dc = None
807 891 else:
808 892 self.__buffer_dc += data_dc
809 893
810 894 self.__profIndex += 1
811 895 return
812 896
813 897 #Overlapping data
814 898 nChannels, nFFTPoints, nHeis = data_spc.shape
815 899 data_spc = numpy.reshape(data_spc, (1, nChannels, nFFTPoints, nHeis))
816 900 if data_cspc != None:
817 901 data_cspc = numpy.reshape(data_cspc, (1, -1, nFFTPoints, nHeis))
818 902 if data_dc != None:
819 903 data_dc = numpy.reshape(data_dc, (1, -1, nHeis))
820 904
821 905 #If the buffer is empty then it takes the data value
822 906 if self.__buffer_spc == None:
823 907 self.__buffer_spc = data_spc
824 908
825 909 if data_cspc == None:
826 910 self.__buffer_cspc = None
827 911 else:
828 912 self.__buffer_cspc = data_cspc
829 913
830 914 if data_dc == None:
831 915 self.__buffer_dc = None
832 916 else:
833 917 self.__buffer_dc = data_dc
834 918
835 919 self.__profIndex += 1
836 920 return
837 921
838 922 #If the buffer length is lower than n then stack the data value
839 923 if self.__profIndex < self.n:
840 924 self.__buffer_spc = numpy.vstack((self.__buffer_spc, data_spc))
841 925
842 926 if data_cspc != None:
843 927 self.__buffer_cspc = numpy.vstack((self.__buffer_cspc, data_cspc))
844 928
845 929 if data_dc != None:
846 930 self.__buffer_dc = numpy.vstack((self.__buffer_dc, data_dc))
847 931
848 932 self.__profIndex += 1
849 933 return
850 934
851 935 #If the buffer length is equal to n then replacing the last buffer value with the data value
852 936 self.__buffer_spc = numpy.roll(self.__buffer_spc, -1, axis=0)
853 937 self.__buffer_spc[self.n-1] = data_spc
854 938
855 939 if data_cspc != None:
856 940 self.__buffer_cspc = numpy.roll(self.__buffer_cspc, -1, axis=0)
857 941 self.__buffer_cspc[self.n-1] = data_cspc
858 942
859 943 if data_dc != None:
860 944 self.__buffer_dc = numpy.roll(self.__buffer_dc, -1, axis=0)
861 945 self.__buffer_dc[self.n-1] = data_dc
862 946
863 947 self.__profIndex = self.n
864 948 return
865 949
866 950
867 951 def pushData(self):
868 952 """
869 953 Return the sum of the last profiles and the profiles used in the sum.
870 954
871 955 Affected:
872 956
873 957 self.__profileIndex
874 958
875 959 """
876 960 data_spc = None
877 961 data_cspc = None
878 962 data_dc = None
879 963
880 964 if not self.__withOverapping:
881 965 data_spc = self.__buffer_spc
882 966 data_cspc = self.__buffer_cspc
883 967 data_dc = self.__buffer_dc
884 968
885 969 n = self.__profIndex
886 970
887 971 self.__buffer_spc = 0
888 972 self.__buffer_cspc = 0
889 973 self.__buffer_dc = 0
890 974 self.__profIndex = 0
891 975
892 976 return data_spc, data_cspc, data_dc, n
893 977
894 978 #Integration with Overlapping
895 979 data_spc = numpy.sum(self.__buffer_spc, axis=0)
896 980
897 981 if self.__buffer_cspc != None:
898 982 data_cspc = numpy.sum(self.__buffer_cspc, axis=0)
899 983
900 984 if self.__buffer_dc != None:
901 985 data_dc = numpy.sum(self.__buffer_dc, axis=0)
902 986
903 987 n = self.__profIndex
904 988
905 989 return data_spc, data_cspc, data_dc, n
906 990
907 991 def byProfiles(self, *args):
908 992
909 993 self.__dataReady = False
910 994 avgdata_spc = None
911 995 avgdata_cspc = None
912 996 avgdata_dc = None
913 997 n = None
914 998
915 999 self.putData(*args)
916 1000
917 1001 if self.__profIndex == self.n:
918 1002
919 1003 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
920 1004 self.__dataReady = True
921 1005
922 1006 return avgdata_spc, avgdata_cspc, avgdata_dc
923 1007
924 1008 def byTime(self, datatime, *args):
925 1009
926 1010 self.__dataReady = False
927 1011 avgdata_spc = None
928 1012 avgdata_cspc = None
929 1013 avgdata_dc = None
930 1014 n = None
931 1015
932 1016 self.putData(*args)
933 1017
934 1018 if (datatime - self.__initime) >= self.__integrationtime:
935 1019 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
936 1020 self.n = n
937 1021 self.__dataReady = True
938 1022
939 1023 return avgdata_spc, avgdata_cspc, avgdata_dc
940 1024
941 1025 def integrate(self, datatime, *args):
942 1026
943 1027 if self.__initime == None:
944 1028 self.__initime = datatime
945 1029
946 1030 if self.__byTime:
947 1031 avgdata_spc, avgdata_cspc, avgdata_dc = self.byTime(datatime, *args)
948 1032 else:
949 1033 avgdata_spc, avgdata_cspc, avgdata_dc = self.byProfiles(*args)
950 1034
951 1035 self.__lastdatatime = datatime
952 1036
953 1037 if avgdata_spc == None:
954 1038 return None, None, None, None
955 1039
956 1040 avgdatatime = self.__initime
957 1041
958 1042 deltatime = datatime -self.__lastdatatime
959 1043
960 1044 if not self.__withOverapping:
961 1045 self.__initime = datatime
962 1046 else:
963 1047 self.__initime += deltatime
964 1048
965 1049 return avgdatatime, avgdata_spc, avgdata_cspc, avgdata_dc
966 1050
967 1051 def run(self, dataOut, n=None, timeInterval=None, overlapping=False):
968 1052
969 1053 if not self.__isConfig:
970 1054 self.setup(n, timeInterval, overlapping)
971 1055 self.__isConfig = True
972 1056
973 1057 avgdatatime, avgdata_spc, avgdata_cspc, avgdata_dc = self.integrate(dataOut.utctime,
974 1058 dataOut.data_spc,
975 1059 dataOut.data_cspc,
976 1060 dataOut.data_dc)
977 1061
978 1062 # dataOut.timeInterval *= n
979 1063 dataOut.flagNoData = True
980 1064
981 1065 if self.__dataReady:
982 1066 dataOut.data_spc = avgdata_spc
983 1067 dataOut.data_cspc = avgdata_cspc
984 1068 dataOut.data_dc = avgdata_dc
985 1069
986 1070 dataOut.nIncohInt *= self.n
987 1071 dataOut.utctime = avgdatatime
988 1072 dataOut.timeInterval = dataOut.ippSeconds * dataOut.nCohInt * dataOut.nIncohInt * dataOut.nFFTPoints
989 1073 dataOut.flagNoData = False
990
991
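As in CohInt, the overlapping mode of IncohInt keeps the last n spectra in a rolling buffer and replaces the oldest entry on every new block; a minimal sketch of that roll-and-replace update (buffer is assumed to already hold n spectra along axis 0):

import numpy

def roll_and_replace(buffer, new_spc):
    # drop the oldest spectrum, append the newest, and return the running sum
    buffer = numpy.roll(buffer, -1, axis=0)
    buffer[-1] = new_spc
    return buffer, buffer.sum(axis=0)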
992 class ProfileSelector(Operation):
993
994 profileIndex = None
995 # Total number of profiles
996 nProfiles = None
997
998 def __init__(self):
999
1000 self.profileIndex = 0
1001
1002 def incIndex(self):
1003 self.profileIndex += 1
1004
1005 if self.profileIndex >= self.nProfiles:
1006 self.profileIndex = 0
1007
1008 def isProfileInRange(self, minIndex, maxIndex):
1009
1010 if self.profileIndex < minIndex:
1011 return False
1012
1013 if self.profileIndex > maxIndex:
1014 return False
1015
1016 return True
1017
1018 def isProfileInList(self, profileList):
1019
1020 if self.profileIndex not in profileList:
1021 return False
1022
1023 return True
1024
1025 def run(self, dataOut, profileList=None, profileRangeList=None):
1026
1027 self.nProfiles = dataOut.nProfiles
1028
1029 if profileList != None:
1030 if not(self.isProfileInList(profileList)):
1031 dataOut.flagNoData = True
1032 else:
1033 dataOut.flagNoData = False
1034 self.incIndex()
1035 return 1
1036
1037
1038 elif profileRangeList != None:
1039 minIndex = profileRangeList[0]
1040 maxIndex = profileRangeList[1]
1041 if not(self.isProfileInRange(minIndex, maxIndex)):
1042 dataOut.flagNoData = True
1043 else:
1044 dataOut.flagNoData = False
1045 self.incIndex()
1046 return 1
1047 else:
1048 raise ValueError, "ProfileSelector needs profileList or profileRangeList"
1049
1050 return 0
1051
1052 class Decoder:
1053
1054 data = None
1055 profCounter = None
1056 code = None
1057 ncode = None
1058 nbaud = None
1059 codeIndex = None
1060 flag = False
1061
1062 def __init__(self):
1063
1064 self.data = None
1065 self.ndata = None
1066 self.profCounter = 1
1067 self.codeIndex = 0
1068 self.flag = False
1069 self.code = None
1070 self.ncode = None
1071 self.nbaud = None
1072 self.__isConfig = False
1073
1074 def convolutionInFreq(self, data, ndata):
1075
1076 newcode = numpy.zeros(ndata)
1077 newcode[0:self.nbaud] = self.code[self.codeIndex]
1078
1079 self.codeIndex += 1
1080
1081 fft_data = numpy.fft.fft(data, axis=1)
1082 fft_code = numpy.conj(numpy.fft.fft(newcode))
1083 fft_code = fft_code.reshape(1,len(fft_code))
1084
1085 conv = fft_data.copy()
1086 conv.fill(0)
1087
1088 conv = fft_data*fft_code
1089
1090 data = numpy.fft.ifft(conv,axis=1)
1091 self.data = data[:,:-self.nbaud+1]
1092 self.flag = True
1093
1094 if self.profCounter == self.ncode:
1095 self.profCounter = 0
1096 self.codeIndex = 0
1097
1098 self.profCounter += 1
1099
1100 def convolutionInTime(self, data, ndata):
1101
1102 nchannel = data.shape[1]
1103 newcode = self.code[self.codeIndex]
1104 self.codeIndex += 1
1105 conv = data.copy()
1106 for i in range(nchannel):
1107 conv[i,:] = numpy.correlate(data[i,:], newcode)
1108
1109 self.data = conv
1110 self.flag = True
1111
1112 if self.profCounter == self.ncode:
1113 self.profCounter = 0
1114 self.codeIndex = 0
1115
1116 self.profCounter += 1
1117
1118 def run(self, dataOut, code=None, mode = 0):
1119
1120 if not(self.__isConfig):
1121 if code == None:
1122 code = dataOut.radarControllerHeaderObj.code
1123 # code = dataOut.code
1124
1125 ncode, nbaud = code.shape
1126 self.code = code
1127 self.ncode = ncode
1128 self.nbaud = nbaud
1129 self.__isConfig = True
1130
1131 ndata = dataOut.data.shape[1]
1132
1133 if mode == 0:
1134 self.convolutionInFreq(dataOut.data, ndata)
1135
1136 if mode == 1:
1137 self.convolutionInTime(dataOut.data, ndata)
1138
1139 self.ndata = ndata - self.nbaud + 1
1140
1141 dataOut.data = self.data
1142
1143 dataOut.heightList = dataOut.heightList[:self.ndata]
1144
1145 dataOut.flagNoData = False No newline at end of file
1074 No newline at end of file