##// END OF EJS Templates
Prueba de lectura de Voltajes y obtencion de Espectros.
Miguel Valdez -
r220:a4afebb9fb40
parent child
Show More
@@ -1,506 +1,506
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JROData.py 173 2012-11-20 15:06:21Z murco $
5 5 '''
6 6
7 7 import os, sys
8 8 import copy
9 9 import numpy
10 10
11 11 from jroheaderIO import SystemHeader, RadarControllerHeader
12 12
def hildebrand_sekhon(data, navg):
    """
    Objective determination of the noise level in Doppler spectra
    (Hildebrand & Sekhon, 1974).  Relies on the fact that, for white
    Gaussian noise, the standard deviation of the spectral densities
    equals their mean spectral density.

    Inputs:
        data : spectral data (any shape; flattened internally)
        navg : number of averages

    Return:
        -1.0   : on error (fewer than 32 points)
        anoise : estimated noise level
    """
    dataflat = data.copy().reshape(-1)
    dataflat.sort()
    npts = dataflat.size    # number of points of the data

    if npts < 32:
        print("error in noise - requires at least 32 points")
        return -1.0

    dataflat2 = numpy.power(dataflat, 2)

    cs = numpy.cumsum(dataflat)
    cs2 = numpy.cumsum(dataflat2)

    # data sorted in ascending order; skip the first 1/8 of the points
    nmin = int((npts + 7.)/8)

    # BUGFIX: if the detection criterion below never triggers, the original
    # code raised NameError on npts_noise; default to "all points are noise".
    npts_noise = npts

    for i in range(nmin, npts):
        s = cs[i]
        s2 = cs2[i]
        p = s / float(i)
        p2 = p**2
        q = s2 / float(i) - p2
        leftc = p2
        rightc = q * float(navg)
        R2 = leftc/rightc

        # Signal detected when R2 = leftc/rightc drops below 1
        if R2 < 1:
            npts_noise = i
            break

    anoise = numpy.average(dataflat[0:npts_noise])

    return anoise
63 63
def sorting_bruce(data, navg):
    """
    Noise estimation by sorting: accumulate the sorted samples while the
    variance test ``sumq*j <= rtest*sum**2`` (rtest = 1 + 1/navg) holds;
    the running mean of the accepted samples is the noise level.

    Inputs:
        data : spectral data (sorted flat by numpy.sort)
        navg : number of averages

    Return:
        lnoise : estimated noise level
    """
    data = data.copy()

    sortdata = numpy.sort(data)
    lenOfData = len(data)

    # at least 1/10 of the samples are always accepted as noise
    # (// keeps the original Python 2 integer-division behavior)
    if (lenOfData // 10) > 0:
        nums_min = lenOfData // 10
    else:
        nums_min = 0

    rtest = 1.0 + 1.0/navg

    sum = 0.
    sumq = 0.
    j = 0
    cont = 1

    # BUGFIX: lnoise could be referenced before assignment when the very
    # first sample past nums_min already fails the variance test.
    lnoise = 0.0

    while((cont == 1) and (j < lenOfData)):

        sum += sortdata[j]
        sumq += sortdata[j]**2
        j += 1

        if j > nums_min:
            if ((sumq*j) <= (rtest*sum**2)):
                lnoise = sum / j
            else:
                # variance test failed: drop the last sample and stop
                # (BUGFIX: original referenced undefined name 'sordata')
                j = j - 1
                sum = sum - sortdata[j]
                sumq = sumq - sortdata[j]**2
                cont = 0

        if j == nums_min:
            lnoise = sum / j

    return lnoise
108 108
class JROData:
    """
    Base container for Jicamarca radar data.  Subclasses (Voltage, Spectra,
    SpectraHeis) add the actual data arrays; this class holds the headers
    and the metadata/properties they share.
    """

    # m_BasicHeader = BasicHeader()
    # m_ProcessingHeader = ProcessingHeader()

    systemHeaderObj = SystemHeader()

    radarControllerHeaderObj = RadarControllerHeader()

    # kind of data object, e.g. "Voltage" or "Spectra"
    type = None

    # numpy dtype of the raw samples
    dtype = None

    nProfiles = None

    heightList = None

    channelList = None

    flagNoData = True

    flagTimeBlock = False

    # UTC time of the current block (seconds since epoch)
    utctime = None

    blocksize = None

    nCode = None

    nBaud = None

    code = None

    flagDecodeData = True    # assume the data is already decoded

    flagDeflipData = True    # assume the data has no flip applied

    flagShiftFFT = False

    ippSeconds = None

    timeInterval = None

    nCohInt = None

    noise = None

    # speed of light (m/s)
    C = 3e8

    frequency = 49.92e6

    def __init__(self):

        raise ValueError("This class has not been implemented")

    def copy(self, inputObj=None):
        """Return a deep copy of self, or copy inputObj's attributes into self."""
        if inputObj is None:
            return copy.deepcopy(self)

        for key in inputObj.__dict__.keys():
            self.__dict__[key] = inputObj.__dict__[key]

    def deepcopy(self):

        return copy.deepcopy(self)

    def isEmpty(self):
        """True when the object carries no data for the current block."""
        return self.flagNoData

    def getNoise(self):

        raise ValueError("Not implemented")

    def getNChannels(self):

        return len(self.channelList)

    def getChannelIndexList(self):

        return range(self.nChannels)

    def getNHeights(self):

        return len(self.heightList)

    def getHeiRange(self, extrapoints=0):
        """Return the height axis (extrapoints currently unused)."""
        heis = self.heightList

        return heis

    def getDatatime(self):
        """Return [blockStart, blockEnd] as a numpy array of UTC seconds."""
        datatime = []

        datatime.append(self.utctime)
        datatime.append(self.utctime + self.timeInterval)

        datatime = numpy.array(datatime)

        return datatime

    def getFmax(self):
        """Maximum unambiguous frequency given the effective PRF."""
        PRF = 1./(self.ippSeconds * self.nCohInt)

        fmax = PRF/2.

        return fmax

    def getVmax(self):
        """Maximum unambiguous radial velocity (fmax scaled by lambda/2)."""
        _lambda = self.C/self.frequency

        vmax = self.getFmax() * _lambda / 2.

        return vmax

    nChannels = property(getNChannels, "I'm the 'nChannels' property.")
    channelIndexList = property(getChannelIndexList, "I'm the 'channelIndexList' property.")

    nHeights = property(getNHeights, "I'm the 'nHeights' property.")

    # BUGFIX: doc string said "nHeights" for the noise property
    noise = property(getNoise, "I'm the 'noise' property.")
245 245
class Voltage(JROData):
    """Container for raw voltage data."""

    # data: 2-D numpy array (channels, heights)
    data = None

    def __init__(self):
        '''
        Constructor
        '''

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.systemHeaderObj = SystemHeader()

        self.type = "Voltage"

        self.data = None

        self.dtype = None

        self.nProfiles = None

        self.heightList = None

        self.channelList = None

        self.flagNoData = True

        self.flagTimeBlock = False

        self.utctime = None

        self.nCohInt = None

        self.blocksize = None

    def getNoisebyHildebrand(self):
        """
        Noise level per channel via the Hildebrand-Sekhon method.

        Return:
            noiselevel (one value per channel)
        """

        for channel in range(self.nChannels):
            # BUGFIX: the original read self.data_spc, which Voltage never
            # defines (copied from Spectra) and raised AttributeError.
            # Voltage data lives in self.data, a 2-D (channels, heights)
            # array per the class comment — confirm against callers.
            daux = self.data[channel, :]
            self.noise[channel] = hildebrand_sekhon(daux, self.nCohInt)

        return self.noise

    def getNoise(self, type=1):
        """Noise level in dB per channel (type 1 = Hildebrand-Sekhon)."""

        self.noise = numpy.zeros(self.nChannels)

        if type == 1:
            noise = self.getNoisebyHildebrand()
        else:
            # BUGFIX: the original fell through with 'noise' undefined
            raise ValueError("Unknown noise estimator type: %s" % type)

        return 10*numpy.log10(noise)
310 310
class Spectra(JROData):
    """
    Container for spectral data: auto-spectra, cross-spectra and DC
    channels plus the FFT/pair metadata needed to interpret them.
    """

    # 3-D numpy array (channels, profiles, heights)
    data_spc = None

    # 3-D numpy array (channels, pairs, heights)
    data_cspc = None

    # 2-D numpy array (channels, heights)
    data_dc = None

    nFFTPoints = None

    nPairs = None

    pairsList = None

    nIncohInt = None

    # needed to compute the velocity range from the frequency range
    wavelength = None

    # needed to determine timeInterval
    nCohInt = None

    def __init__(self):
        '''
        Constructor
        '''

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.systemHeaderObj = SystemHeader()

        self.type = "Spectra"

        self.dtype = None

        self.nProfiles = None

        self.heightList = None

        self.channelList = None

        self.flagNoData = True

        self.flagTimeBlock = False

        self.utctime = None

        self.nCohInt = None

        self.nIncohInt = None

        self.blocksize = None

        self.nFFTPoints = None

        self.wavelength = None

    def getNoisebyHildebrand(self):
        """
        Noise level per channel via the Hildebrand-Sekhon method.

        Return:
            noiselevel (one value per channel)
        """

        for channel in range(self.nChannels):
            daux = self.data_spc[channel,:,:]
            self.noise[channel] = hildebrand_sekhon(daux, self.nIncohInt)

        return self.noise

    def getNoisebyWindow(self, heiIndexMin=0, heiIndexMax=-1, freqIndexMin=0, freqIndexMax=-1):
        """
        Per-channel noise as the plain average inside the window
        (heiIndexMin, freqIndexMin) .. (heiIndexMax, freqIndexMax).

        Inputs:
            heiIndexMin  : lower limit of the height axis
            heiIndexMax  : upper limit of the height axis
            freqIndexMin : lower limit of the frequency axis
            freqIndexMax : upper limit of the frequency axis

        NOTE(review): data_spc is documented as (channels, profiles,
        heights), yet the slice applies the height limits to axis 1 and
        the frequency limits to axis 2 — confirm the intended axis order.
        """

        data = self.data_spc[:, heiIndexMin:heiIndexMax, freqIndexMin:freqIndexMax]

        for channel in range(self.nChannels):
            daux = data[channel,:,:]
            self.noise[channel] = numpy.average(daux)

        return self.noise

    def getNoisebySort(self):
        """Noise level per channel via the sorting (Bruce) method."""

        for channel in range(self.nChannels):
            daux = self.data_spc[channel,:,:]
            self.noise[channel] = sorting_bruce(daux, self.nIncohInt)

        return self.noise

    def getNoise(self, type = 1):
        """
        Noise level in dB per channel.

        type: 1 = Hildebrand-Sekhon, 2 = sorting, 3 = window average
        """

        self.noise = numpy.zeros(self.nChannels)

        if type == 1:
            noise = self.getNoisebyHildebrand()
        elif type == 2:
            noise = self.getNoisebySort()
        elif type == 3:
            noise = self.getNoisebyWindow()
        else:
            # BUGFIX: the original fell through with 'noise' undefined
            raise ValueError("Unknown noise estimator type: %s" % type)

        return 10*numpy.log10(noise)

    def getFreqRange(self, extrapoints=0):
        """Frequency axis of the spectrum, optionally padded by extrapoints."""

        # BUGFIX: the original mixed three spellings (delfreq, deltafreqs,
        # deltafreq) and raised NameError on every call
        deltafreq = 2 * self.getFmax() / self.nFFTPoints
        freqrange = deltafreq*(numpy.arange(self.nFFTPoints+extrapoints)-self.nFFTPoints/2.) - deltafreq/2

        return freqrange

    def getVelRange(self, extrapoints=0):
        """Radial-velocity axis of the spectrum, optionally padded."""

        deltav = 2 * self.getVmax() / self.nFFTPoints
        velrange = deltav*(numpy.arange(self.nFFTPoints+extrapoints)-self.nFFTPoints/2.) - deltav/2

        return velrange

    def getNPairs(self):

        return len(self.pairsList)

    def getPairsIndexList(self):

        return range(self.nPairs)

    nPairs = property(getNPairs, "I'm the 'nPairs' property.")
    pairsIndexList = property(getPairsIndexList, "I'm the 'pairsIndexList' property.")
459 459
class SpectraHeis(JROData):
    """
    Container for heterodyne spectral data; mirrors Spectra's data layout
    but carries none of the FFT/velocity helper methods.
    """

    data_spc = None

    data_cspc = None

    data_dc = None

    nFFTPoints = None

    nPairs = None

    pairsList = None

    nIncohInt = None

    def __init__(self):
        """Initialize every attribute of an empty SpectraHeis object."""

        self.radarControllerHeaderObj = RadarControllerHeader()
        self.systemHeaderObj = SystemHeader()

        self.type = "SpectraHeis"
        self.dtype = None

        self.nProfiles = None
        self.heightList = None
        self.channelList = None

        self.flagNoData = True
        self.flagTimeBlock = False

        self.nPairs = 0
        self.utctime = None
        self.blocksize = None
@@ -1,2513 +1,2516
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JRODataIO.py 169 2012-11-19 21:57:03Z murco $
5 5 '''
6 6
7 7 import os, sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import time, datetime
13 13
14 14 from jrodata import *
15 15 from jroheaderIO import *
16 16 from jroprocessing import *
17 17
def isNumber(str):
    """
    Check whether a value can be converted to a number via float().

    Input:
        str : string (or other value) to analyze

    Return:
        True  : the value is numeric
        False : it is not
    """
    try:
        float(str)
        return True
    # BUGFIX: narrowed from a bare except (which also swallowed
    # KeyboardInterrupt/SystemExit) to the exceptions float() raises
    except (TypeError, ValueError):
        return False
36 36
def isThisFileinRange(filename, startUTSeconds, endUTSeconds):
    """
    Determine whether a Jicamarca data file contains data inside the given
    date range.

    Inputs:
        filename       : full path of a data file in Jicamarca format (.r)
        startUTSeconds : start of the selected range, seconds since 01/01/1970
        endUTSeconds   : end of the selected range, seconds since 01/01/1970

    Return:
        1 if the file's basic-header time falls in [start, end), 0 otherwise
        (also 0 when the header cannot be parsed)

    Exceptions:
        IOError if the file cannot be opened
    """
    basicHeaderObj = BasicHeader()

    try:
        fp = open(filename,'rb')
    except Exception:
        raise IOError("The file %s can't be opened" %(filename))

    # BUGFIX: close the descriptor even if the header read raises
    try:
        sts = basicHeaderObj.read(fp)
    finally:
        fp.close()

    if not(sts):
        print("Skipping the file %s because it has not a valid header" %(filename))
        return 0

    if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
        return 0

    return 1
76 76
def getlastFileFromPath(path, ext):
    """
    Filter the listing of `path`, keeping only names that follow the
    "HYYYYDDDSSS.ext" convention, and return the last one in
    case-insensitive alphabetical order.

    Input:
        path : folder containing the data files
        ext  : extension of the files in the folder

    Return:
        the last valid file name (without path), or None if there is none
    """
    validFilelist = []
    fileList = os.listdir(path)

    # 0 1234 567 89A BCDE
    # H YYYY DDD SSS .ext
    for file in fileList:
        try:
            year = int(file[1:5])
            doy = int(file[5:8])

            if (os.path.splitext(file)[-1].upper() != ext.upper()) : continue
        # BUGFIX: narrowed from a bare except; int() on a malformed name
        # raises ValueError, which is the only failure being filtered here
        except ValueError:
            continue

        validFilelist.append(file)

    if validFilelist:
        validFilelist = sorted( validFilelist, key=str.lower )
        return validFilelist[-1]

    return None
111 111
def checkForRealPath(path, year, doy, set, ext):
    """
    Linux is case sensitive, so probe every upper/lower-case combination of
    the day-folder prefix and the file prefix until an existing path for
    the given year/doy/set is found.

    e.g. for .../D2009307/P2009307367.ext the probes are
        .../x2009307/y2009307367.ext  with x in {d,D} and y in {p,P}

    Return:
        (filepath, filename) of the first existing combination;
        (None, lastTriedFilename) when none exists;
        (None, None) for an unknown extension.
    """
    if ext.lower() == ".r":          # voltage
        dirPrefixes = "dD"
        filePrefixes = "dD"
    elif ext.lower() == ".pdata":    # spectra
        dirPrefixes = "dD"
        filePrefixes = "pP"
    else:
        return None, None

    filename = None

    for dirPrefix in dirPrefixes:
        for filePrefix in filePrefixes:
            # day folder xYYYYDDD and file xYYYYDDDSSS.ext
            doypath = "%s%04d%03d" % (dirPrefix, year, doy)
            filename = "%s%04d%03d%03d%s" % (filePrefix, year, doy, set, ext)
            filepath = os.path.join(path, doypath, filename)

            if os.path.exists(filepath):
                return filepath, filename

    return None, filename
161 161
class JRODataIO:
    """
    Common state shared by the Jicamarca reader/writer classes: the four
    header objects, current-file bookkeeping and the output data object.
    """

    # speed of light (m/s)
    c = 3E8

    isConfig = False

    basicHeaderObj = BasicHeader()

    systemHeaderObj = SystemHeader()

    radarControllerHeaderObj = RadarControllerHeader()

    processingHeaderObj = ProcessingHeader()

    online = 0

    dtype = None

    # NOTE(review): class-level mutable lists are shared by all instances
    pathList = []

    filenameList = []

    filename = None

    ext = None

    flagIsNewFile = 1

    flagTimeBlock = 0

    flagIsNewBlock = 0

    # file descriptor of the currently open data file
    fp = None

    firstHeaderSize = 0

    basicHeaderSize = 24

    versionFile = 1103

    fileSize = None

    ippSeconds = None

    fileSizeByHeader = None

    fileIndex = None

    profileIndex = None

    blockIndex = None

    nTotalBlocks = None

    # a gap (seconds) larger than this between blocks marks a time break
    maxTimeStep = 30

    lastUTTime = None

    datablock = None

    dataOut = None

    blocksize = None

    def __init__(self):
        """Abstract: concrete IO classes implement their own constructor."""
        raise ValueError("Not implemented")

    def run(self):
        """Abstract: implemented by concrete reader/writer classes."""
        raise ValueError("Not implemented")

    def getOutput(self):
        """Return the current output data object."""
        return self.dataOut
237 237
class JRODataReader(JRODataIO, ProcessingUnit):
    """
    Base reader for Jicamarca data files: locates folders/files (online or
    offline), opens them and exposes the data block by block.
    """

    # blocks read so far from the current file
    nReadBlocks = 0

    delay = 10    # number of seconds waiting a new file

    nTries = 3    # quantity tries

    nFiles = 3    # number of files for searching

    flagNoMoreFiles = 0
249 249
250 250 def __init__(self):
251 251
252 252 """
253 253
254 254 """
255 255
256 256 raise ValueError, "This method has not been implemented"
257 257
258 258
259 259 def createObjByDefault(self):
260 260 """
261 261
262 262 """
263 263 raise ValueError, "This method has not been implemented"
264 264
265 265 def getBlockDimension(self):
266 266
267 267 raise ValueError, "No implemented"
268 268
269 269 def __searchFilesOffLine(self,
270 270 path,
271 271 startDate,
272 272 endDate,
273 273 startTime=datetime.time(0,0,0),
274 274 endTime=datetime.time(23,59,59),
275 275 set=None,
276 276 expLabel="",
277 277 ext=".r"):
278 278 dirList = []
279 279 for thisPath in os.listdir(path):
280 280 if os.path.isdir(os.path.join(path,thisPath)):
281 281 dirList.append(thisPath)
282 282
283 283 if not(dirList):
284 284 return None, None
285 285
286 286 pathList = []
287 287 dateList = []
288 288
289 289 thisDate = startDate
290 290
291 291 while(thisDate <= endDate):
292 292 year = thisDate.timetuple().tm_year
293 293 doy = thisDate.timetuple().tm_yday
294 294
295 295 match = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy))
296 296 if len(match) == 0:
297 297 thisDate += datetime.timedelta(1)
298 298 continue
299 299
300 300 pathList.append(os.path.join(path,match[0],expLabel))
301 301 dateList.append(thisDate)
302 302 thisDate += datetime.timedelta(1)
303 303
304 304 filenameList = []
305 305 for index in range(len(pathList)):
306 306
307 307 thisPath = pathList[index]
308 308 fileList = glob.glob1(thisPath, "*%s" %ext)
309 309 fileList.sort()
310 310
311 311 #Busqueda de datos en el rango de horas indicados
312 312 thisDate = dateList[index]
313 313 startDT = datetime.datetime.combine(thisDate, startTime)
314 314 endDT = datetime.datetime.combine(thisDate, endTime)
315 315
316 316 startUtSeconds = time.mktime(startDT.timetuple())
317 317 endUtSeconds = time.mktime(endDT.timetuple())
318 318
319 319 for file in fileList:
320 320
321 321 filename = os.path.join(thisPath,file)
322 322
323 323 if isThisFileinRange(filename, startUtSeconds, endUtSeconds):
324 324 filenameList.append(filename)
325 325
326 326 if not(filenameList):
327 327 return None, None
328 328
329 329 self.filenameList = filenameList
330 330
331 331 return pathList, filenameList
332 332
    def __searchFilesOnLine(self, path, startDate=None, endDate=None, startTime=None, endTime=None, expLabel = "", ext = None):

        """
        Look for the last file of the last folder (optionally constrained by
        startDateTime) and return it together with its date fields.

        Input:
            path      : folder containing the day folders with data

            startDate : reject every folder where
                        file end time < startDate (datetime.date object)

            endDate   : reject every folder where
                        file start time > endDate (datetime.date object)

            startTime : reject every file where
                        file end time < startTime (datetime.time object)

            endTime   : reject every file where
                        file start time > endTime (datetime.time object)

            expLabel  : name of the sub-experiment (subfolder)

            ext       : file extension

        Return:
            directory : the folder where the file was found
            filename  : the last file of that folder
            year      : the year
            doy       : the day-of-year number
            set       : the file's set number
        """
        dirList = []
        pathList = []
        directory = None

        # keep directories only
        for thisPath in os.listdir(path):
            if os.path.isdir(os.path.join(path, thisPath)):
                dirList.append(thisPath)

        if not(dirList):
            return None, None, None, None, None

        dirList = sorted( dirList, key=str.lower )

        if startDate:
            startDateTime = datetime.datetime.combine(startDate, startTime)
            thisDateTime = startDateTime
            if endDate == None: endDateTime = startDateTime
            else: endDateTime = datetime.datetime.combine(endDate, endTime)

            # collect every day folder ?YYYYDDD inside the requested range
            while(thisDateTime <= endDateTime):
                year = thisDateTime.timetuple().tm_year
                doy = thisDateTime.timetuple().tm_yday

                match = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy))
                if len(match) == 0:
                    thisDateTime += datetime.timedelta(1)
                    continue

                pathList.append(os.path.join(path,match[0], expLabel))
                thisDateTime += datetime.timedelta(1)

            if not(pathList):
                print "\tNo files in range: %s - %s" %(startDateTime.ctime(), endDateTime.ctime())
                return None, None, None, None, None

            directory = pathList[0]

        else:
            # no date constraint: take the newest (last, case-insensitive) folder
            directory = dirList[-1]
            directory = os.path.join(path,directory)

        filename = getlastFileFromPath(directory, ext)

        if not(filename):
            return None, None, None, None, None

        # discard a file that cannot be read or lacks enough data
        if not(self.__verifyFile(os.path.join(directory, filename))):
            return None, None, None, None, None

        # filename layout: HYYYYDDDSSS.ext
        year = int( filename[1:5] )
        doy = int( filename[5:8] )
        set = int( filename[8:11] )

        return directory, filename, year, doy, set
422 422
423 423
424 424
425 425 def __setNextFileOffline(self):
426 426
427 427 idFile = self.fileIndex
428 428
429 429 while (True):
430 430 idFile += 1
431 431 if not(idFile < len(self.filenameList)):
432 432 self.flagNoMoreFiles = 1
433 433 print "No more Files"
434 434 return 0
435 435
436 436 filename = self.filenameList[idFile]
437 437
438 438 if not(self.__verifyFile(filename)):
439 439 continue
440 440
441 441 fileSize = os.path.getsize(filename)
442 442 fp = open(filename,'rb')
443 443 break
444 444
445 445 self.flagIsNewFile = 1
446 446 self.fileIndex = idFile
447 447 self.filename = filename
448 448 self.fileSize = fileSize
449 449 self.fp = fp
450 450
451 451 print "Setting the file: %s"%self.filename
452 452
453 453 return 1
454 454
    def __setNextFileOnline(self):
        """
        Look for the next file with enough data to be read inside the current
        folder; when no valid file is found, wait a fixed delay and retry over
        the next possible n files (and then the next day folder).

        Affected:
            self.flagIsNewFile
            self.filename
            self.fileSize
            self.fp
            self.set
            self.flagNoMoreFiles

        Return:
            0 : no valid next file could be found
            1 : the file was opened successfully and is ready to be read

        Exceptions:
            if a given file cannot be opened
        """
        nFiles = 0
        fileOk_flag = False
        firstTime_flag = True

        self.set += 1

        # look for the first available file
        file, filename = checkForRealPath( self.path, self.year, self.doy, self.set, self.ext )
        if file:
            if self.__verifyFile(file, False):
                fileOk_flag = True

        # if no file was found, wait and search again
        if not(fileOk_flag):
            # search over the next self.nFiles+1 candidate files
            for nFiles in range(self.nFiles+1):

                if firstTime_flag:    # first pass: retry self.nTries times
                    tries = self.nTries
                else:
                    tries = 1         # afterwards: only one try per candidate

                for nTries in range( tries ):
                    if firstTime_flag:
                        print "\tWaiting %0.2f sec for the file \"%s\" , try %03d ..." % ( self.delay, filename, nTries+1 )
                        time.sleep( self.delay )
                    else:
                        print "\tSearching next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext)

                    file, filename = checkForRealPath( self.path, self.year, self.doy, self.set, self.ext )
                    if file:
                        if self.__verifyFile(file):
                            fileOk_flag = True
                            break

                if fileOk_flag:
                    break

                firstTime_flag = False

                print "\tSkipping the file \"%s\" due to this file doesn't exist" % filename
                self.set += 1

                # last candidate of this folder: move on to the next day folder
                if nFiles == (self.nFiles-1):
                    self.set = 0
                    self.doy += 1

        if fileOk_flag:
            self.fileSize = os.path.getsize( file )
            self.filename = file
            self.flagIsNewFile = 1
            if self.fp != None: self.fp.close()
            self.fp = open(file, 'rb')
            self.flagNoMoreFiles = 0
            print 'Setting the file: %s' % file
        else:
            self.fileSize = 0
            self.filename = None
            self.flagIsNewFile = 0
            self.fp = None
            self.flagNoMoreFiles = 1
            print 'No more Files'

        return fileOk_flag
539 539
540 540
541 541 def setNextFile(self):
542 542 if self.fp != None:
543 543 self.fp.close()
544 544
545 545 if self.online:
546 546 newFile = self.__setNextFileOnline()
547 547 else:
548 548 newFile = self.__setNextFileOffline()
549 549
550 550 if not(newFile):
551 551 return 0
552 552
553 553 self.__readFirstHeader()
554 554 self.nReadBlocks = 0
555 555 return 1
556 556
    def __waitNewBlock(self):
        """
        Online mode only: when the current file is still being written and a
        full block has not yet been read, poll the file until one more
        complete block (plus its basic header) is available, then re-read
        the basic header.

        Return: 1 when a new block is ready, 0 otherwise.
        """
        # only meaningful online, while the file is not fully consumed yet
        if not self.online:
            return 0

        if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
            return 0

        currentPointer = self.fp.tell()

        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        for nTries in range( self.nTries ):

            # reopen so the OS-reported size is refreshed, then restore position
            self.fp.close()
            self.fp = open( self.filename, 'rb' )
            self.fp.seek( currentPointer )

            self.fileSize = os.path.getsize( self.filename )
            currentSize = self.fileSize - currentPointer

            if ( currentSize >= neededSize ):
                self.__rdBasicHeader()
                return 1

            print "\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
            time.sleep( self.delay )


        return 0
587 587
    def __setNewBlock(self):
        """
        Position the file pointer at the next data block, moving on to the
        next file when the current one is exhausted.  Sets flagTimeBlock
        when the time gap to the previous block exceeds maxTimeStep.

        Return: 1 when a block is available, 0 otherwise.
        """
        if self.fp == None:
            return 0

        # a freshly opened file is already positioned at its first block
        if self.flagIsNewFile:
            return 1

        self.lastUTTime = self.basicHeaderObj.utc
        currentSize = self.fileSize - self.fp.tell()
        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        # enough bytes left in this file for one more block
        if (currentSize >= neededSize):
            self.__rdBasicHeader()
            return 1

        # online: the file may still be growing, so wait for the block
        if self.__waitNewBlock():
            return 1

        if not(self.setNextFile()):
            return 0

        deltaTime = self.basicHeaderObj.utc - self.lastUTTime #

        self.flagTimeBlock = 0

        # flag a time discontinuity between consecutive blocks
        if deltaTime > self.maxTimeStep:
            self.flagTimeBlock = 1

        return 1
617 617
618 618
619 619 def readNextBlock(self):
620 620 if not(self.__setNewBlock()):
621 621 return 0
622 622
623 623 if not(self.readBlock()):
624 624 return 0
625 625
626 626 return 1
627 627
628 628 def __rdProcessingHeader(self, fp=None):
629 629 if fp == None:
630 630 fp = self.fp
631 631
632 632 self.processingHeaderObj.read(fp)
633 633
634 634 def __rdRadarControllerHeader(self, fp=None):
635 635 if fp == None:
636 636 fp = self.fp
637 637
638 638 self.radarControllerHeaderObj.read(fp)
639 639
640 640 def __rdSystemHeader(self, fp=None):
641 641 if fp == None:
642 642 fp = self.fp
643 643
644 644 self.systemHeaderObj.read(fp)
645 645
646 646 def __rdBasicHeader(self, fp=None):
647 647 if fp == None:
648 648 fp = self.fp
649 649
650 650 self.basicHeaderObj.read(fp)
651 651
652 652
653 653 def __readFirstHeader(self):
654 654 self.__rdBasicHeader()
655 655 self.__rdSystemHeader()
656 656 self.__rdRadarControllerHeader()
657 657 self.__rdProcessingHeader()
658 658
659 659 self.firstHeaderSize = self.basicHeaderObj.size
660 660
661 661 datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
662 662 if datatype == 0:
663 663 datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
664 664 elif datatype == 1:
665 665 datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')])
666 666 elif datatype == 2:
667 667 datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')])
668 668 elif datatype == 3:
669 669 datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')])
670 670 elif datatype == 4:
671 671 datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')])
672 672 elif datatype == 5:
673 673 datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')])
674 674 else:
675 675 raise ValueError, 'Data type was not defined'
676 676
677 677 self.dtype = datatype_str
678 678 self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
679 679 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1)
680 680 # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
681 681 # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
682 682 self.getBlockDimension()
683 683
684 684
    def __verifyFile(self, filename, msgFlag=True):
        """
        Check whether *filename* holds at least one complete data block.

        When the expected block size is not yet known (neededSize == 0) the
        headers are read from the file itself to compute it.

        Inputs:
            filename : full path of the file to verify
            msgFlag  : when True, print a diagnostic for rejected files

        Return:
            True if the file holds at least one full block, False otherwise.
        """
        msg = None
        try:
            fp = open(filename, 'rb')
            currentPosition = fp.tell()
        except:
            # best-effort: any open failure just rejects the file
            if msgFlag:
                print "The file %s can't be opened" % (filename)
            return False

        neededSize = self.processingHeaderObj.blockSize + self.firstHeaderSize

        if neededSize == 0:
            # block size unknown: read the headers of this very file to get it
            basicHeaderObj = BasicHeader()
            systemHeaderObj = SystemHeader()
            radarControllerHeaderObj = RadarControllerHeader()
            processingHeaderObj = ProcessingHeader()

            try:
                if not( basicHeaderObj.read(fp) ): raise IOError
                if not( systemHeaderObj.read(fp) ): raise IOError
                if not( radarControllerHeaderObj.read(fp) ): raise IOError
                if not( processingHeaderObj.read(fp) ): raise IOError
                data_type = int(numpy.log2((processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))

                neededSize = processingHeaderObj.blockSize + basicHeaderObj.size

            except:
                # truncated or empty header area: reject the file
                if msgFlag:
                    print "\tThe file %s is empty or it hasn't enough data" % filename

                fp.close()
                return False
        else:
            # size already known; only report if the size check below fails
            msg = "\tSkipping the file %s due to it hasn't enough data" %filename

        fp.close()
        fileSize = os.path.getsize(filename)
        currentSize = fileSize - currentPosition
        if currentSize < neededSize:
            if msgFlag and (msg != None):
                print msg
            return False

        return True
730 730
    def setup(self,
              path=None,
              startDate=None,
              endDate=None,
              startTime=datetime.time(0,0,0),
              endTime=datetime.time(23,59,59),
              set=0,
              expLabel = "",
              ext = None,
              online = False,
              delay = 60):
        """
        Configure the reader, search for input files and open the first one.

        Inputs:
            path      : root directory holding the data subfolders
            startDate, endDate : calendar range to read (offline mode)
            startTime, endTime : time-of-day window (offline mode)
            set       : file set number to start from
            expLabel  : experiment subfolder label
            ext       : file extension; defaults to self.ext
            online    : when True, poll *path* for files still being written
            delay     : seconds between online retries

        Return:
            self.dataOut on success; None when online search fails;
            exits the process (sys.exit) when the offline search finds nothing.
        """

        if path == None:
            raise ValueError, "The path is not valid"

        if ext == None:
            ext = self.ext

        if online:
            print "Searching files in online mode..."
            doypath, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext)

            if not(doypath):
                # acquisition may not have produced a file yet: retry a few times
                for nTries in range( self.nTries ):
                    print '\tWaiting %0.2f sec for an valid file in %s: try %02d ...' % (self.delay, path, nTries+1)
                    time.sleep( self.delay )
                    doypath, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext)
                    if doypath:
                        break

            if not(doypath):
                print "There 'isn't valied files in %s" % path
                return None

            self.year = year
            self.doy = doy
            self.set = set - 1
            self.path = path

        else:
            print "Searching files in offline mode ..."
            pathList, filenameList = self.__searchFilesOffLine(path, startDate, endDate, startTime, endTime, set, expLabel, ext)

            if not(pathList):
                print "No *%s files into the folder %s \nfor the range: %s - %s"%(ext, path,
                                                                                 datetime.datetime.combine(startDate,startTime).ctime(),
                                                                                 datetime.datetime.combine(endDate,endTime).ctime())

                sys.exit(-1)

            # NOTE(review): these three assignments appear to belong to the
            # offline branch (pathList/filenameList exist only there) -- the
            # rendered view makes the exact indentation ambiguous; confirm
            # against revision history.
            self.fileIndex = -1
            self.pathList = pathList
            self.filenameList = filenameList

        self.online = online
        self.delay = delay
        ext = ext.lower()
        self.ext = ext

        if not(self.setNextFile()):
            if (startDate!=None) and (endDate!=None):
                print "No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
            elif startDate != None:
                print "No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
            else:
                print "No files"

            sys.exit(-1)

        return self.dataOut
804 804
805 805 def getData():
806 806
807 807 raise ValueError, "This method has not been implemented"
808 808
809 809 def hasNotDataInBuffer():
810 810
811 811 raise ValueError, "This method has not been implemented"
812 812
813 813 def readBlock():
814 814
815 815 raise ValueError, "This method has not been implemented"
816 816
817 817 def isEndProcess(self):
818 818
819 819 return self.flagNoMoreFiles
820 820
821 821 def printReadBlocks(self):
822 822
823 823 print "Number of read blocks per file %04d" %self.nReadBlocks
824 824
825 825 def printTotalBlocks(self):
826 826
827 827 print "Number of read blocks %04d" %self.nTotalBlocks
828 828
    def printInfo(self):
        # Dump all four header sections of the current file.
        # NOTE(review): if each printInfo() call prints on its own and
        # returns None, the outer ``print`` will also emit a literal "None"
        # line -- confirm against the header classes in jroheaderIO.
        print self.basicHeaderObj.printInfo()
        print self.systemHeaderObj.printInfo()
        print self.radarControllerHeaderObj.printInfo()
        print self.processingHeaderObj.printInfo()
835
836
829 837 def run(self, **kwargs):
830 838
831 839 if not(self.isConfig):
832 840
833 841 # self.dataOut = dataOut
834 842 self.setup(**kwargs)
835 843 self.isConfig = True
836 844
837 845 self.getData()
838 846
class JRODataWriter(JRODataIO, Operation):

    """
    Base class for writing processed data files (.r or .pdata). Data is
    always written to disk one block at a time. Subclasses must implement
    the abstract hooks: hasAllDataInBuffer, setBlockDimension, writeBlock,
    putData and getDataHeader.
    """

    # index of the block being written inside the current file
    blockIndex = 0

    # destination root directory
    path = None

    # running set number used to build each output file name
    setFile = None

    profilesPerBlock = None

    blocksPerFile = None

    nWriteBlocks = 0

    def __init__(self, dataOut=None):
        raise ValueError, "Not implemented"


    def hasAllDataInBuffer(self):
        raise ValueError, "Not implemented"


    def setBlockDimension(self):
        raise ValueError, "Not implemented"


    def writeBlock(self):
        raise ValueError, "No implemented"


    def putData(self):
        raise ValueError, "No implemented"

    def getDataHeader(self):
        """
        Get a copy of the First Header.

        Affected:

            self.basicHeaderObj
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.processingHeaderObj

        Return:
            None
        """

        raise ValueError, "No implemented"

    def getBasicHeader(self):
        # Fill the basic header with the metadata of the block about to be written.
        self.basicHeaderObj.size = self.basicHeaderSize #bytes
        self.basicHeaderObj.version = self.versionFile
        self.basicHeaderObj.dataBlock = self.nTotalBlocks

        # split the timestamp into integer seconds plus milliseconds
        utc = numpy.floor(self.dataOut.utctime)
        milisecond = (self.dataOut.utctime - utc)* 1000.0

        self.basicHeaderObj.utc = utc
        self.basicHeaderObj.miliSecond = milisecond
        self.basicHeaderObj.timeZone = 0
        self.basicHeaderObj.dstFlag = 0
        self.basicHeaderObj.errorCount = 0

    def __writeFirstHeader(self):
        """
        Write the first header of the file: the Basic header plus the long
        header (SystemHeader, RadarControllerHeader, ProcessingHeader).

        Affected:
            __dataType

        Return:
            None
        """

        # COMPUTE PARAMETERS

        sizeLongHeader = self.systemHeaderObj.size + self.radarControllerHeaderObj.size + self.processingHeaderObj.size
        self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader

        self.basicHeaderObj.write(self.fp)
        self.systemHeaderObj.write(self.fp)
        self.radarControllerHeaderObj.write(self.fp)
        self.processingHeaderObj.write(self.fp)

        self.dtype = self.dataOut.dtype

    def __setNewBlock(self):
        """
        If a new file was just opened write the First Header, otherwise
        write only a Basic Header.

        Return:
            0 : nothing could be written
            1 : the Basic or the First Header was written
        """
        if self.fp == None:
            self.setNextFile()

        if self.flagIsNewFile:
            # the First Header was already written by setNextFile()
            return 1

        if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
            self.basicHeaderObj.write(self.fp)
            return 1

        # current file is full: roll over to the next one
        if not( self.setNextFile() ):
            return 0

        return 1


    def writeNextBlock(self):
        """
        Select the next data block and write it to a file.

        Return:
            0 : the data block could not be written
            1 : the data block was written
        """
        if not( self.__setNewBlock() ):
            return 0

        self.writeBlock()

        return 1

    def setNextFile(self):
        """
        Determine the next file to be written and open it.

        Affected:
            self.filename
            self.subfolder
            self.fp
            self.setFile
            self.flagIsNewFile

        Return:
            0 : the file cannot be written
            1 : the file is ready to be written
        """
        ext = self.ext
        path = self.path

        if self.fp != None:
            self.fp.close()

        timeTuple = time.localtime( self.dataOut.dataUtcTime)
        subfolder = 'D%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)

        doypath = os.path.join( path, subfolder )
        if not( os.path.exists(doypath) ):
            os.mkdir(doypath)
            self.setFile = -1 #initialize the set counter
        else:
            filesList = os.listdir( doypath )
            if len( filesList ) > 0:
                filesList = sorted( filesList, key=str.lower )
                filen = filesList[-1]
                # the filename must have the following format
                # 0 1234 567 89A BCDE (hex)
                # x YYYY DDD SSS .ext
                if isNumber( filen[8:11] ):
                    self.setFile = int( filen[8:11] ) #continue from the set of the last existing file
                else:
                    self.setFile = -1
            else:
                self.setFile = -1 #initialize the set counter

        setFile = self.setFile
        setFile += 1

        file = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
                                        timeTuple.tm_year,
                                        timeTuple.tm_yday,
                                        setFile,
                                        ext )

        filename = os.path.join( path, subfolder, file )

        fp = open( filename,'wb' )

        self.blockIndex = 0

        # keep the new-file state
        self.filename = filename
        self.subfolder = subfolder
        self.fp = fp
        self.setFile = setFile
        self.flagIsNewFile = 1

        self.getDataHeader()

        print 'Writing the file: %s'%self.filename

        self.__writeFirstHeader()

        return 1

    def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=None, set=0, ext=None):
        """
        Set the format the data will be saved in and write the First Header.

        Inputs:
            dataOut : data container being written
            path : destination directory for the created files
            blocksPerFile : number of data blocks per output file
            profilesPerBlock : profiles stored per block
            set : starting set number of the file
            ext : file extension; defaults to self.ext

        Return:
            0 : setup failed
            1 : setup succeeded
        """

        if ext == None:
            ext = self.ext

        ext = ext.lower()

        self.ext = ext

        self.path = path

        self.setFile = set - 1

        self.blocksPerFile = blocksPerFile

        self.profilesPerBlock = profilesPerBlock

        self.dataOut = dataOut

        if not(self.setNextFile()):
            print "There isn't a next file"
            return 0

        self.setBlockDimension()

        return 1

    def run(self, dataOut, **kwargs):
        # One-time lazy configuration, then push the current data unit.
        if not(self.isConfig):

            self.setup(dataOut, **kwargs)
            self.isConfig = True

        self.putData()
1091 1099
1092 1100 class VoltageReader(JRODataReader):
1093 1101 """
1094 1102 Esta clase permite leer datos de voltage desde archivos en formato rawdata (.r). La lectura
1095 1103 de los datos siempre se realiza por bloques. Los datos leidos (array de 3 dimensiones:
1096 1104 perfiles*alturas*canales) son almacenados en la variable "buffer".
1097 1105
1098 1106 perfiles * alturas * canales
1099 1107
1100 1108 Esta clase contiene instancias (objetos) de las clases BasicHeader, SystemHeader,
1101 1109 RadarControllerHeader y Voltage. Los tres primeros se usan para almacenar informacion de la
1102 1110 cabecera de datos (metadata), y el cuarto (Voltage) para obtener y almacenar un perfil de
1103 1111 datos desde el "buffer" cada vez que se ejecute el metodo "getData".
1104 1112
1105 1113 Example:
1106 1114
1107 1115 dpath = "/home/myuser/data"
1108 1116
1109 1117 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
1110 1118
1111 1119 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
1112 1120
1113 1121 readerObj = VoltageReader()
1114 1122
1115 1123 readerObj.setup(dpath, startTime, endTime)
1116 1124
1117 1125 while(True):
1118 1126
1119 1127 #to get one profile
1120 1128 profile = readerObj.getData()
1121 1129
1122 1130 #print the profile
1123 1131 print profile
1124 1132
1125 1133 #If you want to see all datablock
1126 1134 print readerObj.datablock
1127 1135
1128 1136 if readerObj.flagNoMoreFiles:
1129 1137 break
1130 1138
1131 1139 """
1132 1140
1133 1141 ext = ".r"
1134 1142
1135 1143 optchar = "D"
1136 1144 dataOut = None
1137 1145
1138 1146
1139 1147 def __init__(self):
1140 1148 """
1141 1149 Inicializador de la clase VoltageReader para la lectura de datos de voltage.
1142 1150
1143 1151 Input:
1144 1152 dataOut : Objeto de la clase Voltage. Este objeto sera utilizado para
1145 1153 almacenar un perfil de datos cada vez que se haga un requerimiento
1146 1154 (getData). El perfil sera obtenido a partir del buffer de datos,
1147 1155 si el buffer esta vacio se hara un nuevo proceso de lectura de un
1148 1156 bloque de datos.
1149 1157 Si este parametro no es pasado se creara uno internamente.
1150 1158
1151 1159 Variables afectadas:
1152 1160 self.dataOut
1153 1161
1154 1162 Return:
1155 1163 None
1156 1164 """
1157 1165
1158 1166 self.isConfig = False
1159 1167
1160 1168 self.datablock = None
1161 1169
1162 1170 self.utc = 0
1163 1171
1164 1172 self.ext = ".r"
1165 1173
1166 1174 self.optchar = "D"
1167 1175
1168 1176 self.basicHeaderObj = BasicHeader()
1169 1177
1170 1178 self.systemHeaderObj = SystemHeader()
1171 1179
1172 1180 self.radarControllerHeaderObj = RadarControllerHeader()
1173 1181
1174 1182 self.processingHeaderObj = ProcessingHeader()
1175 1183
1176 1184 self.online = 0
1177 1185
1178 1186 self.fp = None
1179 1187
1180 1188 self.idFile = None
1181 1189
1182 1190 self.dtype = None
1183 1191
1184 1192 self.fileSizeByHeader = None
1185 1193
1186 1194 self.filenameList = []
1187 1195
1188 1196 self.filename = None
1189 1197
1190 1198 self.fileSize = None
1191 1199
1192 1200 self.firstHeaderSize = 0
1193 1201
1194 1202 self.basicHeaderSize = 24
1195 1203
1196 1204 self.pathList = []
1197 1205
1198 1206 self.filenameList = []
1199 1207
1200 1208 self.lastUTTime = 0
1201 1209
1202 1210 self.maxTimeStep = 30
1203 1211
1204 1212 self.flagNoMoreFiles = 0
1205 1213
1206 1214 self.set = 0
1207 1215
1208 1216 self.path = None
1209 1217
1210 1218 self.profileIndex = 9999
1211 1219
1212 1220 self.delay = 3 #seconds
1213 1221
1214 1222 self.nTries = 3 #quantity tries
1215 1223
1216 1224 self.nFiles = 3 #number of files for searching
1217 1225
1218 1226 self.nReadBlocks = 0
1219 1227
1220 1228 self.flagIsNewFile = 1
1221 1229
1222 1230 self.ippSeconds = 0
1223 1231
1224 1232 self.flagTimeBlock = 0
1225 1233
1226 1234 self.flagIsNewBlock = 0
1227 1235
1228 1236 self.nTotalBlocks = 0
1229 1237
1230 1238 self.blocksize = 0
1231 1239
1232 1240 self.dataOut = self.createObjByDefault()
1233 1241
1234 1242 def createObjByDefault(self):
1235 1243
1236 1244 dataObj = Voltage()
1237 1245
1238 1246 return dataObj
1239 1247
1240 1248 def __hasNotDataInBuffer(self):
1241 1249 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
1242 1250 return 1
1243 1251 return 0
1244 1252
1245 1253
1246 1254 def getBlockDimension(self):
1247 1255 """
1248 1256 Obtiene la cantidad de puntos a leer por cada bloque de datos
1249 1257
1250 1258 Affected:
1251 1259 self.blocksize
1252 1260
1253 1261 Return:
1254 1262 None
1255 1263 """
1256 1264 pts2read = self.processingHeaderObj.profilesPerBlock * self.processingHeaderObj.nHeights * self.systemHeaderObj.nChannels
1257 1265 self.blocksize = pts2read
1258 1266
1259 1267
1260 1268 def readBlock(self):
1261 1269 """
1262 1270 readBlock lee el bloque de datos desde la posicion actual del puntero del archivo
1263 1271 (self.fp) y actualiza todos los parametros relacionados al bloque de datos
1264 1272 (metadata + data). La data leida es almacenada en el buffer y el contador del buffer
1265 1273 es seteado a 0
1266 1274
1267 1275 Inputs:
1268 1276 None
1269 1277
1270 1278 Return:
1271 1279 None
1272 1280
1273 1281 Affected:
1274 1282 self.profileIndex
1275 1283 self.datablock
1276 1284 self.flagIsNewFile
1277 1285 self.flagIsNewBlock
1278 1286 self.nTotalBlocks
1279 1287
1280 1288 Exceptions:
1281 1289 Si un bloque leido no es un bloque valido
1282 1290 """
1283 1291
1284 1292 junk = numpy.fromfile( self.fp, self.dtype, self.blocksize )
1285 1293
1286 1294 try:
1287 1295 junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
1288 1296 except:
1289 1297 print "The read block (%3d) has not enough data" %self.nReadBlocks
1290 1298 return 0
1291 1299
1292 1300 junk = numpy.transpose(junk, (2,0,1))
1293 1301 self.datablock = junk['real'] + junk['imag']*1j
1294 1302
1295 1303 self.profileIndex = 0
1296 1304
1297 1305 self.flagIsNewFile = 0
1298 1306 self.flagIsNewBlock = 1
1299 1307
1300 1308 self.nTotalBlocks += 1
1301 1309 self.nReadBlocks += 1
1302 1310
1303 1311 return 1
1304 1312
1305 1313
1306 1314 def getData(self):
1307 1315 """
1308 1316 getData obtiene una unidad de datos del buffer de lectura y la copia a la clase "Voltage"
1309 1317 con todos los parametros asociados a este (metadata). cuando no hay datos en el buffer de
1310 1318 lectura es necesario hacer una nueva lectura de los bloques de datos usando "readNextBlock"
1311 1319
1312 1320 Ademas incrementa el contador del buffer en 1.
1313 1321
1314 1322 Return:
1315 1323 data : retorna un perfil de voltages (alturas * canales) copiados desde el
1316 1324 buffer. Si no hay mas archivos a leer retorna None.
1317 1325
1318 1326 Variables afectadas:
1319 1327 self.dataOut
1320 1328 self.profileIndex
1321 1329
1322 1330 Affected:
1323 1331 self.dataOut
1324 1332 self.profileIndex
1325 1333 self.flagTimeBlock
1326 1334 self.flagIsNewBlock
1327 1335 """
1328 1336
1329 1337 if self.flagNoMoreFiles:
1330 1338 self.dataOut.flagNoData = True
1331 1339 print 'Process finished'
1332 1340 return 0
1333 1341
1334 1342 self.flagTimeBlock = 0
1335 1343 self.flagIsNewBlock = 0
1336 1344
1337 1345 if self.__hasNotDataInBuffer():
1338 1346
1339 1347 if not( self.readNextBlock() ):
1340 1348 return 0
1341 1349
1342 # self.updateDataHeader()
1343
1344 #data es un numpy array de 3 dmensiones (perfiles, alturas y canales)
1345
1346 if self.datablock == None:
1347 self.dataOut.flagNoData = True
1348 return 0
1349
1350 self.dataOut.data = self.datablock[:,self.profileIndex,:]
1351
1352 1350 self.dataOut.dtype = self.dtype
1353 1351
1354 # self.dataOut.nChannels = self.systemHeaderObj.nChannels
1355
1356 # self.dataOut.nHeights = self.processingHeaderObj.nHeights
1357
1358 1352 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
1359 1353
1360 1354 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
1361 1355
1362 1356 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
1363 1357
1364 1358 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
1365 1359
1366 # self.dataOut.channelIndexList = range(self.systemHeaderObj.nChannels)
1367
1368 1360 self.dataOut.flagTimeBlock = self.flagTimeBlock
1369 1361
1370 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000. + self.profileIndex * self.ippSeconds
1371
1372 1362 self.dataOut.ippSeconds = self.ippSeconds
1373 1363
1374 1364 self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt
1375 1365
1376 1366 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
1377 1367
1378 1368 self.dataOut.flagShiftFFT = False
1379 1369
1380 1370 if self.processingHeaderObj.code != None:
1381 1371 self.dataOut.nCode = self.processingHeaderObj.nCode
1382 1372
1383 1373 self.dataOut.nBaud = self.processingHeaderObj.nBaud
1384 1374
1385 1375 self.dataOut.code = self.processingHeaderObj.code
1386 1376
1387 self.profileIndex += 1
1388
1389 1377 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
1390 1378
1391 1379 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
1392 1380
1381 # self.updateDataHeader()
1382
1383 #data es un numpy array de 3 dmensiones (perfiles, alturas y canales)
1384
1385 if self.datablock == None:
1386 self.dataOut.flagNoData = True
1387 return 0
1388
1389 self.dataOut.data = self.datablock[:,self.profileIndex,:]
1390
1391 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000. + self.profileIndex * self.ippSeconds
1392
1393 self.profileIndex += 1
1394
1393 1395 self.dataOut.flagNoData = False
1394 1396
1395 1397 # print self.profileIndex, self.dataOut.utctime
1396 1398 # if self.profileIndex == 800:
1397 1399 # a=1
1398 1400
1401
1399 1402 return self.dataOut.data
1400 1403
1401 1404
class VoltageWriter(JRODataWriter):
    """
    Writer for voltage data to processed (.r) files. Data is always
    written to disk one block at a time.
    """

    ext = ".r"

    optchar = "D"

    # on-disk shape of one block: (profiles, heights, channels)
    shapeBuffer = None


    def __init__(self):
        """
        Initialize the VoltageWriter.

        Affected:
            self.dataOut

        Return: None
        """

        # fix: the original assigned self.nTotalBlocks twice; once is enough
        self.nTotalBlocks = 0

        self.profileIndex = 0

        self.isConfig = False

        self.fp = None

        self.flagIsNewFile = 1

        self.flagIsNewBlock = 0

        self.setFile = None

        self.dtype = None

        self.path = None

        self.filename = None

        self.basicHeaderObj = BasicHeader()

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()

    def hasAllDataInBuffer(self):
        # The block buffer is full once every profile slot has been filled.
        if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
            return 1
        return 0


    def setBlockDimension(self):
        """
        Set the dimensional shapes of the sub-blocks that make up a block
        and allocate the block buffer.

        Affected:
            self.shapeBuffer
            self.datablock

        Return: None
        """
        # on-disk layout: (profiles, heights, channels)
        self.shapeBuffer = (self.processingHeaderObj.profilesPerBlock,
                            self.processingHeaderObj.nHeights,
                            self.systemHeaderObj.nChannels)

        # in-memory layout: (channels, profiles, heights)
        self.datablock = numpy.zeros((self.systemHeaderObj.nChannels,
                                      self.processingHeaderObj.profilesPerBlock,
                                      self.processingHeaderObj.nHeights),
                                     dtype=numpy.dtype('complex'))


    def writeBlock(self):
        """
        Write the block buffer to the current file.

        Affected:
            self.profileIndex
            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
            self.blockIndex

        Return: None
        """
        data = numpy.zeros( self.shapeBuffer, self.dtype )

        # back to the on-disk (profiles, heights, channels) layout
        junk = numpy.transpose(self.datablock, (1,2,0))

        data['real'] = junk.real
        data['imag'] = junk.imag

        data = data.reshape( (-1) )

        data.tofile( self.fp )

        self.datablock.fill(0)

        self.profileIndex = 0
        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1

        self.blockIndex += 1
        self.nTotalBlocks += 1

    def putData(self):
        """
        Store one data unit into the block buffer and flush the buffer to
        disk when it is full.

        Affected:
            self.flagIsNewBlock
            self.profileIndex

        Return:
            0 : no data, or no more files can be written
            1 : the data of a block was written to a file
        """
        if self.dataOut.flagNoData:
            return 0

        self.flagIsNewBlock = 0

        if self.dataOut.flagTimeBlock:
            # time discontinuity: restart the block in a fresh file
            self.datablock.fill(0)
            self.profileIndex = 0
            self.setNextFile()

        if self.profileIndex == 0:
            self.getBasicHeader()

        self.datablock[:,self.profileIndex,:] = self.dataOut.data

        self.profileIndex += 1

        if self.hasAllDataInBuffer():
            #if self.flagIsNewFile:
            self.writeNextBlock()

        return 1

    def __getProcessFlags(self):
        # Build the processFlags bitmask from the sample dtype and the
        # processing already applied to the data.
        processFlags = 0

        dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
        dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
        dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
        dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
        dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
        dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]

        datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
                             PROCFLAG.DATATYPE_SHORT,
                             PROCFLAG.DATATYPE_LONG,
                             PROCFLAG.DATATYPE_INT64,
                             PROCFLAG.DATATYPE_FLOAT,
                             PROCFLAG.DATATYPE_DOUBLE]

        # NOTE(review): if dataOut.dtype matches none of the six supported
        # formats this raises NameError on dtypeValue, as before.
        for index in range(len(dtypeList)):
            if self.dataOut.dtype == dtypeList[index]:
                dtypeValue = datatypeValueList[index]
                break

        processFlags += dtypeValue

        if self.dataOut.flagDecodeData:
            processFlags += PROCFLAG.DECODE_DATA

        if self.dataOut.flagDeflipData:
            processFlags += PROCFLAG.DEFLIP_DATA

        # fix: ``code`` may be a numpy array, for which ``!= None`` is an
        # elementwise comparison; an identity test is what is intended
        if self.dataOut.code is not None:
            processFlags += PROCFLAG.DEFINE_PROCESS_CODE

        if self.dataOut.nCohInt > 1:
            processFlags += PROCFLAG.COHERENT_INTEGRATION

        return processFlags


    def __getBlockSize(self):
        '''
        Return the number of bytes of one Voltage data block.
        '''

        dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
        dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
        dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
        dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
        dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
        dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
        datatypeValueList = [1,2,4,8,4,8]    # bytes per component
        for index in range(len(dtypeList)):
            if self.dataOut.dtype == dtypeList[index]:
                datatypeValue = datatypeValueList[index]
                break

        # * 2 because every point is a (real, imag) pair
        blocksize = int(self.dataOut.nHeights * self.dataOut.nChannels * self.dataOut.nProfiles * datatypeValue * 2)

        return blocksize

    def getDataHeader(self):

        """
        Get a copy of the First Header.

        Affected:
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.dtype

        Return:
            None
        """

        self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
        self.systemHeaderObj.nChannels = self.dataOut.nChannels
        self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()

        self.getBasicHeader()

        processingHeaderSize = 40    # bytes, fixed part
        self.processingHeaderObj.dtype = 0    # Voltage
        self.processingHeaderObj.blockSize = self.__getBlockSize()
        self.processingHeaderObj.profilesPerBlock = self.profilesPerBlock
        self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
        self.processingHeaderObj.nWindows = 1    # could be 1 or self.dataOut.processingHeaderObj.nWindows
        self.processingHeaderObj.processFlags = self.__getProcessFlags()
        self.processingHeaderObj.nCohInt = self.dataOut.nCohInt
        self.processingHeaderObj.nIncohInt = 1    # source data is Voltage
        self.processingHeaderObj.totalSpectra = 0    # source data is Voltage

        # fix: ``code`` may be a numpy array, for which ``!= None`` is an
        # elementwise comparison; an identity test is what is intended
        if self.dataOut.code is not None:
            self.processingHeaderObj.code = self.dataOut.code
            self.processingHeaderObj.nCode = self.dataOut.nCode
            self.processingHeaderObj.nBaud = self.dataOut.nBaud
            codesize = int(8 + 4 * self.dataOut.nCode * self.dataOut.nBaud)
            processingHeaderSize += codesize

        if self.processingHeaderObj.nWindows != 0:
            self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
            self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
            self.processingHeaderObj.nHeights = self.dataOut.nHeights
            self.processingHeaderObj.samplesWin = self.dataOut.nHeights
            processingHeaderSize += 12

        self.processingHeaderObj.size = processingHeaderSize
1666 1669
class SpectraReader(JRODataReader):
    """
    Reads spectra data from processed (.pdata) files. Reading is always done
    block by block. The data read (3-dimensional arrays) are stored in three
    buffers: one for the Self Spectra, one for the Cross Spectra and one for
    the DC channels.

        equalChannelPairs     * heights * profiles   (Self Spectra)
        differentChannelPairs * heights * profiles   (Cross Spectra)
        channels * heights                           (DC Channels)

    This class holds instances of BasicHeader, SystemHeader,
    RadarControllerHeader and Spectra. The first three store the data header
    (metadata); the fourth (Spectra) receives a block of data from the buffer
    every time the "getData" method is executed.

    Example:
        dpath = "/home/myuser/data"

        startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)

        endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)

        readerObj = SpectraReader()

        readerObj.setup(dpath, startTime, endTime)

        while(True):

            readerObj.getData()

            print readerObj.data_spc

            print readerObj.data_cspc

            print readerObj.data_dc

            if readerObj.flagNoMoreFiles:
                break

    """

    # Number of values to read per block for each section; set by
    # getBlockDimension() from the processing header.
    pts2read_SelfSpectra = 0

    pts2read_CrossSpectra = 0

    pts2read_DCchannels = 0

    ext = ".pdata"

    optchar = "P"

    dataOut = None

    # Channels stored as self-spectra in the file.
    nRdChannels = None

    # Channel pairs stored as cross-spectra in the file.
    nRdPairs = None

    # NOTE(review): mutable class attribute; shared across instances until
    # getBlockDimension() rebinds it per instance.
    rdPairList = []


    def __init__(self):
        """
        Initializer of the SpectraReader class for reading spectra data.

        Inputs:
            dataOut : Spectra object. It is used to store one profile of data
                      on each request (getData). The profile is taken from the
                      data buffer; if the buffer is empty a new block read is
                      performed.
                      If this parameter is not given, one is created internally.

        Affected:
            self.dataOut

        Return : None
        """

        self.isConfig = False

        self.pts2read_SelfSpectra = 0

        self.pts2read_CrossSpectra = 0

        self.pts2read_DCchannels = 0

        self.datablock = None

        self.utc = None

        self.ext = ".pdata"

        self.optchar = "P"

        self.basicHeaderObj = BasicHeader()

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()

        self.online = 0

        self.fp = None

        self.idFile = None

        self.dtype = None

        self.fileSizeByHeader = None

        self.filenameList = []

        self.filename = None

        self.fileSize = None

        self.firstHeaderSize = 0

        self.basicHeaderSize = 24

        self.pathList = []

        self.lastUTTime = 0

        self.maxTimeStep = 30

        self.flagNoMoreFiles = 0

        self.set = 0

        self.path = None

        self.delay = 3    #seconds

        self.nTries = 3  #quantity tries

        self.nFiles = 3  #number of files for searching

        self.nReadBlocks = 0

        self.flagIsNewFile = 1

        self.ippSeconds = 0

        self.flagTimeBlock = 0

        self.flagIsNewBlock = 0

        self.nTotalBlocks = 0

        self.blocksize = 0

        self.dataOut = self.createObjByDefault()


    def createObjByDefault(self):
        # Default output container when the caller does not supply one.
        dataObj = Spectra()

        return dataObj

    def __hasNotDataInBuffer(self):
        # Spectra blocks are consumed whole, so a new block is always needed.
        return 1


    def getBlockDimension(self):
        """
        Computes how many values must be read for each data block, based on the
        spectraComb table of the processing header.

        Affected:
            self.nRdChannels
            self.nRdPairs
            self.pts2read_SelfSpectra
            self.pts2read_CrossSpectra
            self.pts2read_DCchannels
            self.blocksize
            self.dataOut.nChannels
            self.dataOut.nPairs

        Return:
            None
        """
        self.nRdChannels = 0
        self.nRdPairs = 0
        self.rdPairList = []

        # spectraComb holds channel indices two by two: an equal pair is a
        # self-spectrum, a different pair is a cross-spectrum.
        for i in range(0, self.processingHeaderObj.totalSpectra*2, 2):
            if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i+1]:
                self.nRdChannels = self.nRdChannels + 1 #pair of equal channels
            else:
                self.nRdPairs = self.nRdPairs + 1 #pair of different channels
                self.rdPairList.append((self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1]))

        pts2read = self.processingHeaderObj.nHeights * self.processingHeaderObj.profilesPerBlock

        self.pts2read_SelfSpectra = int(self.nRdChannels * pts2read)
        self.blocksize = self.pts2read_SelfSpectra

        if self.processingHeaderObj.flag_cspc:
            self.pts2read_CrossSpectra = int(self.nRdPairs * pts2read)
            self.blocksize += self.pts2read_CrossSpectra

        if self.processingHeaderObj.flag_dc:
            self.pts2read_DCchannels = int(self.systemHeaderObj.nChannels * self.processingHeaderObj.nHeights)
            self.blocksize += self.pts2read_DCchannels

#        self.blocksize = self.pts2read_SelfSpectra + self.pts2read_CrossSpectra + self.pts2read_DCchannels


    def readBlock(self):
        """
        Reads one data block from the current file-pointer position (self.fp)
        and updates every block-related parameter (metadata + data). The data
        read is stored in the buffers and the buffer counter is reset to 0.

        Return: None

        Affected:

            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
            self.data_spc
            self.data_cspc
            self.data_dc

        Exceptions:
            If a block read is not a valid block
        """
        blockOk_flag = False
        fpointer = self.fp.tell()

        # NOTE(review): self-spectra use self.dtype[0] (the 'real' field only)
        # while cspc/dc below read the full real/imag struct dtype -- presumably
        # intentional (powers are real-valued); confirm against the writer.
        spc = numpy.fromfile( self.fp, self.dtype[0], self.pts2read_SelfSpectra )
        spc = spc.reshape( (self.nRdChannels, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #reshape to a 3D array

        if self.processingHeaderObj.flag_cspc:
            cspc = numpy.fromfile( self.fp, self.dtype, self.pts2read_CrossSpectra )
            cspc = cspc.reshape( (self.nRdPairs, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #reshape to a 3D array

        if self.processingHeaderObj.flag_dc:
            dc = numpy.fromfile( self.fp, self.dtype, self.pts2read_DCchannels ) #int(self.processingHeaderObj.nHeights*self.systemHeaderObj.nChannels) )
            dc = dc.reshape( (self.systemHeaderObj.nChannels, self.processingHeaderObj.nHeights) ) #reshape to a 2D array


        if not(self.processingHeaderObj.shif_fft):
            #shift the spectrum to the right along axis 2 (center the FFT)
            shift = int(self.processingHeaderObj.profilesPerBlock/2)
            spc = numpy.roll( spc, shift , axis=2 )

            if self.processingHeaderObj.flag_cspc:
                #shift the cross spectrum the same way
                cspc = numpy.roll( cspc, shift, axis=2 )


        # Reorder to (channels, profiles, heights).
        spc = numpy.transpose( spc, (0,2,1) )
        self.data_spc = spc

        if self.processingHeaderObj.flag_cspc:
            cspc = numpy.transpose( cspc, (0,2,1) )
            self.data_cspc = cspc['real'] + cspc['imag']*1j
        else:
            self.data_cspc = None

        if self.processingHeaderObj.flag_dc:
            self.data_dc = dc['real'] + dc['imag']*1j
        else:
            self.data_dc = None

        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1

        self.nTotalBlocks += 1
        self.nReadBlocks += 1

        return 1


    def getData(self):
        """
        Copies the read buffer into the "Spectra" object together with all of
        its associated parameters (metadata). When the read buffer is empty a
        new block read is performed via "readNextBlock".

        Return:
            0 : if no more files are available
            1 : if the buffer was copied successfully
            (NOTE(review): on success it actually returns self.dataOut.data_spc,
            not 1 -- callers relying on the documented 0/1 should be checked.)

        Affected:
            self.dataOut

            self.flagTimeBlock
            self.flagIsNewBlock
        """

        if self.flagNoMoreFiles:
            self.dataOut.flagNoData = True
            print 'Process finished'
            return 0

        self.flagTimeBlock = 0
        self.flagIsNewBlock = 0

        if self.__hasNotDataInBuffer():

            if not( self.readNextBlock() ):
                self.dataOut.flagNoData = True
                return 0

#        self.updateDataHeader()

        #data is a 3-dimensional numpy array (profiles, heights, channels)

        # NOTE(review): '==' against None on a numpy array is an elementwise
        # comparison in modern numpy (truth value is ambiguous); 'is None' is
        # the safe test. Also this rejects any block without DC channels.
        if self.data_dc == None:
            self.dataOut.flagNoData = True
            return 0

        self.dataOut.data_spc = self.data_spc

        self.dataOut.data_cspc = self.data_cspc

        self.dataOut.data_dc = self.data_dc

        self.dataOut.flagTimeBlock = self.flagTimeBlock

        self.dataOut.flagNoData = False

        self.dataOut.dtype = self.dtype

#        self.dataOut.nChannels = self.nRdChannels

        self.dataOut.nPairs = self.nRdPairs

        self.dataOut.pairsList = self.rdPairList

#        self.dataOut.nHeights = self.processingHeaderObj.nHeights

        self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock

        self.dataOut.nFFTPoints = self.processingHeaderObj.profilesPerBlock

        self.dataOut.nCohInt = self.processingHeaderObj.nCohInt

        self.dataOut.nIncohInt = self.processingHeaderObj.nIncohInt

        # Height axis reconstructed from first height + uniform spacing.
        xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight

        self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)

        self.dataOut.channelList = range(self.systemHeaderObj.nChannels)

#        self.dataOut.channelIndexList = range(self.systemHeaderObj.nChannels)

        self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000.#+ self.profileIndex * self.ippSeconds

        self.dataOut.ippSeconds = self.ippSeconds

        # Time spanned by one block: IPP * coherent * incoherent * FFT points.
        self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt * self.processingHeaderObj.nIncohInt * self.dataOut.nFFTPoints

        self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft

#        self.profileIndex += 1

        self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()

        self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()

        return self.dataOut.data_spc
2037 2040
2038 2041
class SpectraWriter(JRODataWriter):

    """
    Writes spectra data to processed (.pdata) files. Writing is always done
    block by block.
    """

    ext = ".pdata"

    optchar = "P"

    # Shapes of the sub-blocks that compose one output block; set by
    # setBlockDimension().
    shape_spc_Buffer = None

    shape_cspc_Buffer = None

    shape_dc_Buffer = None

    data_spc = None

    data_cspc = None

    data_dc = None

#    dataOut = None

    def __init__(self):
        """
        Initializer of the SpectraWriter class for writing spectra data.

        Affected:
            self.dataOut
            self.basicHeaderObj
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.processingHeaderObj

        Return: None
        """

        self.isConfig = False

        self.nTotalBlocks = 0

        self.data_spc = None

        self.data_cspc = None

        self.data_dc = None

        self.fp = None

        self.flagIsNewFile = 1

        # NOTE(review): nTotalBlocks is assigned twice in this constructor.
        self.nTotalBlocks = 0

        self.flagIsNewBlock = 0

        self.setFile = None

        self.dtype = None

        self.path = None

        self.noMoreFiles = 0

        self.filename = None

        self.basicHeaderObj = BasicHeader()

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()


    def hasAllDataInBuffer(self):
        # One dataOut always fills exactly one block, so a write is always due.
        return 1


    def setBlockDimension(self):
        """
        Computes the dimensional shapes of the data sub-blocks that compose one
        output block.

        Affected:
            self.shape_spc_Buffer
            self.shape_cspc_Buffer
            self.shape_dc_Buffer

        Return: None
        """
        self.shape_spc_Buffer = (self.dataOut.nChannels,
                                 self.processingHeaderObj.nHeights,
                                 self.processingHeaderObj.profilesPerBlock)

        self.shape_cspc_Buffer = (self.dataOut.nPairs,
                                  self.processingHeaderObj.nHeights,
                                  self.processingHeaderObj.profilesPerBlock)

        self.shape_dc_Buffer = (self.dataOut.nChannels,
                                self.processingHeaderObj.nHeights)


    def writeBlock(self):
        """
        Writes the buffered block to the current output file.

        Affected:
            self.data_spc
            self.data_cspc
            self.data_dc
            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
            self.nWriteBlocks

        Return: None
        """

        # Store as (channels, heights, profiles); the reader transposes back.
        spc = numpy.transpose( self.data_spc, (0,2,1) )
        if not( self.processingHeaderObj.shif_fft ):
            # NOTE(review): '/' is integer division here only under Python 2.
            spc = numpy.roll( spc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #shift to the right along axis 2
        data = spc.reshape((-1))
        data.tofile(self.fp)

        if self.data_cspc != None:
            data = numpy.zeros( self.shape_cspc_Buffer, self.dtype )
            cspc = numpy.transpose( self.data_cspc, (0,2,1) )
            if not( self.processingHeaderObj.shif_fft ):
                cspc = numpy.roll( cspc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #shift to the right along axis 2
            data['real'] = cspc.real
            data['imag'] = cspc.imag
            data = data.reshape((-1))
            data.tofile(self.fp)

        if self.data_dc != None:
            data = numpy.zeros( self.shape_dc_Buffer, self.dtype )
            dc = self.data_dc
            data['real'] = dc.real
            data['imag'] = dc.imag
            data = data.reshape((-1))
            data.tofile(self.fp)

        self.data_spc.fill(0)
        # NOTE(review): unlike data_cspc below, data_dc is cleared without a
        # None check -- this raises AttributeError when no DC data is present.
        self.data_dc.fill(0)
        if self.data_cspc != None:
            self.data_cspc.fill(0)

        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1
        self.nTotalBlocks += 1
        self.nWriteBlocks += 1
        self.blockIndex += 1


    def putData(self):
        """
        Sets a block of data and then writes it to a file.

        Affected:
            self.data_spc
            self.data_cspc
            self.data_dc

        Return:
            0 : if there is no data or no more files can be written
            1 : if a block of data was written to a file
        """

        if self.dataOut.flagNoData:
            return 0

        self.flagIsNewBlock = 0

        if self.dataOut.flagTimeBlock:
            # NOTE(review): no None checks here either; fails if cspc/dc absent.
            self.data_spc.fill(0)
            self.data_cspc.fill(0)
            self.data_dc.fill(0)
            self.setNextFile()

        if self.flagIsNewFile == 0:
            self.getBasicHeader()

        self.data_spc = self.dataOut.data_spc
        self.data_cspc = self.dataOut.data_cspc
        self.data_dc = self.dataOut.data_dc

        # #self.processingHeaderObj.dataBlocksPerFile)
        if self.hasAllDataInBuffer():
#            self.getDataHeader()
            self.writeNextBlock()

        return 1


    def __getProcessFlags(self):
        """
        Builds the PROCFLAG bitmask describing the datatype and the processing
        applied to the outgoing data.
        """

        processFlags = 0

        dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
        dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
        dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
        dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
        dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
        dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]



        datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
                             PROCFLAG.DATATYPE_SHORT,
                             PROCFLAG.DATATYPE_LONG,
                             PROCFLAG.DATATYPE_INT64,
                             PROCFLAG.DATATYPE_FLOAT,
                             PROCFLAG.DATATYPE_DOUBLE]

        # NOTE(review): if dataOut.dtype matches none of the entries above,
        # dtypeValue is unbound and the next line raises NameError.
        for index in range(len(dtypeList)):
            if self.dataOut.dtype == dtypeList[index]:
                dtypeValue = datatypeValueList[index]
                break

        processFlags += dtypeValue

        if self.dataOut.flagDecodeData:
            processFlags += PROCFLAG.DECODE_DATA

        if self.dataOut.flagDeflipData:
            processFlags += PROCFLAG.DEFLIP_DATA

        if self.dataOut.code != None:
            processFlags += PROCFLAG.DEFINE_PROCESS_CODE

        if self.dataOut.nIncohInt > 1:
            processFlags += PROCFLAG.INCOHERENT_INTEGRATION

        if self.dataOut.data_dc != None:
            processFlags += PROCFLAG.SAVE_CHANNELS_DC

        return processFlags


    def __getBlockSize(self):
        '''
        Determines the number of bytes of one Spectra data block.
        '''

        dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
        dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
        dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
        dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
        dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
        dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
        # Bytes per scalar for each dtype above.
        datatypeValueList = [1,2,4,8,4,8]
        for index in range(len(dtypeList)):
            if self.dataOut.dtype == dtypeList[index]:
                datatypeValue = datatypeValueList[index]
                break


        pts2write = self.dataOut.nHeights * self.dataOut.nFFTPoints

        # Self-spectra are real-valued, so no factor of 2 here.
        pts2write_SelfSpectra = int(self.dataOut.nChannels * pts2write)
        blocksize = (pts2write_SelfSpectra*datatypeValue)

        if self.dataOut.data_cspc != None:
            pts2write_CrossSpectra = int(self.dataOut.nPairs * pts2write)
            blocksize += (pts2write_CrossSpectra*datatypeValue*2)

        if self.dataOut.data_dc != None:
            pts2write_DCchannels = int(self.dataOut.nChannels * self.dataOut.nHeights)
            blocksize += (pts2write_DCchannels*datatypeValue*2)

        blocksize = blocksize  #* datatypeValue * 2  # FIX THIS (left pending by original author)

        return blocksize

    def getDataHeader(self):

        """
        Builds a copy of the First Header for the file being written.

        Affected:
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.dtype

        Return:
            None
        """

        self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
        self.systemHeaderObj.nChannels = self.dataOut.nChannels
        self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()

        self.getBasicHeader()

        processingHeaderSize = 40 # bytes
        # NOTE(review): dtype code 0 was labelled 'Voltage' in the original,
        # but this is the Spectra writer -- confirm the intended datatype code.
        self.processingHeaderObj.dtype = 0
        self.processingHeaderObj.blockSize = self.__getBlockSize()
        self.processingHeaderObj.profilesPerBlock = self.dataOut.nFFTPoints
        self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
        self.processingHeaderObj.nWindows = 1 #could be 1 or self.dataOut.processingHeaderObj.nWindows
        self.processingHeaderObj.processFlags = self.__getProcessFlags()
        self.processingHeaderObj.nCohInt = self.dataOut.nCohInt # needed to determine timeInterval
        self.processingHeaderObj.nIncohInt = self.dataOut.nIncohInt
        self.processingHeaderObj.totalSpectra = self.dataOut.nPairs + self.dataOut.nChannels

        if self.processingHeaderObj.totalSpectra > 0:
            channelList = []
            # Each self-spectrum is encoded as a channel paired with itself.
            for channel in range(self.dataOut.nChannels):
                channelList.append(channel)
                channelList.append(channel)

            pairsList = []
            for pair in self.dataOut.pairsList:
                pairsList.append(pair[0])
                pairsList.append(pair[1])
            spectraComb = channelList + pairsList
            spectraComb = numpy.array(spectraComb,dtype="u1")
            self.processingHeaderObj.spectraComb = spectraComb
            sizeOfSpcComb = len(spectraComb)
            processingHeaderSize += sizeOfSpcComb

        if self.dataOut.code != None:
            self.processingHeaderObj.code = self.dataOut.code
            self.processingHeaderObj.nCode = self.dataOut.nCode
            self.processingHeaderObj.nBaud = self.dataOut.nBaud
            nCodeSize = 4 # bytes
            nBaudSize = 4 # bytes
            codeSize = 4 # bytes
            sizeOfCode = int(nCodeSize + nBaudSize + codeSize * self.dataOut.nCode * self.dataOut.nBaud)
            processingHeaderSize += sizeOfCode

        if self.processingHeaderObj.nWindows != 0:
            self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
            self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
            self.processingHeaderObj.nHeights = self.dataOut.nHeights
            self.processingHeaderObj.samplesWin = self.dataOut.nHeights
            sizeOfFirstHeight = 4
            sizeOfdeltaHeight = 4
            sizeOfnHeights = 4
            sizeOfWindows = (sizeOfFirstHeight + sizeOfdeltaHeight + sizeOfnHeights)*self.processingHeaderObj.nWindows
            processingHeaderSize += sizeOfWindows

        self.processingHeaderObj.size = processingHeaderSize
2388 2391
class SpectraHeisWriter():
    """
    Writes Heis spectra snapshots to FITS files, one file per putData() call.

    NOTE(review): relies on module-level names FITS, time, os and numpy --
    verify the corresponding imports exist at file scope.
    """

    # NOTE(review): class-level counter, never used in this class's code.
    i = 0

    def __init__(self, dataOut):
        # FITS helper used to assemble columns/tables before writing to disk.
        self.wrObj = FITS()
        self.dataOut = dataOut

    @staticmethod
    def isNumber(str):
        """
        Check whether a string can be converted to a number.

        Fix: this was declared as an instance method whose single parameter
        (named 'str') captured 'self', so any call with an argument raised
        TypeError and 'inst.isNumber()' tested the instance itself. It is now
        a staticmethod, callable as SpectraHeisWriter.isNumber("3.14").

        Input:
            str : the string to test for numeric convertibility

        Return:
            True  : the string is numeric
            False : it is not
        """
        try:
            float(str)
            return True
        except (TypeError, ValueError):
            # Narrowed from a bare 'except' so unrelated errors are not hidden.
            return False

    def setup(self, wrpath,):
        """Create the output directory *wrpath* if needed and reset the file counter."""

        if not(os.path.exists(wrpath)):
            os.mkdir(wrpath)

        self.wrpath = wrpath
        self.setFile = 0

    def putData(self):
        """
        Write the current dataOut spectra (frequency axis + 8 power channels,
        in dB) into a new FITS file named D<year><doy><set>.fits inside a
        per-day D<year><doy> subfolder of self.wrpath.

        Return:
            1 on completion.
        """
        fileTime = time.localtime(self.dataOut.utctime)
        ext = ".fits"
        subfolder = 'D%4.4d%3.3d' % (fileTime.tm_year, fileTime.tm_yday)

        doypath = os.path.join(self.wrpath, subfolder)
        if not(os.path.exists(doypath)):
            os.mkdir(doypath)
        self.setFile += 1
        # Renamed from 'file' to avoid shadowing the builtin.
        fileName = 'D%4.4d%3.3d%3.3d%s' % (fileTime.tm_year, fileTime.tm_yday, self.setFile, ext)

        filename = os.path.join(self.wrpath, subfolder, fileName)

        # Frequency axis centered on zero.
        # NOTE(review): the point count uses nHeights while the column format
        # below uses nFFTPoints -- confirm the two always match here.
        freq = numpy.arange(-1*self.dataOut.nHeights/2., self.dataOut.nHeights/2.)/(2*self.dataOut.ippSeconds)

        col1 = self.wrObj.setColF(name="freq", format=str(self.dataOut.nFFTPoints)+'E', array=freq)
        col2 = self.wrObj.writeData(name="P_Ch1", format=str(self.dataOut.nFFTPoints)+'E', data=10*numpy.log10(self.dataOut.data_spc[0,:]))
        col3 = self.wrObj.writeData(name="P_Ch2", format=str(self.dataOut.nFFTPoints)+'E', data=10*numpy.log10(self.dataOut.data_spc[1,:]))
        col4 = self.wrObj.writeData(name="P_Ch3", format=str(self.dataOut.nFFTPoints)+'E', data=10*numpy.log10(self.dataOut.data_spc[2,:]))
        col5 = self.wrObj.writeData(name="P_Ch4", format=str(self.dataOut.nFFTPoints)+'E', data=10*numpy.log10(self.dataOut.data_spc[3,:]))
        col6 = self.wrObj.writeData(name="P_Ch5", format=str(self.dataOut.nFFTPoints)+'E', data=10*numpy.log10(self.dataOut.data_spc[4,:]))
        col7 = self.wrObj.writeData(name="P_Ch6", format=str(self.dataOut.nFFTPoints)+'E', data=10*numpy.log10(self.dataOut.data_spc[5,:]))
        col8 = self.wrObj.writeData(name="P_Ch7", format=str(self.dataOut.nFFTPoints)+'E', data=10*numpy.log10(self.dataOut.data_spc[6,:]))
        col9 = self.wrObj.writeData(name="P_Ch8", format=str(self.dataOut.nFFTPoints)+'E', data=10*numpy.log10(self.dataOut.data_spc[7,:]))

        # Primary HDU carries channel 7's raw spectrum as the image payload.
        n = self.dataOut.data_spc[6,:]
        a = self.wrObj.cFImage(n)
        b = self.wrObj.Ctable(col1, col2, col3, col4, col5, col6, col7, col8, col9)
        self.wrObj.CFile(a, b)
        self.wrObj.wFile(filename)
        return 1
2460 2463
class FITS:
    """Thin helper around pyfits: builds columns and HDUs, then writes a file."""

    name = None
    format = None
    array = None
    data = None
    thdulist = None

    def __init__(self):
        pass

    def setColF(self, name, format, array):
        """Remember the column description and return a float32 pyfits Column."""
        self.name = name
        self.format = format
        self.array = array
        values = numpy.array([self.array], dtype=numpy.float32)
        self.col1 = pyfits.Column(name=self.name, format=self.format, array=values)
        return self.col1

    def writeHeader(self,):
        pass

    def writeData(self, name, format, data):
        """Like setColF but sourced from *data*; the column is kept in self.col2."""
        self.name = name
        self.format = format
        self.data = data
        values = numpy.array([self.data], dtype=numpy.float32)
        self.col2 = pyfits.Column(name=self.name, format=self.format, array=values)
        return self.col2

    def cFImage(self, n):
        """Create the primary HDU carrying the array *n*."""
        self.hdu = pyfits.PrimaryHDU(n)
        return self.hdu

    def Ctable(self, col1, col2, col3, col4, col5, col6, col7, col8, col9):
        """Assemble the nine columns into one binary-table HDU."""
        self.cols = pyfits.ColDefs([col1, col2, col3, col4, col5, col6, col7, col8, col9])
        self.tbhdu = pyfits.new_table(self.cols)
        return self.tbhdu

    def CFile(self, hdu, tbhdu):
        """Bundle primary + table HDUs into the HDUList used by wFile()."""
        self.thdulist = pyfits.HDUList([hdu, tbhdu])

    def wFile(self, filename):
        self.thdulist.writeto(filename)
@@ -1,511 +1,516
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JROHeaderIO.py 151 2012-10-31 19:00:51Z murco $
5 5 '''
6 6 import sys
7 7 import numpy
8 8 import copy
9 9
class Header:
    """Abstract base for the binary JRO header records.

    Subclasses define a numpy ``struct`` dtype and override ``read``/``write``
    to (de)serialize themselves from/to an open file object.
    """

    def __init__(self):
        # Abstract: instantiating the base class directly is an error.
        # (A bare ``raise`` outside an except block fails loudly on purpose.)
        raise

    def copy(self):
        """Return a deep copy of this header object."""
        return copy.deepcopy(self)

    def read(self, fp=None):
        """Deserialize the header from file object *fp*; overridden by subclasses."""
        # BUGFIX: the original signature ``def read():`` lacked ``self`` and
        # raised TypeError whenever called on an instance.
        pass

    def write(self, fp=None):
        """Serialize the header to file object *fp*; overridden by subclasses."""
        # BUGFIX: same missing ``self`` defect as read().
        pass

    def printInfo(self):
        """Dump every instance attribute as ``name = value`` to stdout."""
        for key in self.__dict__.keys():
            print("%s = %s" %(key, self.__dict__[key]))
28
class BasicHeader(Header):
    """Fixed-size basic header present at the start of every data block.

    The attributes mirror the little-endian on-disk record described by
    ``struct`` (size, version, block id, timestamp, timezone, error count).
    """

    size = None
    version = None
    dataBlock = None
    utc = None
    miliSecond = None
    timeZone = None
    dstFlag = None
    errorCount = None
    struct = None

    def __init__(self):

        self.size = 0
        self.version = 0
        self.dataBlock = 0
        self.utc = 0
        self.miliSecond = 0
        self.timeZone = 0
        self.dstFlag = 0
        self.errorCount = 0
        # on-disk layout; field order must match the file format exactly
        self.struct = numpy.dtype([
                             ('nSize','<u4'),
                             ('nVersion','<u2'),
                             ('nDataBlockId','<u4'),
                             ('nUtime','<u4'),
                             ('nMilsec','<u2'),
                             ('nTimezone','<i2'),
                             ('nDstflag','<i2'),
                             ('nErrorCount','<u4')
                             ])

    def read(self, fp):
        """Read one basic-header record from *fp*.

        Returns 1 on success, 0 on any error (the error is printed).
        """
        try:
            header = numpy.fromfile(fp, self.struct,1)
            self.size = int(header['nSize'][0])
            self.version = int(header['nVersion'][0])
            self.dataBlock = int(header['nDataBlockId'][0])
            self.utc = int(header['nUtime'][0])
            self.miliSecond = int(header['nMilsec'][0])
            self.timeZone = int(header['nTimezone'][0])
            self.dstFlag = int(header['nDstflag'][0])
            self.errorCount = int(header['nErrorCount'][0])

        except Exception as e:
            # BUGFIX: ``"BasicHeader: " + e`` raised TypeError (str + exception
            # object) and masked the real error; format the exception instead.
            print("BasicHeader: %s" % e)
            return 0

        return 1

    def write(self, fp):
        """Write this header to *fp* in the on-disk layout. Returns 1."""
        headerTuple = (self.size,self.version,self.dataBlock,self.utc,self.miliSecond,self.timeZone,self.dstFlag,self.errorCount)
        header = numpy.array(headerTuple,self.struct)
        header.tofile(fp)

        return 1
82 87
class SystemHeader(Header):
    """Acquisition-system header: sample/profile/channel counts and ADC info."""

    size = None
    nSamples = None
    nProfiles = None
    nChannels = None
    adcResolution = None
    pciDioBusWidth = None
    struct = None

    def __init__(self):
        self.size = 0
        self.nSamples = 0
        self.nProfiles = 0
        self.nChannels = 0
        self.adcResolution = 0
        self.pciDioBusWidth = 0
        # on-disk layout; field order must match the file format exactly
        self.struct = numpy.dtype([
                             ('nSize','<u4'),
                             ('nNumSamples','<u4'),
                             ('nNumProfiles','<u4'),
                             ('nNumChannels','<u4'),
                             ('nADCResolution','<u4'),
                             ('nPCDIOBusWidth','<u4'),
                             ])

    def read(self, fp):
        """Read one system-header record from *fp*.

        Returns 1 on success, 0 on any error (the error is printed).
        """
        try:
            header = numpy.fromfile(fp,self.struct,1)
            # int() conversions for consistency with the other header classes
            self.size = int(header['nSize'][0])
            self.nSamples = int(header['nNumSamples'][0])
            self.nProfiles = int(header['nNumProfiles'][0])
            self.nChannels = int(header['nNumChannels'][0])
            self.adcResolution = int(header['nADCResolution'][0])
            self.pciDioBusWidth = int(header['nPCDIOBusWidth'][0])

        except Exception as e:
            # BUGFIX: ``"SystemHeader: " + e`` raised TypeError (str + exception
            # object); format the exception instead.
            print("SystemHeader: %s" % e)
            return 0

        return 1

    def write(self, fp):
        """Write this header to *fp* in the on-disk layout. Returns 1."""
        headerTuple = (self.size,self.nSamples,self.nProfiles,self.nChannels,self.adcResolution,self.pciDioBusWidth)
        header = numpy.array(headerTuple,self.struct)
        header.tofile(fp)

        return 1
132 137
class RadarControllerHeader(Header):
    """Radar-controller header: experiment timing, sampling windows and code.

    The fixed 116-byte record is followed by a variable-length "dynamic"
    section holding the sampling windows, the tau list and (optionally)
    the phase code and flip values.
    """

    size = None
    expType = None
    nTx = None
    ipp = None
    txA = None
    txB = None
    nWindows = None
    numTaus = None
    codeType = None
    line6Function = None
    line5Function = None
    fClock = None
    prePulseBefore = None
    prePulserAfter = None
    rangeIpp = None
    rangeTxA = None
    rangeTxB = None
    struct = None

    def __init__(self):
        self.size = 0
        self.expType = 0
        self.nTx = 0
        self.ipp = 0
        self.txA = 0
        self.txB = 0
        self.nWindows = 0
        self.numTaus = 0
        self.codeType = 0
        self.line6Function = 0
        self.line5Function = 0
        self.fClock = 0
        self.prePulseBefore = 0
        self.prePulserAfter = 0
        self.rangeIpp = 0
        self.rangeTxA = 0
        self.rangeTxB = 0
        # fixed part of the on-disk record (116 bytes)
        self.struct = numpy.dtype([
                             ('nSize','<u4'),
                             ('nExpType','<u4'),
                             ('nNTx','<u4'),
                             ('fIpp','<f4'),
                             ('fTxA','<f4'),
                             ('fTxB','<f4'),
                             ('nNumWindows','<u4'),
                             ('nNumTaus','<u4'),
                             ('nCodeType','<u4'),
                             ('nLine6Function','<u4'),
                             ('nLine5Function','<u4'),
                             ('fClock','<f4'),
                             ('nPrePulseBefore','<u4'),
                             ('nPrePulseAfter','<u4'),
                             ('sRangeIPP','<a20'),
                             ('sRangeTxA','<a20'),
                             ('sRangeTxB','<a20'),
                             ])

        # layout of one sampling-window entry in the dynamic section
        self.samplingWindowStruct = numpy.dtype([('h0','<f4'),('dh','<f4'),('nsa','<u4')])

        self.samplingWindow = None
        self.nHeights = None
        self.firstHeight = None
        self.deltaHeight = None
        self.samplesWin = None

        self.nCode = None
        self.nBaud = None
        self.code = None
        self.flip1 = None
        self.flip2 = None

        # raw copy of the dynamic section (re-written verbatim by write())
        self.dynamic = numpy.array([],numpy.dtype('byte'))


    def read(self, fp):
        """Read the fixed record plus its dynamic section from *fp*.

        Returns 1 on success, 0 on any error (the error is printed).
        """
        try:
            startFp = fp.tell()
            header = numpy.fromfile(fp,self.struct,1)
            self.size = int(header['nSize'][0])
            self.expType = int(header['nExpType'][0])
            self.nTx = int(header['nNTx'][0])
            self.ipp = float(header['fIpp'][0])
            self.txA = float(header['fTxA'][0])
            self.txB = float(header['fTxB'][0])
            self.nWindows = int(header['nNumWindows'][0])
            self.numTaus = int(header['nNumTaus'][0])
            self.codeType = int(header['nCodeType'][0])
            self.line6Function = int(header['nLine6Function'][0])
            self.line5Function = int(header['nLine5Function'][0])
            self.fClock = float(header['fClock'][0])
            self.prePulseBefore = int(header['nPrePulseBefore'][0])
            self.prePulserAfter = int(header['nPrePulseAfter'][0])
            self.rangeIpp = header['sRangeIPP'][0]
            self.rangeTxA = header['sRangeTxA'][0]
            self.rangeTxB = header['sRangeTxB'][0]

            # slurp the whole dynamic section as raw bytes (kept for write()),
            # then seek back and re-parse it field by field
            jumpFp = self.size - 116
            self.dynamic = numpy.fromfile(fp,numpy.dtype('byte'),jumpFp)
            backFp = fp.tell() - jumpFp
            fp.seek(backFp)

            self.samplingWindow = numpy.fromfile(fp,self.samplingWindowStruct,self.nWindows)
            self.nHeights = int(numpy.sum(self.samplingWindow['nsa']))
            self.firstHeight = self.samplingWindow['h0']
            self.deltaHeight = self.samplingWindow['dh']
            self.samplesWin = self.samplingWindow['nsa']

            self.Taus = numpy.fromfile(fp,'<f4',self.numTaus)

            if self.codeType != 0:
                self.nCode = int(numpy.fromfile(fp,'<u4',1))
                self.nBaud = int(numpy.fromfile(fp,'<u4',1))
                self.code = numpy.empty([self.nCode,self.nBaud],dtype='u1')
                for ic in range(self.nCode):
                    # each code word is stored as ceil(nBaud/32) packed 32-bit
                    # groups; unpack the bits and keep the last nBaud of them
                    temp = numpy.fromfile(fp,'u1',4*int(numpy.ceil(self.nBaud/32.)))
                    self.code[ic] = numpy.unpackbits(temp[::-1])[-1*self.nBaud:]
                self.code = 2.0*self.code - 1.0   # map {0,1} -> {-1,+1}

            if self.line5Function == RCfunction.FLIP:
                self.flip1 = numpy.fromfile(fp,'<u4',1)

            if self.line6Function == RCfunction.FLIP:
                self.flip2 = numpy.fromfile(fp,'<u4',1)

            # skip any trailing bytes so fp ends exactly after this header
            endFp = self.size + startFp
            if endFp > fp.tell():
                # BUGFIX: the original called fp.seek(jumpFp) with a RELATIVE
                # offset as if it were absolute, which repositioned the file
                # near its beginning; seek to the absolute end of the header.
                fp.seek(endFp)

        except Exception as e:
            # BUGFIX: ``"RadarControllerHeader: " + e`` raised TypeError
            # (str + exception object); format the exception instead.
            print("RadarControllerHeader: %s" % e)
            return 0

        return 1

    def write(self, fp):
        """Write the fixed record followed by the saved dynamic bytes. Returns 1."""
        headerTuple = (self.size,
                       self.expType,
                       self.nTx,
                       self.ipp,
                       self.txA,
                       self.txB,
                       self.nWindows,
                       self.numTaus,
                       self.codeType,
                       self.line6Function,
                       self.line5Function,
                       self.fClock,
                       self.prePulseBefore,
                       self.prePulserAfter,
                       self.rangeIpp,
                       self.rangeTxA,
                       self.rangeTxB)

        header = numpy.array(headerTuple,self.struct)
        header.tofile(fp)

        # the dynamic section is re-emitted verbatim as captured by read()
        dynamic = self.dynamic
        dynamic.tofile(fp)

        return 1
299 304
300 305
301 306
class ProcessingHeader(Header):
    """Processing header: data type, block geometry, integration counts and
    the spectra-combination table, plus derived flags (flag_dc, flag_cspc).
    """

    size = None
    dtype = None
    blockSize = None
    profilesPerBlock = None
    dataBlocksPerFile = None
    nWindows = None
    processFlags = None
    nCohInt = None
    nIncohInt = None
    totalSpectra = None
    struct = None
    flag_dc = None
    flag_cspc = None

    def __init__(self):
        self.size = 0
        self.dtype = 0
        self.blockSize = 0
        self.profilesPerBlock = 0
        self.dataBlocksPerFile = 0
        self.nWindows = 0
        self.processFlags = 0
        self.nCohInt = 0
        self.nIncohInt = 0
        self.totalSpectra = 0
        # fixed part of the on-disk record
        self.struct = numpy.dtype([
                             ('nSize','<u4'),
                             ('nDataType','<u4'),
                             ('nSizeOfDataBlock','<u4'),
                             ('nProfilesperBlock','<u4'),
                             ('nDataBlocksperFile','<u4'),
                             ('nNumWindows','<u4'),
                             ('nProcessFlags','<u4'),
                             ('nCoherentIntegrations','<u4'),
                             ('nIncoherentIntegrations','<u4'),
                             ('nTotalSpectra','<u4')
                             ])
        self.samplingWindow = 0
        self.structSamplingWindow = numpy.dtype([('h0','<f4'),('dh','<f4'),('nsa','<u4')])
        self.nHeights = 0
        self.firstHeight = 0
        self.deltaHeight = 0
        self.samplesWin = 0
        self.spectraComb = 0
        self.nCode = None
        self.code = None
        self.nBaud = None
        self.shif_fft = False
        self.flag_dc = False
        self.flag_cspc = False

    def read(self, fp):
        """Read the record from *fp* and derive flag_dc/flag_cspc/shif_fft.

        Returns 1 on success, 0 on any error (the error is printed).
        """
        try:
            header = numpy.fromfile(fp,self.struct,1)
            self.size = int(header['nSize'][0])
            self.dtype = int(header['nDataType'][0])
            self.blockSize = int(header['nSizeOfDataBlock'][0])
            self.profilesPerBlock = int(header['nProfilesperBlock'][0])
            self.dataBlocksPerFile = int(header['nDataBlocksperFile'][0])
            self.nWindows = int(header['nNumWindows'][0])
            # BUGFIX: ``[0]`` was missing here; int() of a size-1 array only
            # worked by accident (deprecated) — index explicitly like the rest.
            self.processFlags = int(header['nProcessFlags'][0])
            self.nCohInt = int(header['nCoherentIntegrations'][0])
            self.nIncohInt = int(header['nIncoherentIntegrations'][0])
            self.totalSpectra = int(header['nTotalSpectra'][0])
            self.samplingWindow = numpy.fromfile(fp,self.structSamplingWindow,self.nWindows)
            self.nHeights = int(numpy.sum(self.samplingWindow['nsa']))
            self.firstHeight = float(self.samplingWindow['h0'][0])
            self.deltaHeight = float(self.samplingWindow['dh'][0])
            self.samplesWin = self.samplingWindow['nsa']
            # pairs of channel ids: equal -> self spectrum, distinct -> cross
            self.spectraComb = numpy.fromfile(fp,'u1',2*self.totalSpectra)

            if ((self.processFlags & PROCFLAG.DEFINE_PROCESS_CODE) == PROCFLAG.DEFINE_PROCESS_CODE):
                self.nCode = int(numpy.fromfile(fp,'<u4',1))
                self.nBaud = int(numpy.fromfile(fp,'<u4',1))
                # NOTE(review): reshape order (nBaud, nCode) looks transposed
                # relative to write(); confirm against the file format.
                self.code = numpy.fromfile(fp,'<f4',self.nCode*self.nBaud).reshape(self.nBaud,self.nCode)

            if ((self.processFlags & PROCFLAG.SHIFT_FFT_DATA) == PROCFLAG.SHIFT_FFT_DATA):
                self.shif_fft = True
            else:
                self.shif_fft = False

            if ((self.processFlags & PROCFLAG.SAVE_CHANNELS_DC) == PROCFLAG.SAVE_CHANNELS_DC):
                self.flag_dc = True

            nChannels = 0
            nPairs = 0
            pairList = []

            for i in range( 0, self.totalSpectra*2, 2 ):
                if self.spectraComb[i] == self.spectraComb[i+1]:
                    nChannels = nChannels + 1   # pair of equal channels
                else:
                    nPairs = nPairs + 1         # pair of distinct channels
                    pairList.append( (self.spectraComb[i], self.spectraComb[i+1]) )

            self.flag_cspc = False
            if nPairs > 0:
                self.flag_cspc = True

        except Exception as e:
            # BUGFIX: ``"ProcessingHeader: " + e`` raised TypeError
            # (str + exception object); format the exception instead.
            print("ProcessingHeader: %s" % e)
            return 0

        return 1

    def write(self, fp):
        """Write the record (and optional window/spectra/code tails). Returns 1."""
        headerTuple = (self.size,
                       self.dtype,
                       self.blockSize,
                       self.profilesPerBlock,
                       self.dataBlocksPerFile,
                       self.nWindows,
                       self.processFlags,
                       self.nCohInt,
                       self.nIncohInt,
                       self.totalSpectra)

        header = numpy.array(headerTuple,self.struct)
        header.tofile(fp)

        if self.nWindows != 0:
            sampleWindowTuple = (self.firstHeight,self.deltaHeight,self.samplesWin)
            samplingWindow = numpy.array(sampleWindowTuple,self.structSamplingWindow)
            samplingWindow.tofile(fp)

        if self.totalSpectra != 0:
            self.spectraComb.tofile(fp)

        if self.processFlags & PROCFLAG.DEFINE_PROCESS_CODE == PROCFLAG.DEFINE_PROCESS_CODE:
            # BUGFIX: self.nCode/self.nBaud are plain ints and have no
            # .tofile(); wrap them in numpy scalars with the on-disk dtype.
            numpy.array(self.nCode, '<u4').tofile(fp)
            numpy.array(self.nBaud, '<u4').tofile(fp)

            # cast to '<f4' so the bytes written match what read() expects
            code = self.code.reshape(self.nCode*self.nBaud)
            code.astype('<f4').tofile(fp)

        return 1
447 452
class RCfunction:
    # Radar-controller line function codes, as stored in the
    # line5Function/line6Function fields of RadarControllerHeader.
    NONE=0
    FLIP=1
    CODE=2
    SAMPLING=3
    LIN6DIV256=4
    SYNCHRO=5
455 460
class nCodeType:
    # Phase-code type identifiers stored in RadarControllerHeader.codeType.
    NONE=0
    USERDEFINE=1
    BARKER2=2
    BARKER3=3
    BARKER4=4
    BARKER5=5
    BARKER7=6
    BARKER11=7
    BARKER13=8
    AC128=9
    COMPLEMENTARYCODE2=10
    COMPLEMENTARYCODE4=11
    COMPLEMENTARYCODE8=12
    COMPLEMENTARYCODE16=13
    COMPLEMENTARYCODE32=14
    COMPLEMENTARYCODE64=15
    COMPLEMENTARYCODE128=16
    CODE_BINARY28=17
475 480
class PROCFLAG:
    # Bit flags stored in ProcessingHeader.processFlags (nProcessFlags).

    # operations already applied to the data (bits 0-5, see OPERATION_MASK)
    COHERENT_INTEGRATION = numpy.uint32(0x00000001)
    DECODE_DATA = numpy.uint32(0x00000002)
    SPECTRA_CALC = numpy.uint32(0x00000004)
    INCOHERENT_INTEGRATION = numpy.uint32(0x00000008)
    POST_COHERENT_INTEGRATION = numpy.uint32(0x00000010)
    SHIFT_FFT_DATA = numpy.uint32(0x00000020)

    # sample data type (bits 6-11, see DATATYPE_MASK)
    DATATYPE_CHAR = numpy.uint32(0x00000040)
    DATATYPE_SHORT = numpy.uint32(0x00000080)
    DATATYPE_LONG = numpy.uint32(0x00000100)
    DATATYPE_INT64 = numpy.uint32(0x00000200)
    DATATYPE_FLOAT = numpy.uint32(0x00000400)
    DATATYPE_DOUBLE = numpy.uint32(0x00000800)

    # memory layout of the block (bits 12-14, see DATAARRANGE_MASK)
    DATAARRANGE_CONTIGUOUS_CH = numpy.uint32(0x00001000)
    DATAARRANGE_CONTIGUOUS_H = numpy.uint32(0x00002000)
    DATAARRANGE_CONTIGUOUS_P = numpy.uint32(0x00004000)

    SAVE_CHANNELS_DC = numpy.uint32(0x00008000)
    DEFLIP_DATA = numpy.uint32(0x00010000)
    DEFINE_PROCESS_CODE = numpy.uint32(0x00020000)

    # acquisition system id — these are multi-bit VALUES under ACQ_SYS_MASK,
    # not independent flags (e.g. ADRXD = 0x000C0000 combines two bits)
    ACQ_SYS_NATALIA = numpy.uint32(0x00040000)
    ACQ_SYS_ECHOTEK = numpy.uint32(0x00080000)
    ACQ_SYS_ADRXD = numpy.uint32(0x000C0000)
    ACQ_SYS_JULIA = numpy.uint32(0x00100000)
    ACQ_SYS_XXXXXX = numpy.uint32(0x00140000)

    EXP_NAME_ESP = numpy.uint32(0x00200000)
    CHANNEL_NAMES_ESP = numpy.uint32(0x00400000)

    # masks for extracting the multi-bit fields above
    OPERATION_MASK = numpy.uint32(0x0000003F)
    DATATYPE_MASK = numpy.uint32(0x00000FC0)
    DATAARRANGE_MASK = numpy.uint32(0x00007000)
    ACQ_SYS_MASK = numpy.uint32(0x001C0000)
@@ -1,985 +1,971
1 1 '''
2 2
3 3 $Author: dsuarez $
4 4 $Id: Processor.py 1 2012-11-12 18:56:07Z dsuarez $
5 5 '''
6 6 import os
7 7 import numpy
8 8 import datetime
9 9 import time
10 10
11 11 from jrodata import *
12 12 from jrodataIO import *
13 13 from jroplot import *
14 14
class ProcessingUnit:

    """
    Base class for data processing units.

    Provides the "call" method to dispatch operations, which may be:
        - internal methods (callMethod)
        - Operation-type objects (callObject); these must have been
          registered beforehand with the "add" / "addOperation" method.
    """
    # input data object (Voltage, Spectra or Correlation)
    dataIn = None

    # output data object (Voltage, Spectra or Correlation)
    dataOut = None

    # registry of Operation objects, keyed by objId
    objectDict = None

    def __init__(self):

        self.objectDict = {}

    def init(self):
        # Abstract: subclasses prepare dataOut from dataIn here.
        raise ValueError, "Not implemented"

    def addOperation(self, object, objId):

        """
        Register *object* in "self.objectDict" and return the identifier
        associated with it.

        Input:

            object : an instance of class "Operation"

        Return:

            objId : identifier of the object, needed to execute the operation later
        """

        self.objectDict[objId] = object

        return objId

    def operation(self, **kwargs):

        """
        Direct operation on the data (dataout.data); implementations must
        update the attributes of the dataOut object accordingly.

        Input:

            **kwargs : keyword arguments of the function to execute
        """

        raise ValueError, "ImplementedError"

    def callMethod(self, name, **kwargs):

        """
        Execute the method *name* of this class with arguments **kwargs.

        Input:
            name : name of the method to execute

            **kwargs : dictionary of argument names and values.

        Return:
            False when there is no data to process, True otherwise.
        """
        if name != 'run':

            # 'init' copies dataIn into dataOut, so it needs input data
            if name == 'init' and self.dataIn.isEmpty():
                self.dataOut.flagNoData = True
                return False

            # every other method operates on dataOut
            if name != 'init' and self.dataOut.isEmpty():
                return False

        methodToCall = getattr(self, name)

        methodToCall(**kwargs)

        if name != 'run':
            return True

        # for 'run', success is judged AFTER the call produced output data
        if self.dataOut.isEmpty():
            return False

        return True

    def callObject(self, objId, **kwargs):

        """
        Execute the operation registered under the identifier *objId*.

        Input:

            objId : identifier of the object to execute

            **kwargs : dictionary of argument names and values.

        Return:

            False when dataOut is empty, True otherwise.
        """

        if self.dataOut.isEmpty():
            return False

        object = self.objectDict[objId]

        object.run(self.dataOut, **kwargs)

        return True

    def call(self, operationConf, **kwargs):

        """
        Return True if the operation "operationConf.name" was executed with
        the arguments "**kwargs", False if it was not. The operation can be
        of two types:

        1. A method of this class:

            operation.type = "self"

        2. The "run" method of an Operation object (or a subclass of it):
            operation.type = "other".

            This Operation object must have been registered before with
            "addOperation" and identified by operation.id

        Input:

            Operation : object of type operation with attributes: name, type and id.

        """
        # NOTE(review): ``sts`` is unbound if type is neither 'self' nor
        # 'other' — callers apparently always pass one of the two.
        if operationConf.type == 'self':
            sts = self.callMethod(operationConf.name, **kwargs)

        if operationConf.type == 'other':
            sts = self.callObject(operationConf.id, **kwargs)

        return sts

    def setInput(self, dataIn):

        self.dataIn = dataIn

    def getOutput(self):

        return self.dataOut
173 173
class Operation():

    """
    Base class for add-on operations attached to a ProcessingUnit.

    Operations that must accumulate state across calls keep their own
    buffer here (e.g. coherent integration needs the previous n profiles).
    """

    __buffer = None
    __isConfig = False

    def __init__(self):
        # Concrete operations configure themselves lazily on first run().
        pass

    def run(self, dataIn, **kwargs):

        """
        Process dataIn.data and update the attributes of *dataIn* in place.

        Input:

            dataIn : object of type JROData

        Return:

            None

        Affected:
            __buffer : data-reception buffer of the concrete subclass.
        """
        # Abstract: must be overridden by every concrete operation.
        raise ValueError("ImplementedError")
211 211
class VoltageProc(ProcessingUnit):
    """Processing unit for voltage data: channel and height selection."""

    def __init__(self):

        self.objectDict = {}
        self.dataOut = Voltage()

    def init(self):

        # Copy the incoming block's attributes into dataOut.
        # (Ideally this copy would happen once per new data block, not on
        # every init() call.)
        self.dataOut.copy(self.dataIn)

    def selectChannels(self, channelList):

        # Translate channel ids into positional indices, then delegate.
        channelIndexList = [self.dataOut.channelList.index(channel)
                            for channel in channelList]

        self.selectChannelsByIndex(channelIndexList)

    def selectChannelsByIndex(self, channelIndexList):
        """
        Keep only the channels named by position in channelIndexList.

        Input:
            channelIndexList : plain list of channel indices, e.g. [2,3,7]

        Affected:
            self.dataOut.data
            self.dataOut.channelList

        Return:
            1 on success; raises ValueError on an invalid index.
        """

        for index in channelIndexList:
            if index in self.dataOut.channelIndexList:
                continue
            print(channelIndexList)
            raise ValueError("The value %d in channelIndexList is not valid" % index)

        self.dataOut.data = self.dataOut.data[channelIndexList, :]
        self.dataOut.channelList = [self.dataOut.channelList[i]
                                    for i in channelIndexList]

        return 1

    def selectHeights(self, minHei, maxHei):
        """
        Keep only the heights within the range minHei <= height <= maxHei.

        Input:
            minHei : minimum height to keep
            maxHei : maximum height to keep (clamped to the last height)

        Affected:
            indirectly, everything touched by selectHeightsByIndex

        Return:
            1 on success; raises ValueError on an invalid range.
        """
        heights = self.dataOut.heightList

        if (minHei < heights[0]) or (minHei > maxHei):
            raise ValueError("some value in (%d,%d) is not valid" % (minHei, maxHei))

        if maxHei > heights[-1]:
            maxHei = heights[-1]

        # first index whose height reaches minHei (0 if none does)
        minIndex = 0
        for index, height in enumerate(heights):
            if height >= minHei:
                minIndex = index
                break

        # last index whose height does not exceed maxHei
        maxIndex = 0
        for index, height in enumerate(heights):
            if height > maxHei:
                break
            maxIndex = index

        self.selectHeightsByIndex(minIndex, maxIndex)

        return 1


    def selectHeightsByIndex(self, minIndex, maxIndex):
        """
        Keep only the heights with indices minIndex <= index <= maxIndex.

        Input:
            minIndex : minimum height index to keep
            maxIndex : maximum height index to keep (clamped to nHeights-1)

        Affected:
            self.dataOut.data
            self.dataOut.heightList

        Return:
            1 on success; raises ValueError on an invalid range.
        """

        if (minIndex < 0) or (minIndex > maxIndex):
            raise ValueError("some value in (%d,%d) is not valid" % (minIndex, maxIndex))

        if maxIndex >= self.dataOut.nHeights:
            maxIndex = self.dataOut.nHeights - 1

        self.dataOut.data = self.dataOut.data[:, minIndex:maxIndex+1]
        self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex+1]

        return 1
349 349
350 350
class CohInt(Operation):
    """Coherent integration of voltage profiles.

    Accumulates either a fixed number of profiles (n) or a time interval of
    data and delivers their sum. Two accumulation modes:

      - non-overlapping: profiles are summed into a scalar accumulator and
        flushed every n profiles;
      - overlapping: the last n profiles are kept in a rolling buffer and a
        sum is produced for every new profile once the buffer is full.
    """

    __profIndex = 0
    __withOverapping = False

    __byTime = False
    __initime = None
    __lastdatatime = None
    __integrationtime = None

    __buffer = None

    __dataReady = False

    n = None


    def __init__(self):

        self.__isConfig = False

    def setup(self, n=None, timeInterval=None, overlapping=False):
        """
        Set the parameters of the integration class.

        Inputs:

            n : Number of coherent integrations
            timeInterval : Time of integration (in minutes). If the parameter "n" is given this one does not work
            overlapping : keep a rolling buffer of the last n profiles

        Raises ValueError when neither n nor timeInterval is given.
        """

        self.__initime = None
        self.__lastdatatime = 0
        self.__buffer = None
        self.__dataReady = False

        if n is None and timeInterval is None:
            raise ValueError("n or timeInterval should be specified ...")

        if n is not None:
            self.n = n
            self.__byTime = False
        else:
            self.__integrationtime = timeInterval * 60. #if (type(timeInterval)!=integer) -> change this line
            self.n = 9999
            self.__byTime = True

        if overlapping:
            self.__withOverapping = True
            self.__buffer = None
        else:
            self.__withOverapping = False
            self.__buffer = 0   # scalar accumulator; 0 + array broadcasts

        self.__profIndex = 0

    def putData(self, data):

        """
        Add a profile to the __buffer and increase in one the __profileIndex
        """

        if not self.__withOverapping:
            self.__buffer += data.copy()
            self.__profIndex += 1
            return

        # Overlapping mode: stack profiles along a new leading axis
        nChannels, nHeis = data.shape
        data = numpy.reshape(data, (1, nChannels, nHeis))

        # BUGFIX: use ``is None`` — ``== None`` on a numpy array is an
        # elementwise comparison whose truth value is ambiguous.
        if self.__buffer is None:
            self.__buffer = data
            self.__profIndex += 1
            return

        # buffer not yet full: keep stacking
        if self.__profIndex < self.n:
            self.__buffer = numpy.vstack((self.__buffer, data))
            self.__profIndex += 1
            return

        # buffer full: drop the oldest profile and append the newest
        self.__buffer = numpy.roll(self.__buffer, -1, axis=0)
        self.__buffer[self.n-1] = data
        self.__profIndex = self.n
        return


    def pushData(self):
        """
        Return the sum of the buffered profiles and how many were summed.

        Affected:

            self.__profIndex (and __buffer, in non-overlapping mode)
        """

        if not self.__withOverapping:
            data = self.__buffer
            n = self.__profIndex

            self.__buffer = 0
            self.__profIndex = 0

            return data, n

        # Overlapping: sum the rolling buffer, leave it intact
        data = numpy.sum(self.__buffer, axis=0)
        n = self.__profIndex

        return data, n

    def byProfiles(self, data):
        """Accumulate by profile count; returns the sum once n are gathered."""

        self.__dataReady = False
        avgdata = None

        self.putData(data)

        if self.__profIndex == self.n:

            avgdata, n = self.pushData()
            self.__dataReady = True

        return avgdata

    def byTime(self, data, datatime):
        """Accumulate by elapsed time; returns the sum once the interval passes."""

        self.__dataReady = False
        avgdata = None

        self.putData(data)

        if (datatime - self.__initime) >= self.__integrationtime:
            avgdata, n = self.pushData()
            self.n = n
            self.__dataReady = True

        return avgdata

    def integrate(self, data, datatime=None):
        """Feed one profile; return (sum, start_time) or (None, None)."""

        if self.__initime is None:
            self.__initime = datatime

        if self.__byTime:
            avgdata = self.byTime(data, datatime)
        else:
            avgdata = self.byProfiles(data)

        # BUGFIX: compute the elapsed time BEFORE updating __lastdatatime;
        # the original updated first, so deltatime was always zero and the
        # overlapping-mode start time never advanced.
        deltatime = datatime - self.__lastdatatime
        self.__lastdatatime = datatime

        if avgdata is None:
            return None, None

        avgdatatime = self.__initime

        if not self.__withOverapping:
            self.__initime = datatime
        else:
            self.__initime += deltatime

        return avgdata, avgdatatime

    def run(self, dataOut, n=None, timeInterval=None, overlapping=False):
        """Integrate dataOut.data in place; sets flagNoData until a sum is ready."""

        if not self.__isConfig:
            self.setup(n, timeInterval, overlapping)
            self.__isConfig = True

        avgdata, avgdatatime = self.integrate(dataOut.data, dataOut.utctime)

        dataOut.flagNoData = True

        if self.__dataReady:
            dataOut.data = avgdata
            dataOut.nCohInt *= self.n
            dataOut.utctime = avgdatatime
            dataOut.timeInterval = dataOut.ippSeconds * dataOut.nCohInt
            dataOut.flagNoData = False
545 545
546 546 class SpectraProc(ProcessingUnit):
547 547
548 548 def __init__(self):
549 549
550 550 self.objectDict = {}
551 551 self.buffer = None
552 552 self.firstdatatime = None
553 553 self.profIndex = 0
554 554 self.dataOut = Spectra()
555 555
556 556 def __updateObjFromInput(self):
557 557
558 558 self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()
559 559 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()
560 560 self.dataOut.channelList = self.dataIn.channelList
561 561 self.dataOut.heightList = self.dataIn.heightList
562 562 self.dataOut.dtype = self.dataIn.dtype
563 563 # self.dataOut.nHeights = self.dataIn.nHeights
564 564 # self.dataOut.nChannels = self.dataIn.nChannels
565 565 self.dataOut.nBaud = self.dataIn.nBaud
566 566 self.dataOut.nCode = self.dataIn.nCode
567 567 self.dataOut.code = self.dataIn.code
568 568 self.dataOut.nProfiles = self.dataOut.nFFTPoints
569 569 # self.dataOut.channelIndexList = self.dataIn.channelIndexList
570 570 self.dataOut.flagTimeBlock = self.dataIn.flagTimeBlock
571 571 self.dataOut.utctime = self.firstdatatime
572 572 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData #asumo q la data esta decodificada
573 573 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData #asumo q la data esta sin flip
574 574 self.dataOut.flagShiftFFT = self.dataIn.flagShiftFFT
575 575 self.dataOut.nCohInt = self.dataIn.nCohInt
576 576 self.dataOut.nIncohInt = 1
577 577 self.dataOut.ippSeconds = self.dataIn.ippSeconds
578 578
579 self.dataOut.timeInterval = self.dataIn.timeInterval*self.dataOut.nFFTPoints*self.dataOut.nCohInt*self.dataOut.nIncohInt
579 self.dataOut.timeInterval = self.dataIn.timeInterval*self.dataOut.nFFTPoints*self.dataOut.nIncohInt
580 580
581 581 def __getFft(self):
582 582 """
583 583 Convierte valores de Voltaje a Spectra
584 584
585 585 Affected:
586 586 self.dataOut.data_spc
587 587 self.dataOut.data_cspc
588 588 self.dataOut.data_dc
589 589 self.dataOut.heightList
590 self.dataOut.m_BasicHeader
591 self.dataOut.m_ProcessingHeader
592 self.dataOut.radarControllerHeaderObj
593 self.dataOut.systemHeaderObj
594 590 self.profIndex
595 591 self.buffer
596 592 self.dataOut.flagNoData
597 self.dataOut.dtype
598 self.dataOut.nPairs
599 self.dataOut.nChannels
600 self.dataOut.nProfiles
601 self.dataOut.systemHeaderObj.numChannels
602 self.dataOut.m_ProcessingHeader.totalSpectra
603 self.dataOut.m_ProcessingHeader.profilesPerBlock
604 self.dataOut.m_ProcessingHeader.numHeights
605 self.dataOut.m_ProcessingHeader.spectraComb
606 self.dataOut.m_ProcessingHeader.shif_fft
607 593 """
608 594 fft_volt = numpy.fft.fft(self.buffer,axis=1)
609 595 dc = fft_volt[:,0,:]
610 596
611 597 #calculo de self-spectra
612 598 fft_volt = numpy.fft.fftshift(fft_volt,axes=(1,))
613 599 spc = fft_volt * numpy.conjugate(fft_volt)
614 600 spc = spc.real
615 601
616 602 blocksize = 0
617 603 blocksize += dc.size
618 604 blocksize += spc.size
619 605
620 606 cspc = None
621 607 pairIndex = 0
622 608 if self.dataOut.pairsList != None:
623 609 #calculo de cross-spectra
624 610 cspc = numpy.zeros((self.dataOut.nPairs, self.dataOut.nFFTPoints, self.dataOut.nHeights), dtype='complex')
625 611 for pair in self.dataOut.pairsList:
626 612 cspc[pairIndex,:,:] = numpy.abs(fft_volt[pair[0],:,:] * numpy.conjugate(fft_volt[pair[1],:,:]))
627 613 pairIndex += 1
628 614 blocksize += cspc.size
629 615
630 616 self.dataOut.data_spc = spc
631 617 self.dataOut.data_cspc = cspc
632 618 self.dataOut.data_dc = dc
633 619 self.dataOut.blockSize = blocksize
634 620
635 621 def init(self, nFFTPoints=None, pairsList=None):
636 622
637 623 if self.dataIn.type == "Spectra":
638 624 self.dataOut.copy(self.dataIn)
639 625 return
640 626
641 627 if self.dataIn.type == "Voltage":
642 628
643 629 if nFFTPoints == None:
644 630 raise ValueError, "This SpectraProc.init() need nFFTPoints input variable"
645 631
646 632 if pairsList == None:
647 633 nPairs = 0
648 634 else:
649 635 nPairs = len(pairsList)
650 636
651 637 self.dataOut.nFFTPoints = nFFTPoints
652 638 self.dataOut.pairsList = pairsList
653 639 self.dataOut.nPairs = nPairs
654 640
655 641 if self.buffer == None:
656 642 self.buffer = numpy.zeros((self.dataIn.nChannels,
657 643 self.dataOut.nFFTPoints,
658 644 self.dataIn.nHeights),
659 645 dtype='complex')
660 646
661 647
662 648 self.buffer[:,self.profIndex,:] = self.dataIn.data
663 649 self.profIndex += 1
664 650
665 651 if self.firstdatatime == None:
666 652 self.firstdatatime = self.dataIn.utctime
667 653
668 654 if self.profIndex == self.dataOut.nFFTPoints:
669 655 self.__updateObjFromInput()
670 656 self.__getFft()
671 657
672 658 self.dataOut.flagNoData = False
673 659
674 660 self.buffer = None
675 661 self.firstdatatime = None
676 662 self.profIndex = 0
677 663
678 664 return
679 665
680 666 raise ValuError, "The type object %s is not valid"%(self.dataIn.type)
681 667
682 668 def selectChannels(self, channelList):
683 669
684 670 channelIndexList = []
685 671
686 672 for channel in channelList:
687 673 index = self.dataOut.channelList.index(channel)
688 674 channelIndexList.append(index)
689 675
690 676 self.selectChannelsByIndex(channelIndexList)
691 677
692 678 def selectChannelsByIndex(self, channelIndexList):
693 679 """
694 680 Selecciona un bloque de datos en base a canales segun el channelIndexList
695 681
696 682 Input:
697 683 channelIndexList : lista sencilla de canales a seleccionar por ej. [2,3,7]
698 684
699 685 Affected:
700 686 self.dataOut.data_spc
701 687 self.dataOut.channelIndexList
702 688 self.dataOut.nChannels
703 689
704 690 Return:
705 691 None
706 692 """
707 693
708 694 for channelIndex in channelIndexList:
709 695 if channelIndex not in self.dataOut.channelIndexList:
710 696 print channelIndexList
711 697 raise ValueError, "The value %d in channelIndexList is not valid" %channelIndex
712 698
713 699 nChannels = len(channelIndexList)
714 700
715 701 data_spc = self.dataOut.data_spc[channelIndexList,:]
716 702
717 703 self.dataOut.data_spc = data_spc
718 704 self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
719 705 # self.dataOut.nChannels = nChannels
720 706
721 707 return 1
722 708
723 709
724 710 class IncohInt(Operation):
725 711
726 712
727 713 __profIndex = 0
728 714 __withOverapping = False
729 715
730 716 __byTime = False
731 717 __initime = None
732 718 __lastdatatime = None
733 719 __integrationtime = None
734 720
735 721 __buffer_spc = None
736 722 __buffer_cspc = None
737 723 __buffer_dc = None
738 724
739 725 __dataReady = False
740 726
741 727 n = None
742 728
743 729
744 730 def __init__(self):
745 731
746 732 self.__isConfig = False
747 733
748 734 def setup(self, n=None, timeInterval=None, overlapping=False):
749 735 """
750 736 Set the parameters of the integration class.
751 737
752 738 Inputs:
753 739
754 740 n : Number of coherent integrations
755 741 timeInterval : Time of integration. If the parameter "n" is selected this one does not work
756 742 overlapping :
757 743
758 744 """
759 745
760 746 self.__initime = None
761 747 self.__lastdatatime = 0
762 748 self.__buffer_spc = None
763 749 self.__buffer_cspc = None
764 750 self.__buffer_dc = None
765 751 self.__dataReady = False
766 752
767 753
768 754 if n == None and timeInterval == None:
769 755 raise ValueError, "n or timeInterval should be specified ..."
770 756
771 757 if n != None:
772 758 self.n = n
773 759 self.__byTime = False
774 760 else:
775 761 self.__integrationtime = timeInterval * 60. #if (type(timeInterval)!=integer) -> change this line
776 762 self.n = 9999
777 763 self.__byTime = True
778 764
779 765 if overlapping:
780 766 self.__withOverapping = True
781 767 else:
782 768 self.__withOverapping = False
783 769 self.__buffer_spc = 0
784 770 self.__buffer_cspc = 0
785 771 self.__buffer_dc = 0
786 772
787 773 self.__profIndex = 0
788 774
789 775 def putData(self, data_spc, data_cspc, data_dc):
790 776
791 777 """
792 778 Add a profile to the __buffer_spc and increase in one the __profileIndex
793 779
794 780 """
795 781
796 782 if not self.__withOverapping:
797 783 self.__buffer_spc += data_spc
798 784
799 785 if data_cspc == None:
800 786 self.__buffer_cspc = None
801 787 else:
802 788 self.__buffer_cspc += data_cspc
803 789
804 790 if data_dc == None:
805 791 self.__buffer_dc = None
806 792 else:
807 793 self.__buffer_dc += data_dc
808 794
809 795 self.__profIndex += 1
810 796 return
811 797
812 798 #Overlapping data
813 799 nChannels, nFFTPoints, nHeis = data_spc.shape
814 800 data_spc = numpy.reshape(data_spc, (1, nChannels, nFFTPoints, nHeis))
815 801 data_cspc = numpy.reshape(data_cspc, (1, -1, nFFTPoints, nHeis))
816 802 data_dc = numpy.reshape(data_dc, (1, -1, nHeis))
817 803
818 804 #If the buffer is empty then it takes the data value
819 805 if self.__buffer_spc == None:
820 806 self.__buffer_spc = data_spc.copy()
821 807
822 808 if data_cspc == None:
823 809 self.__buffer_cspc = None
824 810 else:
825 811 self.__buffer_cspc += data_cspc.copy()
826 812
827 813 if data_dc == None:
828 814 self.__buffer_dc = None
829 815 else:
830 816 self.__buffer_dc += data_dc.copy()
831 817
832 818 self.__profIndex += 1
833 819 return
834 820
835 821 #If the buffer length is lower than n then stakcing the data value
836 822 if self.__profIndex < self.n:
837 823 self.__buffer_spc = numpy.vstack((self.__buffer_spc, data_spc))
838 824
839 825 if self.__buffer_cspc != None:
840 826 self.__buffer_cspc = numpy.vstack((self.__buffer_cspc, data_cspc))
841 827
842 828 if self.__buffer_dc != None:
843 829 self.__buffer_dc = numpy.vstack((self.__buffer_dc, data_dc))
844 830
845 831 self.__profIndex += 1
846 832 return
847 833
848 834 #If the buffer length is equal to n then replacing the last buffer value with the data value
849 835 self.__buffer_spc = numpy.roll(self.__buffer_spc, -1, axis=0)
850 836 self.__buffer_spc[self.n-1] = data_spc
851 837
852 838 self.__buffer_cspc = numpy.roll(self.__buffer_cspc, -1, axis=0)
853 839 self.__buffer_cspc[self.n-1] = data_cspc
854 840
855 841 self.__buffer_dc = numpy.roll(self.__buffer_dc, -1, axis=0)
856 842 self.__buffer_dc[self.n-1] = data_dc
857 843
858 844 self.__profIndex = self.n
859 845 return
860 846
861 847
862 848 def pushData(self):
863 849 """
864 850 Return the sum of the last profiles and the profiles used in the sum.
865 851
866 852 Affected:
867 853
868 854 self.__profileIndex
869 855
870 856 """
871 857 data_spc = None
872 858 data_cspc = None
873 859 data_dc = None
874 860
875 861 if not self.__withOverapping:
876 862 data_spc = self.__buffer_spc
877 863 data_cspc = self.__buffer_cspc
878 864 data_dc = self.__buffer_dc
879 865
880 866 n = self.__profIndex
881 867
882 868 self.__buffer_spc = 0
883 869 self.__buffer_cspc = 0
884 870 self.__buffer_dc = 0
885 871 self.__profIndex = 0
886 872
887 873 return data_spc, data_cspc, data_dc, n
888 874
889 875 #Integration with Overlapping
890 876 data_spc = numpy.sum(self.__buffer_spc, axis=0)
891 877
892 878 if self.__buffer_cspc != None:
893 879 data_cspc = numpy.sum(self.__buffer_cspc, axis=0)
894 880
895 881 if self.__buffer_dc != None:
896 882 data_dc = numpy.sum(self.__buffer_dc, axis=0)
897 883
898 884 n = self.__profIndex
899 885
900 886 return data_spc, data_cspc, data_dc, n
901 887
902 888 def byProfiles(self, *args):
903 889
904 890 self.__dataReady = False
905 891 avgdata_spc = None
906 892 avgdata_cspc = None
907 893 avgdata_dc = None
908 894 n = None
909 895
910 896 self.putData(*args)
911 897
912 898 if self.__profIndex == self.n:
913 899
914 900 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
915 901 self.__dataReady = True
916 902
917 903 return avgdata_spc, avgdata_cspc, avgdata_dc
918 904
919 905 def byTime(self, datatime, *args):
920 906
921 907 self.__dataReady = False
922 908 avgdata_spc = None
923 909 avgdata_cspc = None
924 910 avgdata_dc = None
925 911 n = None
926 912
927 913 self.putData(*args)
928 914
929 915 if (datatime - self.__initime) >= self.__integrationtime:
930 916 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
931 917 self.n = n
932 918 self.__dataReady = True
933 919
934 920 return avgdata_spc, avgdata_cspc, avgdata_dc
935 921
936 922 def integrate(self, datatime, *args):
937 923
938 924 if self.__initime == None:
939 925 self.__initime = datatime
940 926
941 927 if self.__byTime:
942 928 avgdata_spc, avgdata_cspc, avgdata_dc = self.byTime(datatime, *args)
943 929 else:
944 930 avgdata_spc, avgdata_cspc, avgdata_dc = self.byProfiles(*args)
945 931
946 932 self.__lastdatatime = datatime
947 933
948 934 if avgdata_spc == None:
949 935 return None, None, None, None
950 936
951 937 avgdatatime = self.__initime
952 938
953 939 deltatime = datatime -self.__lastdatatime
954 940
955 941 if not self.__withOverapping:
956 942 self.__initime = datatime
957 943 else:
958 944 self.__initime += deltatime
959 945
960 946 return avgdatatime, avgdata_spc, avgdata_cspc, avgdata_dc
961 947
962 948 def run(self, dataOut, n=None, timeInterval=None, overlapping=False):
963 949
964 950 if not self.__isConfig:
965 951 self.setup(n, timeInterval, overlapping)
966 952 self.__isConfig = True
967 953
968 954 avgdatatime, avgdata_spc, avgdata_cspc, avgdata_dc = self.integrate(dataOut.utctime,
969 955 dataOut.data_spc,
970 956 dataOut.data_cspc,
971 957 dataOut.data_dc)
972 958
973 959 # dataOut.timeInterval *= n
974 960 dataOut.flagNoData = True
975 961
976 962 if self.__dataReady:
977 963 dataOut.data_spc = avgdata_spc
978 964 dataOut.data_cspc = avgdata_cspc
979 965 dataOut.data_dc = avgdata_dc
980 966
981 967 dataOut.nIncohInt *= self.n
982 968 dataOut.utctime = avgdatatime
983 969 dataOut.timeInterval = dataOut.ippSeconds * dataOut.nCohInt * dataOut.nIncohInt * dataOut.nFFTPoints
984 970 dataOut.flagNoData = False
985 971 No newline at end of file
General Comments 0
You need to be logged in to leave comments. Login now