##// END OF EJS Templates
Actualizacion de nombres en minusculas al importarlos en los modulos
Miguel Valdez -
r178:619c0eb38b43
parent child
Show More
@@ -1,241 +1,241
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JROData.py 173 2012-11-20 15:06:21Z murco $
5 5 '''
6 6
7 7 import os, sys
8 8 import copy
9 9 import numpy
10 10
11 from JROHeaderIO import SystemHeader, RadarControllerHeader
11 from jroheaderIO import SystemHeader, RadarControllerHeader
12 12
class JROData:
    """
    Base container for Jicamarca radar data.

    Holds the system/radar-controller headers plus the acquisition metadata
    shared by every data kind (voltage, spectra). Not directly instantiable:
    the constructor always raises.
    """

    systemHeaderObj = SystemHeader()

    radarControllerHeaderObj = RadarControllerHeader()

    type = None

    dtype = None

    nChannels = None

    nHeights = None

    nProfiles = None

    heightList = None

    channelList = None

    channelIndexList = None

    flagNoData = True

    flagTimeBlock = False

    utctime = None

    blocksize = None

    nCode = None

    nBaud = None

    code = None

    # decoding/flip/FFT state flags: data is assumed already decoded / unflipped
    flagDecodeData = True

    flagDeflipData = True

    flagShiftFFT = False

    ippSeconds = None

    timeInterval = None

    def __init__(self):
        """Abstract base: always raises."""
        raise ValueError("This class has not been implemented")

    def copy(self, inputObj=None):
        """
        With no argument, return a deep copy of self; otherwise copy every
        attribute of *inputObj* into self (shallow, attribute by attribute).
        """
        if inputObj == None:
            return copy.deepcopy(self)

        self.__dict__.update(inputObj.__dict__)

    def deepcopy(self):
        """Return an independent deep copy of this object."""
        return copy.deepcopy(self)
79 79
class Voltage(JROData):
    """Raw voltage data; `data` is a 2-D numpy array (channels, heights)."""

    # number of coherent integrations applied
    nCohInt = None

    data = None

    def __init__(self):
        """Initialise an empty voltage container with fresh header objects."""
        self.radarControllerHeaderObj = RadarControllerHeader()
        self.systemHeaderObj = SystemHeader()

        self.type = "Voltage"

        # payload and its layout
        self.data = None
        self.dtype = None
        self.nChannels = 0
        self.nHeights = 0
        self.nProfiles = None

        self.heightList = None
        self.channelList = None
        self.channelIndexList = None

        # state flags
        self.flagNoData = True
        self.flagTimeBlock = False

        self.utctime = None
        self.nCohInt = None
        self.blocksize = None
123 123
class Spectra(JROData):
    """
    Spectral data container.

    data_spc  : numpy array, auto-spectra  -- original comment says
                (channels, profiles, heights); confirm rank against producers
    data_cspc : numpy array, cross-spectra (channels, pairs, heights)
    data_dc   : numpy array, DC component (channels, heights)
    """

    data_spc = None

    data_cspc = None

    data_dc = None

    nFFTPoints = None

    nPairs = None

    pairsList = None

    nIncohInt = None

    wavelength = None  # needed to derive the velocity range from frequency

    nCohInt = None     # required to compute timeInterval

    def __init__(self):
        """Initialise an empty spectra container with fresh header objects."""
        self.radarControllerHeaderObj = RadarControllerHeader()

        self.systemHeaderObj = SystemHeader()

        self.type = "Spectra"

        self.dtype = None

        self.nChannels = 0

        self.nHeights = 0

        self.nProfiles = None

        self.heightList = None

        self.channelList = None

        self.channelIndexList = None

        self.flagNoData = True

        self.flagTimeBlock = False

        self.utctime = None

        self.nIncohInt = None

        self.blocksize = None

        self.nFFTPoints = None

        self.wavelength = None

    def getFrequencies(self):
        """
        Placeholder for the frequency-axis computation.

        NOTE(review): the original body built numpy.arange(self.nFFTPoints)
        into a local named `xrange` (shadowing the builtin) and then discarded
        it, always returning None. Behaviour is preserved here; the dead
        computation was removed. A real implementation presumably needs
        ippSeconds and/or wavelength -- confirm before relying on this method.
        """
        return None
193 193
194 194
class SpectraHeis(JROData):
    """Heterodyne spectra container (same field layout as Spectra)."""

    data_spc = None

    data_cspc = None

    data_dc = None

    nFFTPoints = None

    nPairs = None

    pairsList = None

    nIncohInt = None

    def __init__(self):
        """Initialise an empty SpectraHeis container with fresh header objects."""
        self.radarControllerHeaderObj = RadarControllerHeader()
        self.systemHeaderObj = SystemHeader()

        self.type = "SpectraHeis"

        # payload layout
        self.dtype = None
        self.nChannels = 0
        self.nHeights = 0
        self.nProfiles = None

        self.heightList = None
        self.channelList = None
        self.channelIndexList = None

        # state flags
        self.flagNoData = True
        self.flagTimeBlock = False

        self.nPairs = 0
        self.utctime = None
        self.blocksize = None
@@ -1,2485 +1,2485
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JRODataIO.py 169 2012-11-19 21:57:03Z murco $
5 5 '''
6 6
7 7 import os, sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import time, datetime
13 13
14 from Data.JROData import *
15 from JROHeaderIO import *
14 from jrodata import *
15 from jroheaderIO import *
16 16
def isNumber(str):
    """
    Check whether *str* can be converted to a number.

    Input:
        str : value to test (typically a string). The parameter keeps its
              original name for backward compatibility even though it shadows
              the builtin.

    Return:
        True  : the value converts cleanly to float
        False : it does not
    """
    try:
        float(str)
        return True
    except (ValueError, TypeError):
        # narrowed from a bare `except:` so unrelated errors (KeyboardInterrupt,
        # SystemExit, ...) are no longer silently swallowed
        return False
35 35
def isThisFileinRange(filename, startUTSeconds, endUTSeconds):
    """
    Decide whether a Jicamarca data file holds data inside the given date range.

    Inputs:
        filename       : full path of the data file (Jicamarca .r format)
        startUTSeconds : start of the range, seconds since 01/01/1970
        endUTSeconds   : end of the range, seconds since 01/01/1970

    Return:
        1 when the file's first basic-header timestamp falls inside
        [startUTSeconds, endUTSeconds), 0 otherwise.

    Exceptions:
        IOError when the file cannot be opened.
    """
    headerObj = BasicHeader()

    try:
        handle = open(filename, 'rb')
    except:
        raise IOError("The file %s can't be opened" % (filename))

    headerOk = headerObj.read(handle)
    handle.close()

    if not headerOk:
        print("Skipping the file %s because it has not a valid header" % (filename))
        return 0

    # half-open interval check on the header timestamp
    if not (startUTSeconds <= headerObj.utc < endUTSeconds):
        return 0

    return 1
75 75
def getlastFileFromPath(path, ext):
    """
    Filter the entries of *path* down to those matching the 'xYYYYDDDSSS.ext'
    naming scheme and return the last one in case-insensitive sort order.

    Input:
        path : folder to scan
        ext  : extension the files must carry (case-insensitive)

    Return:
        The last matching file name (without path), or None when none match.
    """
    candidates = []

    # name layout:  0 1234 567 89A BCDE
    #               x YYYY DDD SSS .ext
    for entry in os.listdir(path):
        try:
            int(entry[1:5])    # year field must be numeric
            int(entry[5:8])    # day-of-year field must be numeric

            if os.path.splitext(entry)[-1].upper() != ext.upper():
                continue
        except:
            continue

        candidates.append(entry)

    if not candidates:
        return None

    candidates.sort(key=str.lower)
    return candidates[-1]
110 110
def checkForRealPath(path, year, doy, set, ext):
    """
    Find the real on-disk path of a data file on a case-sensitive filesystem.

    Tries both lower/upper-case variants of the day-folder prefix and the
    file prefix, e.g. for .../D2009307/P2009307367.ext it probes:
        .../x2009307/y..., .../x2009307/Y..., .../X2009307/y..., .../X2009307/Y...

    Return:
        (filepath, filename) when a combination exists on disk;
        (None, filename) when none does (filename is the last combination
        tried, or None for an unknown extension).
    """
    extLower = ext.lower()
    if extLower == ".r":          # voltage data
        dirChars, fileChars = "dD", "dD"
    elif extLower == ".pdata":    # spectra data
        dirChars, fileChars = "dD", "pP"
    else:
        return None, None

    filename = None

    for dirChar in dirChars:          # both cases of the folder prefix
        for fileChar in fileChars:    # both cases of the file prefix
            doyFolder = "%s%04d%03d" % (dirChar, year, doy)                 # xYYYYDDD
            filename = "%s%04d%03d%03d%s" % (fileChar, year, doy, set, ext) # xYYYYDDDSSS.ext
            fullpath = os.path.join(path, doyFolder, filename)
            if os.path.exists(fullpath):
                return fullpath, filename

    return None, filename
160 160
class JRODataIO:
    """
    Shared state for Jicamarca data readers and writers: the four header
    objects, the current file handle, and the bookkeeping around them.
    Abstract -- both the constructor and run() always raise.
    """

    c = 3E8  # speed of light, m/s

    __isConfig = False

    basicHeaderObj = BasicHeader()

    systemHeaderObj = SystemHeader()

    radarControllerHeaderObj = RadarControllerHeader()

    processingHeaderObj = ProcessingHeader()

    online = 0

    dtype = None

    pathList = []

    filenameList = []

    filename = None

    ext = None

    flagNoMoreFiles = 0

    flagIsNewFile = 1

    flagTimeBlock = 0

    flagIsNewBlock = 0

    fp = None

    firstHeaderSize = 0

    basicHeaderSize = 24

    versionFile = 1103

    fileSize = None

    ippSeconds = None

    fileSizeByHeader = None

    fileIndex = None

    profileIndex = None

    blockIndex = None

    nTotalBlocks = None

    # blocks further apart than this many seconds are flagged as a time jump
    maxTimeStep = 30

    lastUTTime = None

    datablock = None

    dataOutObj = None

    blocksize = None

    def __init__(self):
        """Abstract base: always raises."""
        raise ValueError("Not implemented")

    def run(self):
        """Abstract hook: always raises."""
        raise ValueError("Not implemented")
234 234
235 235
236 236
class JRODataReader(JRODataIO):

    # data blocks read so far from the current file
    nReadBlocks = 0

    delay = 60 # number of seconds to wait for a new file (online mode)

    nTries = 3 # number of retries while waiting for a file

    nFiles = 3 # number of candidate files probed before moving to the next day
246 246
247 247
248 248 def __init__(self):
249 249
250 250 """
251 251
252 252 """
253 253
254 254 raise ValueError, "This method has not been implemented"
255 255
256 256
257 257 def createObjByDefault(self):
258 258 """
259 259
260 260 """
261 261 raise ValueError, "This method has not been implemented"
262 262
263 263 def getBlockDimension(self):
264 264
265 265 raise ValueError, "No implemented"
266 266
267 267 def __searchFilesOffLine(self,
268 268 path,
269 269 startDate,
270 270 endDate,
271 271 startTime=datetime.time(0,0,0),
272 272 endTime=datetime.time(23,59,59),
273 273 set=None,
274 274 expLabel="",
275 275 ext=".r"):
276 276 dirList = []
277 277 for thisPath in os.listdir(path):
278 278 if os.path.isdir(os.path.join(path,thisPath)):
279 279 dirList.append(thisPath)
280 280
281 281 if not(dirList):
282 282 return None, None
283 283
284 284 pathList = []
285 285 dateList = []
286 286
287 287 thisDate = startDate
288 288
289 289 while(thisDate <= endDate):
290 290 year = thisDate.timetuple().tm_year
291 291 doy = thisDate.timetuple().tm_yday
292 292
293 293 match = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy))
294 294 if len(match) == 0:
295 295 thisDate += datetime.timedelta(1)
296 296 continue
297 297
298 298 pathList.append(os.path.join(path,match[0],expLabel))
299 299 dateList.append(thisDate)
300 300 thisDate += datetime.timedelta(1)
301 301
302 302 filenameList = []
303 303 for index in range(len(pathList)):
304 304
305 305 thisPath = pathList[index]
306 306 fileList = glob.glob1(thisPath, "*%s" %ext)
307 307 fileList.sort()
308 308
309 309 #Busqueda de datos en el rango de horas indicados
310 310 thisDate = dateList[index]
311 311 startDT = datetime.datetime.combine(thisDate, startTime)
312 312 endDT = datetime.datetime.combine(thisDate, endTime)
313 313
314 314 startUtSeconds = time.mktime(startDT.timetuple())
315 315 endUtSeconds = time.mktime(endDT.timetuple())
316 316
317 317 for file in fileList:
318 318
319 319 filename = os.path.join(thisPath,file)
320 320
321 321 if isThisFileinRange(filename, startUtSeconds, endUtSeconds):
322 322 filenameList.append(filename)
323 323
324 324 if not(filenameList):
325 325 return None, None
326 326
327 327 self.filenameList = filenameList
328 328
329 329 return pathList, filenameList
330 330
    def __searchFilesOnLine(self, path, startDate=None, endDate=None, startTime=None, endTime=None, expLabel = "", ext = None):

        """
        Look for the last file of the last day folder (optionally constrained
        by startDate/endDate) and return it together with its date fields.

        Input:
            path : folder containing the day folders with data files

            startDate : initial date; folders whose file end time < startDate
                        are rejected (datetime.date object)

            endDate : final date; folders whose file start time > endDate
                      are rejected (datetime.date object)

            startTime : initial time; files whose end time < startTime
                        are rejected (datetime.time object)

            endTime : final time; files whose start time > endTime
                      are rejected (datetime.time object)

            expLabel : sub-experiment (sub-folder) name

            ext : file extension

        Return:
            directory : the folder where the file was found
            filename : the last file of that folder
            year : the year
            doy : the day of year
            set : the file set number
        """
        dirList = []
        pathList = []
        directory = None

        # keep only sub-directories of path
        for thisPath in os.listdir(path):
            if os.path.isdir(os.path.join(path, thisPath)):
                dirList.append(thisPath)

        if not(dirList):
            return None, None, None, None, None

        dirList = sorted( dirList, key=str.lower )

        if startDate:
            startDateTime = datetime.datetime.combine(startDate, startTime)
            thisDateTime = startDateTime
            if endDate == None: endDateTime = startDateTime
            else: endDateTime = datetime.datetime.combine(endDate, endTime)

            while(thisDateTime <= endDateTime):
                year = thisDateTime.timetuple().tm_year
                doy = thisDateTime.timetuple().tm_yday

                # day folders look like xYYYYDDD with an arbitrary prefix character
                match = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy))
                if len(match) == 0:
                    thisDateTime += datetime.timedelta(1)
                    continue

                pathList.append(os.path.join(path,match[0], expLabel))
                thisDateTime += datetime.timedelta(1)

            if not(pathList):
                print "\tNo files in range: %s - %s" %(startDateTime.ctime(), endDateTime.ctime())
                return None, None, None, None, None

            # NOTE(review): only the first matching day folder is used
            directory = pathList[0]

        else:
            # no start date: take the most recent day folder
            directory = dirList[-1]
            directory = os.path.join(path,directory)

        filename = getlastFileFromPath(directory, ext)

        if not(filename):
            return None, None, None, None, None

        if not(self.__verifyFile(os.path.join(directory, filename))):
            return None, None, None, None, None

        # filename layout: xYYYYDDDSSS.ext
        year = int( filename[1:5] )
        doy = int( filename[5:8] )
        set = int( filename[8:11] )

        return directory, filename, year, doy, set
420 420
421 421 def setup(self,dataOutObj=None,
422 422 path=None,
423 423 startDate=None,
424 424 endDate=None,
425 425 startTime=datetime.time(0,0,0),
426 426 endTime=datetime.time(23,59,59),
427 427 set=0,
428 428 expLabel = "",
429 429 ext = None,
430 430 online = False,
431 431 delay = 60):
432 432
433 433 if path == None:
434 434 raise ValueError, "The path is not valid"
435 435
436 436 if ext == None:
437 437 ext = self.ext
438 438
439 439 if dataOutObj == None:
440 440 dataOutObj = self.createObjByDefault()
441 441
442 442 self.dataOutObj = dataOutObj
443 443
444 444 if online:
445 445 print "Searching files in online mode..."
446 446 doypath, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext)
447 447
448 448 if not(doypath):
449 449 for nTries in range( self.nTries ):
450 450 print '\tWaiting %0.2f sec for an valid file in %s: try %02d ...' % (self.delay, path, nTries+1)
451 451 time.sleep( self.delay )
452 452 doypath, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=exp)
453 453 if doypath:
454 454 break
455 455
456 456 if not(doypath):
457 457 print "There 'isn't valied files in %s" % path
458 458 return None
459 459
460 460 self.year = year
461 461 self.doy = doy
462 462 self.set = set - 1
463 463 self.path = path
464 464
465 465 else:
466 466 print "Searching files in offline mode ..."
467 467 pathList, filenameList = self.__searchFilesOffLine(path, startDate, endDate, startTime, endTime, set, expLabel, ext)
468 468
469 469 if not(pathList):
470 470 print "No *%s files into the folder %s \nfor the range: %s - %s"%(ext, path,
471 471 datetime.datetime.combine(startDate,startTime).ctime(),
472 472 datetime.datetime.combine(endDate,endTime).ctime())
473 473
474 474 sys.exit(-1)
475 475
476 476
477 477 self.fileIndex = -1
478 478 self.pathList = pathList
479 479 self.filenameList = filenameList
480 480
481 481 self.online = online
482 482 self.delay = delay
483 483 ext = ext.lower()
484 484 self.ext = ext
485 485
486 486 if not(self.setNextFile()):
487 487 if (startDate!=None) and (endDate!=None):
488 488 print "No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
489 489 elif startDate != None:
490 490 print "No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
491 491 else:
492 492 print "No files"
493 493
494 494 sys.exit(-1)
495 495
496 496 # self.updateDataHeader()
497 497
498 498 return self.dataOutObj
499 499
500 500 def __setNextFileOffline(self):
501 501
502 502 idFile = self.fileIndex
503 503
504 504 while (True):
505 505 idFile += 1
506 506 if not(idFile < len(self.filenameList)):
507 507 self.flagNoMoreFiles = 1
508 508 print "No more Files"
509 509 return 0
510 510
511 511 filename = self.filenameList[idFile]
512 512
513 513 if not(self.__verifyFile(filename)):
514 514 continue
515 515
516 516 fileSize = os.path.getsize(filename)
517 517 fp = open(filename,'rb')
518 518 break
519 519
520 520 self.flagIsNewFile = 1
521 521 self.fileIndex = idFile
522 522 self.filename = filename
523 523 self.fileSize = fileSize
524 524 self.fp = fp
525 525
526 526 print "Setting the file: %s"%self.filename
527 527
528 528 return 1
529 529
    def __setNextFileOnline(self):
        """
        Look for the next file with enough data to be read inside the current
        folder; when no valid file shows up, wait a fixed delay and retry over
        the next possible n candidate files.

        Affected:
            self.flagIsNewFile
            self.filename
            self.fileSize
            self.fp
            self.set
            self.flagNoMoreFiles

        Return:
            0 : no valid next file could be found
            1 : the file was opened successfully and is ready to be read

        Exceptions:
            if a given file cannot be opened
        """
        nFiles = 0
        fileOk_flag = False
        firstTime_flag = True

        self.set += 1

        # probe the first candidate file
        file, filename = checkForRealPath( self.path, self.year, self.doy, self.set, self.ext )
        if file:
            if self.__verifyFile(file, False):
                fileOk_flag = True

        # no file yet: wait and search again over the next candidates
        if not(fileOk_flag):
            for nFiles in range(self.nFiles+1): # scan the next self.nFiles+1 candidate files

                if firstTime_flag: # first pass: retry self.nTries times
                    tries = self.nTries
                else:
                    tries = 1 # afterwards: a single attempt per candidate

                for nTries in range( tries ):
                    if firstTime_flag:
                        print "\tWaiting %0.2f sec for the file \"%s\" , try %03d ..." % ( self.delay, filename, nTries+1 )
                        time.sleep( self.delay )
                    else:
                        # NOTE(review): self.optchar is not defined anywhere in
                        # this class as shown -- presumably set by a subclass;
                        # verify, otherwise this line raises AttributeError.
                        print "\tSearching next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext)

                    file, filename = checkForRealPath( self.path, self.year, self.doy, self.set, self.ext )
                    if file:
                        if self.__verifyFile(file):
                            fileOk_flag = True
                            break

                if fileOk_flag:
                    break

                firstTime_flag = False

                print "\tSkipping the file \"%s\" due to this file doesn't exist" % filename
                self.set += 1

                if nFiles == (self.nFiles-1): # candidates exhausted: move to the next day folder
                    self.set = 0
                    self.doy += 1

        if fileOk_flag:
            self.fileSize = os.path.getsize( file )
            self.filename = file
            self.flagIsNewFile = 1
            if self.fp != None: self.fp.close()
            # NOTE(review): opened in text mode here, unlike the offline
            # path's 'rb' -- confirm this is intentional.
            self.fp = open(file)
            self.flagNoMoreFiles = 0
            print 'Setting the file: %s' % file
        else:
            self.fileSize = 0
            self.filename = None
            self.flagIsNewFile = 0
            self.fp = None
            self.flagNoMoreFiles = 1
            print 'No more Files'

        return fileOk_flag
614 614
615 615
616 616 def setNextFile(self):
617 617 if self.fp != None:
618 618 self.fp.close()
619 619
620 620 if self.online:
621 621 newFile = self.__setNextFileOnline()
622 622 else:
623 623 newFile = self.__setNextFileOffline()
624 624
625 625 if not(newFile):
626 626 return 0
627 627
628 628 self.__readFirstHeader()
629 629 self.nReadBlocks = 0
630 630 return 1
631 631
632 632 def __setNewBlock(self):
633 633 if self.fp == None:
634 634 return 0
635 635
636 636 if self.flagIsNewFile:
637 637 return 1
638 638
639 639 self.lastUTTime = self.basicHeaderObj.utc
640 640 currentSize = self.fileSize - self.fp.tell()
641 641 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
642 642
643 643 if (currentSize >= neededSize):
644 644 self.__rdBasicHeader()
645 645 return 1
646 646
647 647 if not(self.setNextFile()):
648 648 return 0
649 649
650 650 deltaTime = self.basicHeaderObj.utc - self.lastUTTime #
651 651
652 652 self.flagTimeBlock = 0
653 653
654 654 if deltaTime > self.maxTimeStep:
655 655 self.flagTimeBlock = 1
656 656
657 657 return 1
658 658
659 659
660 660 def readNextBlock(self):
661 661 if not(self.__setNewBlock()):
662 662 return 0
663 663
664 664 if not(self.readBlock()):
665 665 return 0
666 666
667 667 return 1
668 668
669 669 def __rdProcessingHeader(self, fp=None):
670 670 if fp == None:
671 671 fp = self.fp
672 672
673 673 self.processingHeaderObj.read(fp)
674 674
675 675 def __rdRadarControllerHeader(self, fp=None):
676 676 if fp == None:
677 677 fp = self.fp
678 678
679 679 self.radarControllerHeaderObj.read(fp)
680 680
681 681 def __rdSystemHeader(self, fp=None):
682 682 if fp == None:
683 683 fp = self.fp
684 684
685 685 self.systemHeaderObj.read(fp)
686 686
687 687 def __rdBasicHeader(self, fp=None):
688 688 if fp == None:
689 689 fp = self.fp
690 690
691 691 self.basicHeaderObj.read(fp)
692 692
693 693
694 694 def __readFirstHeader(self):
695 695 self.__rdBasicHeader()
696 696 self.__rdSystemHeader()
697 697 self.__rdRadarControllerHeader()
698 698 self.__rdProcessingHeader()
699 699
700 700 self.firstHeaderSize = self.basicHeaderObj.size
701 701
702 702 datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
703 703 if datatype == 0:
704 704 datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
705 705 elif datatype == 1:
706 706 datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')])
707 707 elif datatype == 2:
708 708 datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')])
709 709 elif datatype == 3:
710 710 datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')])
711 711 elif datatype == 4:
712 712 datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')])
713 713 elif datatype == 5:
714 714 datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')])
715 715 else:
716 716 raise ValueError, 'Data type was not defined'
717 717
718 718 self.dtype = datatype_str
719 719 self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
720 720 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1)
721 721 # self.dataOutObj.channelList = numpy.arange(self.systemHeaderObj.numChannels)
722 722 # self.dataOutObj.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
723 723 self.getBlockDimension()
724 724
725 725
    def __verifyFile(self, filename, msgFlag=True):
        """
        Check that *filename* can be opened and holds at least one complete
        data block (headers + block payload).

        When self.processingHeaderObj has not been filled yet (neededSize == 0)
        the headers are read from the file itself to compute the needed size.

        Return: True when the file has enough data, False otherwise.
        *msgFlag* controls whether diagnostic messages are printed.
        """
        msg = None
        try:
            fp = open(filename, 'rb')
            currentPosition = fp.tell()
        except:
            if msgFlag:
                print "The file %s can't be opened" % (filename)
            return False

        neededSize = self.processingHeaderObj.blockSize + self.firstHeaderSize

        if neededSize == 0:
            # headers not loaded yet: read them from this very file
            basicHeaderObj = BasicHeader()
            systemHeaderObj = SystemHeader()
            radarControllerHeaderObj = RadarControllerHeader()
            processingHeaderObj = ProcessingHeader()

            try:
                if not( basicHeaderObj.read(fp) ): raise ValueError
                if not( systemHeaderObj.read(fp) ): raise ValueError
                if not( radarControllerHeaderObj.read(fp) ): raise ValueError
                if not( processingHeaderObj.read(fp) ): raise ValueError
                # decoded only to validate the processing flags; value unused
                data_type = int(numpy.log2((processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))

                neededSize = processingHeaderObj.blockSize + basicHeaderObj.size

            except:
                if msgFlag:
                    print "\tThe file %s is empty or it hasn't enough data" % filename

                fp.close()
                return False
        else:
            msg = "\tSkipping the file %s due to it hasn't enough data" %filename

        fp.close()
        fileSize = os.path.getsize(filename)
        currentSize = fileSize - currentPosition
        if currentSize < neededSize:
            if msgFlag and (msg != None):
                print msg
            return False

        return True
771 771
772 772 def getData():
773 773 pass
774 774
775 775 def hasNotDataInBuffer():
776 776 pass
777 777
778 778 def readBlock():
779 779 pass
780 780
    def run(self, **kwargs):
        """One-shot entry point: configure on the first call, then read data."""
        if not(self.__isConfig):

            # NOTE(review): `dataOut` is undefined in this scope, so this line
            # raises NameError on the first call -- presumably the object
            # returned by self.setup() (or a kwarg) was intended; verify.
            # Also, `__isConfig` is declared in JRODataIO, so name mangling
            # makes this access look up a different attribute
            # (_JRODataReader__isConfig vs _JRODataIO__isConfig) -- confirm.
            self.dataOutObj = dataOut
            self.setup(**kwargs)
            self.__isConfig = True

        self.getData()
790 790
class JRODataWriter(JRODataIO):

    """
    Writes data to processed files (.r or .pdata). Data is always written
    block by block.
    """

    # index of the block being written within the current file
    blockIndex = 0

    path = None

    # set number of the current output file (SSS field of xYYYYDDDSSS.ext)
    setFile = None

    profilesPerBlock = None

    blocksPerFile = None

    nWriteBlocks = 0
809 809
810 810 def __init__(self, dataOutObj=None):
811 811 raise ValueError, "Not implemented"
812 812
813 813
814 814 def hasAllDataInBuffer(self):
815 815 raise ValueError, "Not implemented"
816 816
817 817
818 818 def setBlockDimension(self):
819 819 raise ValueError, "Not implemented"
820 820
821 821
822 822 def writeBlock(self):
823 823 raise ValueError, "No implemented"
824 824
825 825
826 826 def putData(self):
827 827 raise ValueError, "No implemented"
828 828
829 829 def getDataHeader(self):
830 830 """
831 831 Obtiene una copia del First Header
832 832
833 833 Affected:
834 834
835 835 self.basicHeaderObj
836 836 self.systemHeaderObj
837 837 self.radarControllerHeaderObj
838 838 self.processingHeaderObj self.
839 839
840 840 Return:
841 841 None
842 842 """
843 843
844 844 raise ValueError, "No implemented"
845 845
846 846 def getBasicHeader(self):
847 847
848 848 self.basicHeaderObj.size = self.basicHeaderSize #bytes
849 849 self.basicHeaderObj.version = self.versionFile
850 850 self.basicHeaderObj.dataBlock = self.nTotalBlocks
851 851
852 852 utc = numpy.floor(self.dataOutObj.utctime)
853 853 milisecond = (self.dataOutObj.utctime - utc)* 1000.0
854 854
855 855 self.basicHeaderObj.utc = utc
856 856 self.basicHeaderObj.miliSecond = milisecond
857 857 self.basicHeaderObj.timeZone = 0
858 858 self.basicHeaderObj.dstFlag = 0
859 859 self.basicHeaderObj.errorCount = 0
860 860
861 861 def __writeFirstHeader(self):
862 862 """
863 863 Escribe el primer header del file es decir el Basic header y el Long header (SystemHeader, RadarControllerHeader, ProcessingHeader)
864 864
865 865 Affected:
866 866 __dataType
867 867
868 868 Return:
869 869 None
870 870 """
871 871
872 872 # CALCULAR PARAMETROS
873 873
874 874 sizeLongHeader = self.systemHeaderObj.size + self.radarControllerHeaderObj.size + self.processingHeaderObj.size
875 875 self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader
876 876
877 877 self.basicHeaderObj.write(self.fp)
878 878 self.systemHeaderObj.write(self.fp)
879 879 self.radarControllerHeaderObj.write(self.fp)
880 880 self.processingHeaderObj.write(self.fp)
881 881
882 882 self.dtype = self.dataOutObj.dtype
883 883
884 884 def __setNewBlock(self):
885 885 """
886 886 Si es un nuevo file escribe el First Header caso contrario escribe solo el Basic Header
887 887
888 888 Return:
889 889 0 : si no pudo escribir nada
890 890 1 : Si escribio el Basic el First Header
891 891 """
892 892 if self.fp == None:
893 893 self.setNextFile()
894 894
895 895 if self.flagIsNewFile:
896 896 return 1
897 897
898 898 if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
899 899 self.basicHeaderObj.write(self.fp)
900 900 return 1
901 901
902 902 if not( self.setNextFile() ):
903 903 return 0
904 904
905 905 return 1
906 906
907 907
908 908 def writeNextBlock(self):
909 909 """
910 910 Selecciona el bloque siguiente de datos y los escribe en un file
911 911
912 912 Return:
913 913 0 : Si no hizo pudo escribir el bloque de datos
914 914 1 : Si no pudo escribir el bloque de datos
915 915 """
916 916 if not( self.__setNewBlock() ):
917 917 return 0
918 918
919 919 self.writeBlock()
920 920
921 921 return 1
922 922
923 923 def setNextFile(self):
924 924 """
925 925 Determina el siguiente file que sera escrito
926 926
927 927 Affected:
928 928 self.filename
929 929 self.subfolder
930 930 self.fp
931 931 self.setFile
932 932 self.flagIsNewFile
933 933
934 934 Return:
935 935 0 : Si el archivo no puede ser escrito
936 936 1 : Si el archivo esta listo para ser escrito
937 937 """
938 938 ext = self.ext
939 939 path = self.path
940 940
941 941 if self.fp != None:
942 942 self.fp.close()
943 943
944 944 timeTuple = time.localtime( self.dataOutObj.dataUtcTime)
945 945 subfolder = 'D%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
946 946
947 947 doypath = os.path.join( path, subfolder )
948 948 if not( os.path.exists(doypath) ):
949 949 os.mkdir(doypath)
950 950 self.setFile = -1 #inicializo mi contador de seteo
951 951 else:
952 952 filesList = os.listdir( doypath )
953 953 if len( filesList ) > 0:
954 954 filesList = sorted( filesList, key=str.lower )
955 955 filen = filesList[-1]
956 956 # el filename debera tener el siguiente formato
957 957 # 0 1234 567 89A BCDE (hex)
958 958 # x YYYY DDD SSS .ext
959 959 if isNumber( filen[8:11] ):
960 960 self.setFile = int( filen[8:11] ) #inicializo mi contador de seteo al seteo del ultimo file
961 961 else:
962 962 self.setFile = -1
963 963 else:
964 964 self.setFile = -1 #inicializo mi contador de seteo
965 965
966 966 setFile = self.setFile
967 967 setFile += 1
968 968
969 969 file = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
970 970 timeTuple.tm_year,
971 971 timeTuple.tm_yday,
972 972 setFile,
973 973 ext )
974 974
975 975 filename = os.path.join( path, subfolder, file )
976 976
977 977 fp = open( filename,'wb' )
978 978
979 979 self.blockIndex = 0
980 980
981 981 #guardando atributos
982 982 self.filename = filename
983 983 self.subfolder = subfolder
984 984 self.fp = fp
985 985 self.setFile = setFile
986 986 self.flagIsNewFile = 1
987 987
988 988 self.getDataHeader()
989 989
990 990 print 'Writing the file: %s'%self.filename
991 991
992 992 self.__writeFirstHeader()
993 993
994 994 return 1
995 995
996 996 def setup(self, path, blocksPerFile, profilesPerBlock=None, set=0, ext=None):
997 997 """
998 998 Setea el tipo de formato en la cual sera guardada la data y escribe el First Header
999 999
1000 1000 Inputs:
1001 1001 path : el path destino en el cual se escribiran los files a crear
1002 1002 format : formato en el cual sera salvado un file
1003 1003 set : el setebo del file
1004 1004
1005 1005 Return:
1006 1006 0 : Si no realizo un buen seteo
1007 1007 1 : Si realizo un buen seteo
1008 1008 """
1009 1009
1010 1010 if ext == None:
1011 1011 ext = self.ext
1012 1012
1013 1013 ext = ext.lower()
1014 1014
1015 1015 self.ext = ext
1016 1016
1017 1017 self.path = path
1018 1018
1019 1019 self.setFile = set - 1
1020 1020
1021 1021 self.blocksPerFile = blocksPerFile
1022 1022
1023 1023 self.profilesPerBlock = profilesPerBlock
1024 1024
1025 1025 if not(self.setNextFile()):
1026 1026 print "There isn't a next file"
1027 1027 return 0
1028 1028
1029 1029 self.setBlockDimension()
1030 1030
1031 1031 return 1
1032 1032
1033 1033 def run(self, dataOut, **kwargs):
1034 1034
1035 1035 if not(self.__isConfig):
1036 1036
1037 1037 self.dataOutObj = dataOut
1038 1038 self.setup(**kwargs)
1039 1039 self.__isConfig = True
1040 1040
1041 1041 self.putData()
1042 1042
class VoltageReader(JRODataReader):
    """
    Reads voltage data from rawdata files (.r). Reading is always done in
    whole blocks; the data read (3D array: profiles * heights * channels)
    is stored in the "datablock" buffer.

    This class holds instances of BasicHeader, SystemHeader,
    RadarControllerHeader (file metadata) and Voltage, the last one used to
    fetch and publish one profile from the buffer on every getData() call.

    Example:

        dpath = "/home/myuser/data"

        startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)

        endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)

        readerObj = VoltageReader()

        readerObj.setup(dpath, startTime, endTime)

        while(True):

            #to get one profile
            profile = readerObj.getData()

            #print the profile
            print profile

            #If you want to see all datablock
            print readerObj.datablock

            if readerObj.flagNoMoreFiles:
                break

    """

    ext = ".r"

    optchar = "D"

    dataOutObj = None


    def __init__(self, dataOutObj=None):
        """
        VoltageReader constructor.

        Input:
            dataOutObj : optional Voltage instance meant to receive one
                         profile on each getData() call.
                         NOTE(review): this argument is currently not stored;
                         the output object comes from createObjByDefault() --
                         confirm before relying on it.

        Affected:
            self.dataOutObj

        Return:
            None
        """

        self.__isConfig = False

        # read buffer (channels, profiles, heights) filled by readBlock()
        self.datablock = None

        self.utc = 0

        self.ext = ".r"

        self.optchar = "D"

        # file metadata containers
        self.basicHeaderObj = BasicHeader()

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()

        self.online = 0

        self.fp = None

        self.idFile = None

        self.dtype = None

        self.fileSizeByHeader = None

        self.filenameList = []

        self.filename = None

        self.fileSize = None

        self.firstHeaderSize = 0

        self.basicHeaderSize = 24

        self.pathList = []

        self.lastUTTime = 0

        # maximum gap (seconds) between blocks before flagging a time break
        self.maxTimeStep = 30

        self.flagNoMoreFiles = 0

        self.set = 0

        self.path = None

        # start past any block so the first getData() forces a read
        self.profileIndex = 9999

        self.delay = 3       # seconds
        self.nTries = 3      # quantity tries
        self.nFiles = 3      # number of files for searching

        self.nReadBlocks = 0

        self.flagIsNewFile = 1

        self.ippSeconds = 0

        self.flagTimeBlock = 0

        self.flagIsNewBlock = 0

        self.nTotalBlocks = 0

        self.blocksize = 0

    def createObjByDefault(self):
        """Create and return the default output container (Voltage)."""

        dataObj = Voltage()

        return dataObj

    def __hasNotDataInBuffer(self):
        """Return 1 when every profile of the current block was consumed."""
        if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
            return 1
        return 0

    def getBlockDimension(self):
        """
        Compute the number of points to read per data block.

        Affected:
            self.blocksize

        Return:
            None
        """
        pts2read = self.processingHeaderObj.profilesPerBlock * self.processingHeaderObj.nHeights * self.systemHeaderObj.nChannels
        self.blocksize = pts2read

    def readBlock(self):
        """
        Read one data block from the current file position (self.fp) and
        update every block-related attribute (metadata + data). The data is
        stored in the buffer and the buffer counter is reset to 0.

        Return:
            0 : the block did not contain enough data
            1 : the block was read successfully

        Affected:
            self.profileIndex
            self.datablock
            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
        """

        junk = numpy.fromfile(self.fp, self.dtype, self.blocksize)

        try:
            junk = junk.reshape((self.processingHeaderObj.profilesPerBlock,
                                 self.processingHeaderObj.nHeights,
                                 self.systemHeaderObj.nChannels))
        except ValueError:
            # short read at end of file: reshape fails on a size mismatch
            print("The read block (%3d) has not enough data" % self.nReadBlocks)
            return 0

        # file layout is (profiles, heights, channels); buffer layout is
        # (channels, profiles, heights)
        junk = numpy.transpose(junk, (2, 0, 1))
        self.datablock = junk['real'] + junk['imag'] * 1j

        self.profileIndex = 0

        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1

        self.nTotalBlocks += 1
        self.nReadBlocks += 1

        return 1

    def getData(self):
        """
        Copy one unit of data from the read buffer into the "Voltage" object
        together with all of its metadata; when the buffer is exhausted a new
        block is read first via readNextBlock(). Increments the buffer
        counter by 1.

        Return:
            data : one voltage profile (heights * channels) copied from the
                   buffer, or 0 when there are no more files to read.

        Affected:
            self.dataOutObj
            self.profileIndex
            self.flagTimeBlock
            self.flagIsNewBlock
        """
        if self.flagNoMoreFiles:
            return 0

        self.flagTimeBlock = 0
        self.flagIsNewBlock = 0

        if self.__hasNotDataInBuffer():

            if not(self.readNextBlock()):
                return 0

        if self.flagNoMoreFiles == 1:
            print('Process finished')
            return 0

        # datablock is a 3D numpy array (channels, profiles, heights)

        # fix: "datablock == None" is an elementwise numpy comparison with an
        # ambiguous truth value; identity test is the correct check here
        if self.datablock is None:
            self.dataOutObj.flagNoData = True
            return 0

        self.dataOutObj.data = self.datablock[:, self.profileIndex, :]

        self.dataOutObj.dtype = self.dtype

        self.dataOutObj.nChannels = self.systemHeaderObj.nChannels

        self.dataOutObj.nHeights = self.processingHeaderObj.nHeights

        self.dataOutObj.nProfiles = self.processingHeaderObj.profilesPerBlock

        xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights * self.processingHeaderObj.deltaHeight

        self.dataOutObj.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)

        self.dataOutObj.channelList = range(self.systemHeaderObj.nChannels)

        self.dataOutObj.channelIndexList = range(self.systemHeaderObj.nChannels)

        self.dataOutObj.flagTimeBlock = self.flagTimeBlock

        self.dataOutObj.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond / 1000. + self.profileIndex * self.ippSeconds

        self.dataOutObj.ippSeconds = self.ippSeconds

        self.dataOutObj.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt

        self.dataOutObj.nCohInt = self.processingHeaderObj.nCohInt

        self.dataOutObj.flagShiftFFT = False

        if self.processingHeaderObj.code is not None:
            self.dataOutObj.nCode = self.processingHeaderObj.nCode

            self.dataOutObj.nBaud = self.processingHeaderObj.nBaud

            self.dataOutObj.code = self.processingHeaderObj.code

        self.profileIndex += 1

        self.dataOutObj.systemHeaderObj = self.systemHeaderObj.copy()

        self.dataOutObj.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()

        self.dataOutObj.flagNoData = False

        return self.dataOutObj.data
1349 1349
1350 1350
class VoltageWriter(JRODataWriter):
    """
    Writes voltage data to processed files (.r). Writing is always done in
    whole blocks.
    """

    ext = ".r"

    optchar = "D"

    # shape of the on-disk block: (profiles, heights, channels)
    shapeBuffer = None


    def __init__(self, dataOutObj=None):
        """
        VoltageWriter constructor.

        Input:
            dataOutObj : Voltage instance whose data will be written out.
                         Created internally when not given.

        Affected:
            self.dataOutObj

        Return: None

        Raises:
            ValueError: if dataOutObj is not a Voltage instance.
        """
        if dataOutObj is None:
            dataOutObj = Voltage()

        if not isinstance(dataOutObj, Voltage):
            # fix: the original message named the wrong class on both sides
            raise ValueError("in VoltageWriter, dataOutObj must be a Voltage class object")

        self.dataOutObj = dataOutObj

        self.nTotalBlocks = 0

        self.profileIndex = 0

        self.__isConfig = False

        self.fp = None

        self.flagIsNewFile = 1

        self.flagIsNewBlock = 0

        self.flagNoMoreFiles = 0

        self.setFile = None

        self.dtype = None

        self.path = None

        self.noMoreFiles = 0

        self.filename = None

        # file metadata containers
        self.basicHeaderObj = BasicHeader()

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()

    def hasAllDataInBuffer(self):
        """Return 1 when the write buffer holds a full block of profiles."""
        if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
            return 1
        return 0

    def setBlockDimension(self):
        """
        Set the dimensional shapes of the block buffers.

        Affected:
            self.shapeBuffer
            self.datablock

        Return: None
        """
        self.shapeBuffer = (self.processingHeaderObj.profilesPerBlock,
                            self.processingHeaderObj.nHeights,
                            self.systemHeaderObj.nChannels)

        self.datablock = numpy.zeros((self.systemHeaderObj.nChannels,
                                      self.processingHeaderObj.profilesPerBlock,
                                      self.processingHeaderObj.nHeights),
                                     dtype=numpy.dtype('complex'))

    def writeBlock(self):
        """
        Write the buffer to the designated file.

        Affected:
            self.profileIndex
            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
            self.blockIndex

        Return: None
        """
        data = numpy.zeros(self.shapeBuffer, self.dtype)

        # buffer is (channels, profiles, heights); file wants (profiles, heights, channels)
        junk = numpy.transpose(self.datablock, (1, 2, 0))

        data['real'] = junk.real
        data['imag'] = junk.imag

        data = data.reshape((-1))

        data.tofile(self.fp)

        self.datablock.fill(0)

        self.profileIndex = 0
        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1

        self.blockIndex += 1
        self.nTotalBlocks += 1

    def putData(self):
        """
        Accumulate one profile into the block buffer and flush the block to
        file once it is full.

        Affected:
            self.flagIsNewBlock
            self.profileIndex

        Return:
            0 : no data, or no more files can be written
            1 : the data was accepted (and possibly written)
        """
        if self.dataOutObj.flagNoData:
            return 0

        self.flagIsNewBlock = 0

        if self.dataOutObj.flagTimeBlock:
            # time discontinuity: drop the partial block and start a new file
            self.datablock.fill(0)
            self.profileIndex = 0
            self.setNextFile()

        if self.profileIndex == 0:
            self.getBasicHeader()

        self.datablock[:, self.profileIndex, :] = self.dataOutObj.data

        self.profileIndex += 1

        if self.hasAllDataInBuffer():
            self.writeNextBlock()

        if self.flagNoMoreFiles:
            return 0

        return 1

    def __getProcessFlags(self):
        """
        Build the PROCFLAG bitmask describing data type and the processing
        applied (decoding, deflip, coded pulse, coherent integration).
        """
        processFlags = 0

        dtype0 = numpy.dtype([('real', '<i1'), ('imag', '<i1')])
        dtype1 = numpy.dtype([('real', '<i2'), ('imag', '<i2')])
        dtype2 = numpy.dtype([('real', '<i4'), ('imag', '<i4')])
        dtype3 = numpy.dtype([('real', '<i8'), ('imag', '<i8')])
        dtype4 = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
        dtype5 = numpy.dtype([('real', '<f8'), ('imag', '<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]

        datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
                             PROCFLAG.DATATYPE_SHORT,
                             PROCFLAG.DATATYPE_LONG,
                             PROCFLAG.DATATYPE_INT64,
                             PROCFLAG.DATATYPE_FLOAT,
                             PROCFLAG.DATATYPE_DOUBLE]

        # assumes self.dataOutObj.dtype is one of dtypeList; otherwise
        # dtypeValue stays undefined (NameError) -- TODO confirm upstream guarantee
        for index in range(len(dtypeList)):
            if self.dataOutObj.dtype == dtypeList[index]:
                dtypeValue = datatypeValueList[index]
                break

        processFlags += dtypeValue

        if self.dataOutObj.flagDecodeData:
            processFlags += PROCFLAG.DECODE_DATA

        if self.dataOutObj.flagDeflipData:
            processFlags += PROCFLAG.DEFLIP_DATA

        if self.dataOutObj.code is not None:
            processFlags += PROCFLAG.DEFINE_PROCESS_CODE

        if self.dataOutObj.nCohInt > 1:
            processFlags += PROCFLAG.COHERENT_INTEGRATION

        return processFlags

    def __getBlockSize(self):
        '''
        Determine the number of bytes of a Voltage-type data block.
        '''

        dtype0 = numpy.dtype([('real', '<i1'), ('imag', '<i1')])
        dtype1 = numpy.dtype([('real', '<i2'), ('imag', '<i2')])
        dtype2 = numpy.dtype([('real', '<i4'), ('imag', '<i4')])
        dtype3 = numpy.dtype([('real', '<i8'), ('imag', '<i8')])
        dtype4 = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
        dtype5 = numpy.dtype([('real', '<f8'), ('imag', '<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
        datatypeValueList = [1, 2, 4, 8, 4, 8]

        # assumes self.dataOutObj.dtype is one of dtypeList (see __getProcessFlags)
        for index in range(len(dtypeList)):
            if self.dataOutObj.dtype == dtypeList[index]:
                datatypeValue = datatypeValueList[index]
                break

        # *2 accounts for the real and imaginary components
        blocksize = int(self.dataOutObj.nHeights * self.dataOutObj.nChannels * self.dataOutObj.nProfiles * datatypeValue * 2)

        return blocksize

    def getDataHeader(self):
        """
        Build a copy of the First Header from the current output object.

        Affected:
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.dtype

        Return:
            None
        """

        self.systemHeaderObj = self.dataOutObj.systemHeaderObj.copy()
        self.systemHeaderObj.nChannels = self.dataOutObj.nChannels
        self.radarControllerHeaderObj = self.dataOutObj.radarControllerHeaderObj.copy()

        self.getBasicHeader()

        processingHeaderSize = 40  # bytes
        self.processingHeaderObj.dtype = 0  # Voltage
        self.processingHeaderObj.blockSize = self.__getBlockSize()
        self.processingHeaderObj.profilesPerBlock = self.profilesPerBlock
        self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
        self.processingHeaderObj.nWindows = 1  # could be 1 or self.dataOutObj.processingHeaderObj.nWindows
        self.processingHeaderObj.processFlags = self.__getProcessFlags()
        self.processingHeaderObj.nCohInt = self.dataOutObj.nCohInt
        self.processingHeaderObj.nIncohInt = 1  # source data is Voltage type
        self.processingHeaderObj.totalSpectra = 0  # source data is Voltage type

        if self.dataOutObj.code is not None:
            self.processingHeaderObj.code = self.dataOutObj.code
            self.processingHeaderObj.nCode = self.dataOutObj.nCode
            self.processingHeaderObj.nBaud = self.dataOutObj.nBaud
            codesize = int(8 + 4 * self.dataOutObj.nCode * self.dataOutObj.nBaud)
            processingHeaderSize += codesize

        if self.processingHeaderObj.nWindows != 0:
            self.processingHeaderObj.firstHeight = self.dataOutObj.heightList[0]
            self.processingHeaderObj.deltaHeight = self.dataOutObj.heightList[1] - self.dataOutObj.heightList[0]
            self.processingHeaderObj.nHeights = self.dataOutObj.nHeights
            self.processingHeaderObj.samplesWin = self.dataOutObj.nHeights
            processingHeaderSize += 12

        self.processingHeaderObj.size = processingHeaderSize
1630 1630
1631 1631 class SpectraReader(JRODataReader):
1632 1632 """
1633 1633 Esta clase permite leer datos de espectros desde archivos procesados (.pdata). La lectura
1634 1634 de los datos siempre se realiza por bloques. Los datos leidos (array de 3 dimensiones)
1635 1635 son almacenados en tres buffer's para el Self Spectra, el Cross Spectra y el DC Channel.
1636 1636
1637 1637 paresCanalesIguales * alturas * perfiles (Self Spectra)
1638 1638 paresCanalesDiferentes * alturas * perfiles (Cross Spectra)
1639 1639 canales * alturas (DC Channels)
1640 1640
1641 1641 Esta clase contiene instancias (objetos) de las clases BasicHeader, SystemHeader,
1642 1642 RadarControllerHeader y Spectra. Los tres primeros se usan para almacenar informacion de la
1643 1643 cabecera de datos (metadata), y el cuarto (Spectra) para obtener y almacenar un bloque de
1644 1644 datos desde el "buffer" cada vez que se ejecute el metodo "getData".
1645 1645
1646 1646 Example:
1647 1647 dpath = "/home/myuser/data"
1648 1648
1649 1649 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
1650 1650
1651 1651 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
1652 1652
1653 1653 readerObj = SpectraReader()
1654 1654
1655 1655 readerObj.setup(dpath, startTime, endTime)
1656 1656
1657 1657 while(True):
1658 1658
1659 1659 readerObj.getData()
1660 1660
1661 1661 print readerObj.data_spc
1662 1662
1663 1663 print readerObj.data_cspc
1664 1664
1665 1665 print readerObj.data_dc
1666 1666
1667 1667 if readerObj.flagNoMoreFiles:
1668 1668 break
1669 1669
1670 1670 """
1671 1671
1672 1672 pts2read_SelfSpectra = 0
1673 1673
1674 1674 pts2read_CrossSpectra = 0
1675 1675
1676 1676 pts2read_DCchannels = 0
1677 1677
1678 1678 ext = ".pdata"
1679 1679
1680 1680 optchar = "P"
1681 1681
1682 1682 dataOutObj = None
1683 1683
1684 1684 nRdChannels = None
1685 1685
1686 1686 nRdPairs = None
1687 1687
1688 1688 rdPairList = []
1689 1689
1690 1690
1691 1691 def __init__(self, dataOutObj=None):
1692 1692 """
1693 1693 Inicializador de la clase SpectraReader para la lectura de datos de espectros.
1694 1694
1695 1695 Inputs:
1696 1696 dataOutObj : Objeto de la clase Spectra. Este objeto sera utilizado para
1697 1697 almacenar un perfil de datos cada vez que se haga un requerimiento
1698 1698 (getData). El perfil sera obtenido a partir del buffer de datos,
1699 1699 si el buffer esta vacio se hara un nuevo proceso de lectura de un
1700 1700 bloque de datos.
1701 1701 Si este parametro no es pasado se creara uno internamente.
1702 1702
1703 1703 Affected:
1704 1704 self.dataOutObj
1705 1705
1706 1706 Return : None
1707 1707 """
1708 1708
1709 1709 self.__isConfig = False
1710 1710
1711 1711 self.pts2read_SelfSpectra = 0
1712 1712
1713 1713 self.pts2read_CrossSpectra = 0
1714 1714
1715 1715 self.pts2read_DCchannels = 0
1716 1716
1717 1717 self.datablock = None
1718 1718
1719 1719 self.utc = None
1720 1720
1721 1721 self.ext = ".pdata"
1722 1722
1723 1723 self.optchar = "P"
1724 1724
1725 1725 self.basicHeaderObj = BasicHeader()
1726 1726
1727 1727 self.systemHeaderObj = SystemHeader()
1728 1728
1729 1729 self.radarControllerHeaderObj = RadarControllerHeader()
1730 1730
1731 1731 self.processingHeaderObj = ProcessingHeader()
1732 1732
1733 1733 self.online = 0
1734 1734
1735 1735 self.fp = None
1736 1736
1737 1737 self.idFile = None
1738 1738
1739 1739 self.dtype = None
1740 1740
1741 1741 self.fileSizeByHeader = None
1742 1742
1743 1743 self.filenameList = []
1744 1744
1745 1745 self.filename = None
1746 1746
1747 1747 self.fileSize = None
1748 1748
1749 1749 self.firstHeaderSize = 0
1750 1750
1751 1751 self.basicHeaderSize = 24
1752 1752
1753 1753 self.pathList = []
1754 1754
1755 1755 self.lastUTTime = 0
1756 1756
1757 1757 self.maxTimeStep = 30
1758 1758
1759 1759 self.flagNoMoreFiles = 0
1760 1760
1761 1761 self.set = 0
1762 1762
1763 1763 self.path = None
1764 1764
1765 1765 self.delay = 3 #seconds
1766 1766
1767 1767 self.nTries = 3 #quantity tries
1768 1768
1769 1769 self.nFiles = 3 #number of files for searching
1770 1770
1771 1771 self.nReadBlocks = 0
1772 1772
1773 1773 self.flagIsNewFile = 1
1774 1774
1775 1775 self.ippSeconds = 0
1776 1776
1777 1777 self.flagTimeBlock = 0
1778 1778
1779 1779 self.flagIsNewBlock = 0
1780 1780
1781 1781 self.nTotalBlocks = 0
1782 1782
1783 1783 self.blocksize = 0
1784 1784
1785 1785
1786 1786 def createObjByDefault(self):
1787 1787
1788 1788 dataObj = Spectra()
1789 1789
1790 1790 return dataObj
1791 1791
1792 1792 def __hasNotDataInBuffer(self):
1793 1793 return 1
1794 1794
1795 1795
1796 1796 def getBlockDimension(self):
1797 1797 """
1798 1798 Obtiene la cantidad de puntos a leer por cada bloque de datos
1799 1799
1800 1800 Affected:
1801 1801 self.nRdChannels
1802 1802 self.nRdPairs
1803 1803 self.pts2read_SelfSpectra
1804 1804 self.pts2read_CrossSpectra
1805 1805 self.pts2read_DCchannels
1806 1806 self.blocksize
1807 1807 self.dataOutObj.nChannels
1808 1808 self.dataOutObj.nPairs
1809 1809
1810 1810 Return:
1811 1811 None
1812 1812 """
1813 1813 self.nRdChannels = 0
1814 1814 self.nRdPairs = 0
1815 1815 self.rdPairList = []
1816 1816
1817 1817 for i in range(0, self.processingHeaderObj.totalSpectra*2, 2):
1818 1818 if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i+1]:
1819 1819 self.nRdChannels = self.nRdChannels + 1 #par de canales iguales
1820 1820 else:
1821 1821 self.nRdPairs = self.nRdPairs + 1 #par de canales diferentes
1822 1822 self.rdPairList.append((self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1]))
1823 1823
1824 1824 pts2read = self.processingHeaderObj.nHeights * self.processingHeaderObj.profilesPerBlock
1825 1825
1826 1826 self.pts2read_SelfSpectra = int(self.nRdChannels * pts2read)
1827 1827 self.blocksize = self.pts2read_SelfSpectra
1828 1828
1829 1829 if self.processingHeaderObj.flag_cspc:
1830 1830 self.pts2read_CrossSpectra = int(self.nRdPairs * pts2read)
1831 1831 self.blocksize += self.pts2read_CrossSpectra
1832 1832
1833 1833 if self.processingHeaderObj.flag_dc:
1834 1834 self.pts2read_DCchannels = int(self.systemHeaderObj.nChannels * self.processingHeaderObj.nHeights)
1835 1835 self.blocksize += self.pts2read_DCchannels
1836 1836
1837 1837 # self.blocksize = self.pts2read_SelfSpectra + self.pts2read_CrossSpectra + self.pts2read_DCchannels
1838 1838
1839 1839
    def readBlock(self):
        """
        Read one data block from the current file position (self.fp) and
        update every block-related attribute (metadata + data). The data is
        stored in the read buffers and the buffer counter is reset to 0.

        Return: None

        Affected:

            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
            self.data_spc
            self.data_cspc
            self.data_dc

        Exceptions:
            If the block read is not a valid data block
        """
        # NOTE(review): blockOk_flag and fpointer are assigned but never used below
        blockOk_flag = False
        fpointer = self.fp.tell()

        # self-spectra are plain reals: read with the first ('real') field dtype
        spc = numpy.fromfile( self.fp, self.dtype[0], self.pts2read_SelfSpectra )
        spc = spc.reshape( (self.nRdChannels, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #transform to a 3D array

        if self.processingHeaderObj.flag_cspc:
            # cross-spectra are complex: structured dtype with real/imag fields
            cspc = numpy.fromfile( self.fp, self.dtype, self.pts2read_CrossSpectra )
            cspc = cspc.reshape( (self.nRdPairs, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #transform to a 3D array

        if self.processingHeaderObj.flag_dc:
            dc = numpy.fromfile( self.fp, self.dtype, self.pts2read_DCchannels ) #int(self.processingHeaderObj.nHeights*self.systemHeaderObj.nChannels) )
            dc = dc.reshape( (self.systemHeaderObj.nChannels, self.processingHeaderObj.nHeights) ) #transform to a 2D array


        if not(self.processingHeaderObj.shif_fft):
            # data on file is not FFT-shifted: center the zero frequency
            # (profilesPerBlock/2 is Python 2 integer division)
            spc = numpy.roll( spc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #shift along axis 2 by half a block

            if self.processingHeaderObj.flag_cspc:
                cspc = numpy.roll( cspc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #shift along axis 2 by half a block


        # reorder to (channels/pairs, profiles, heights)
        spc = numpy.transpose( spc, (0,2,1) )
        self.data_spc = spc

        if self.processingHeaderObj.flag_cspc:
            cspc = numpy.transpose( cspc, (0,2,1) )
            self.data_cspc = cspc['real'] + cspc['imag']*1j
        else:
            self.data_cspc = None

        if self.processingHeaderObj.flag_dc:
            self.data_dc = dc['real'] + dc['imag']*1j
        else:
            self.data_dc = None

        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1

        self.nTotalBlocks += 1
        self.nReadBlocks += 1

        return 1
1904 1904
1905 1905
1906 1906 def getData(self):
1907 1907 """
1908 1908 Copia el buffer de lectura a la clase "Spectra",
1909 1909 con todos los parametros asociados a este (metadata). cuando no hay datos en el buffer de
1910 1910 lectura es necesario hacer una nueva lectura de los bloques de datos usando "readNextBlock"
1911 1911
1912 1912 Return:
1913 1913 0 : Si no hay mas archivos disponibles
1914 1914 1 : Si hizo una buena copia del buffer
1915 1915
1916 1916 Affected:
1917 1917 self.dataOutObj
1918 1918
1919 1919 self.flagTimeBlock
1920 1920 self.flagIsNewBlock
1921 1921 """
1922 1922
1923 1923 if self.flagNoMoreFiles: return 0
1924 1924
1925 1925 self.flagTimeBlock = 0
1926 1926 self.flagIsNewBlock = 0
1927 1927
1928 1928 if self.__hasNotDataInBuffer():
1929 1929
1930 1930 if not( self.readNextBlock() ):
1931 1931 return 0
1932 1932
1933 1933 # self.updateDataHeader()
1934 1934
1935 1935 if self.flagNoMoreFiles == 1:
1936 1936 print 'Process finished'
1937 1937 return 0
1938 1938
1939 1939 #data es un numpy array de 3 dmensiones (perfiles, alturas y canales)
1940 1940
1941 1941 if self.data_dc == None:
1942 1942 self.dataOutObj.flagNoData = True
1943 1943 return 0
1944 1944
1945 1945
1946 1946 self.dataOutObj.data_spc = self.data_spc
1947 1947
1948 1948 self.dataOutObj.data_cspc = self.data_cspc
1949 1949
1950 1950 self.dataOutObj.data_dc = self.data_dc
1951 1951
1952 1952 self.dataOutObj.flagTimeBlock = self.flagTimeBlock
1953 1953
1954 1954 self.dataOutObj.flagNoData = False
1955 1955
1956 1956 self.dataOutObj.dtype = self.dtype
1957 1957
1958 1958 self.dataOutObj.nChannels = self.nRdChannels
1959 1959
1960 1960 self.dataOutObj.nPairs = self.nRdPairs
1961 1961
1962 1962 self.dataOutObj.pairsList = self.rdPairList
1963 1963
1964 1964 self.dataOutObj.nHeights = self.processingHeaderObj.nHeights
1965 1965
1966 1966 self.dataOutObj.nProfiles = self.processingHeaderObj.profilesPerBlock
1967 1967
1968 1968 self.dataOutObj.nFFTPoints = self.processingHeaderObj.profilesPerBlock
1969 1969
1970 1970 self.dataOutObj.nIncohInt = self.processingHeaderObj.nIncohInt
1971 1971
1972 1972
1973 1973 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
1974 1974
1975 1975 self.dataOutObj.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
1976 1976
1977 1977 self.dataOutObj.channelList = range(self.systemHeaderObj.nChannels)
1978 1978
1979 1979 self.dataOutObj.channelIndexList = range(self.systemHeaderObj.nChannels)
1980 1980
1981 1981 self.dataOutObj.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000.#+ self.profileIndex * self.ippSeconds
1982 1982
1983 1983 self.dataOutObj.ippSeconds = self.ippSeconds
1984 1984
1985 1985 self.dataOutObj.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt * self.processingHeaderObj.nIncohInt * self.dataOutObj.nFFTPoints
1986 1986
1987 1987 self.dataOutObj.flagShiftFFT = self.processingHeaderObj.shif_fft
1988 1988
1989 1989 # self.profileIndex += 1
1990 1990
1991 1991 self.dataOutObj.systemHeaderObj = self.systemHeaderObj.copy()
1992 1992
1993 1993 self.dataOutObj.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
1994 1994
1995 1995 return self.dataOutObj.data_spc
1996 1996
1997 1997
class SpectraWriter(JRODataWriter):
    """
    Writes spectra data to processed-data files (.pdata). Data is always
    written block by block: self spectra, optional cross spectra and optional
    DC channels.
    """

    ext = ".pdata"

    optchar = "P"

    # Buffer shapes, filled in by setBlockDimension():
    #   spc  -> (nChannels, nHeights, profilesPerBlock)
    #   cspc -> (nPairs,    nHeights, profilesPerBlock)
    #   dc   -> (nChannels, nHeights)
    shape_spc_Buffer = None

    shape_cspc_Buffer = None

    shape_dc_Buffer = None

    data_spc = None

    data_cspc = None

    data_dc = None

    # dataOutObj = None

    def __init__(self, dataOutObj=None):
        """
        SpectraWriter constructor.

        Input:
            dataOutObj : Spectra object the writer reads blocks from; a new
                         Spectra is created when omitted.

        Affected:
            self.dataOutObj
            self.basicHeaderObj
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.processingHeaderObj

        Return: None
        """
        if dataOutObj is None:
            dataOutObj = Spectra()

        if not isinstance(dataOutObj, Spectra):
            # BUG FIX: the original message blamed "SpectraReader".
            raise ValueError("in SpectraWriter, dataOutObj must be an Spectra class object")

        self.dataOutObj = dataOutObj

        self.__isConfig = False

        self.nTotalBlocks = 0

        self.data_spc = None

        self.data_cspc = None

        self.data_dc = None

        self.fp = None

        self.flagIsNewFile = 1

        self.flagIsNewBlock = 0

        self.flagNoMoreFiles = 0

        self.setFile = None

        self.dtype = None

        self.path = None

        self.noMoreFiles = 0

        self.filename = None

        self.basicHeaderObj = BasicHeader()

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()


    def hasAllDataInBuffer(self):
        # Spectra blocks are written one at a time, so the buffer is always "full".
        return 1


    def setBlockDimension(self):
        """
        Compute the shapes of the sub-blocks that make up one data block.

        Affected:
            self.shape_spc_Buffer
            self.shape_cspc_Buffer
            self.shape_dc_Buffer

        Return: None
        """
        self.shape_spc_Buffer = (self.dataOutObj.nChannels,
                                 self.processingHeaderObj.nHeights,
                                 self.processingHeaderObj.profilesPerBlock)

        self.shape_cspc_Buffer = (self.dataOutObj.nPairs,
                                  self.processingHeaderObj.nHeights,
                                  self.processingHeaderObj.profilesPerBlock)

        self.shape_dc_Buffer = (self.dataOutObj.nChannels,
                                self.processingHeaderObj.nHeights)


    def writeBlock(self):
        """
        Write the buffered block to the current file.

        Affected:
            self.data_spc
            self.data_cspc
            self.data_dc
            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
            self.nWriteBlocks

        Return: None
        """

        spc = numpy.transpose(self.data_spc, (0, 2, 1))
        if not self.processingHeaderObj.shif_fft:
            # shift right along axis 2 (integer shift; "//" keeps it an int under py3)
            spc = numpy.roll(spc, self.processingHeaderObj.profilesPerBlock // 2, axis=2)
        data = spc.reshape((-1))
        data.tofile(self.fp)

        # BUG FIX: "!= None" on a numpy array is elementwise and raises on
        # truth-testing; identity checks are used throughout instead.
        if self.data_cspc is not None:
            data = numpy.zeros(self.shape_cspc_Buffer, self.dtype)
            cspc = numpy.transpose(self.data_cspc, (0, 2, 1))
            if not self.processingHeaderObj.shif_fft:
                cspc = numpy.roll(cspc, self.processingHeaderObj.profilesPerBlock // 2, axis=2)
            data['real'] = cspc.real
            data['imag'] = cspc.imag
            data = data.reshape((-1))
            data.tofile(self.fp)

        if self.data_dc is not None:
            data = numpy.zeros(self.shape_dc_Buffer, self.dtype)
            dc = self.data_dc
            data['real'] = dc.real
            data['imag'] = dc.imag
            data = data.reshape((-1))
            data.tofile(self.fp)

        # Clear the buffers that actually exist for the next block.
        self.data_spc.fill(0)
        if self.data_dc is not None:
            self.data_dc.fill(0)
        if self.data_cspc is not None:
            self.data_cspc.fill(0)

        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1
        self.nTotalBlocks += 1
        self.nWriteBlocks += 1
        self.blockIndex += 1


    def putData(self):
        """
        Stage one block of data from self.dataOutObj and write it to a file.

        Affected:
            self.data_spc
            self.data_cspc
            self.data_dc

        Return:
            0 : no data available, or no more files can be written
            1 : one block of data was written to a file
        """

        if self.dataOutObj.flagNoData:
            return 0

        self.flagIsNewBlock = 0

        if self.dataOutObj.flagTimeBlock:
            # Time discontinuity: reset whatever buffers exist and roll the file.
            if self.data_spc is not None:
                self.data_spc.fill(0)
            if self.data_cspc is not None:
                self.data_cspc.fill(0)
            if self.data_dc is not None:
                self.data_dc.fill(0)
            self.setNextFile()

        if self.flagIsNewFile == 0:
            self.getBasicHeader()

        self.data_spc = self.dataOutObj.data_spc
        self.data_cspc = self.dataOutObj.data_cspc
        self.data_dc = self.dataOutObj.data_dc

        # #self.processingHeaderObj.dataBlocksPerFile)
        if self.hasAllDataInBuffer():
            # self.getDataHeader()
            self.writeNextBlock()

        if self.flagNoMoreFiles:
            #print 'Process finished'
            return 0

        return 1


    def __getProcessFlags(self):
        """
        Build the PROCFLAG bitmask for the processing header from the output
        object's dtype and processing attributes.
        """

        processFlags = 0

        dtype0 = numpy.dtype([('real', '<i1'), ('imag', '<i1')])
        dtype1 = numpy.dtype([('real', '<i2'), ('imag', '<i2')])
        dtype2 = numpy.dtype([('real', '<i4'), ('imag', '<i4')])
        dtype3 = numpy.dtype([('real', '<i8'), ('imag', '<i8')])
        dtype4 = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
        dtype5 = numpy.dtype([('real', '<f8'), ('imag', '<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]

        datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
                             PROCFLAG.DATATYPE_SHORT,
                             PROCFLAG.DATATYPE_LONG,
                             PROCFLAG.DATATYPE_INT64,
                             PROCFLAG.DATATYPE_FLOAT,
                             PROCFLAG.DATATYPE_DOUBLE]

        # NOTE(review): an unmatched dtype contributes no datatype flag;
        # previously it raised NameError on the line below.
        dtypeValue = 0
        for index in range(len(dtypeList)):
            if self.dataOutObj.dtype == dtypeList[index]:
                dtypeValue = datatypeValueList[index]
                break

        processFlags += dtypeValue

        if self.dataOutObj.flagDecodeData:
            processFlags += PROCFLAG.DECODE_DATA

        if self.dataOutObj.flagDeflipData:
            processFlags += PROCFLAG.DEFLIP_DATA

        # identity checks: code / data_dc may be numpy arrays
        if self.dataOutObj.code is not None:
            processFlags += PROCFLAG.DEFINE_PROCESS_CODE

        if self.dataOutObj.nIncohInt > 1:
            processFlags += PROCFLAG.INCOHERENT_INTEGRATION

        if self.dataOutObj.data_dc is not None:
            processFlags += PROCFLAG.SAVE_CHANNELS_DC

        return processFlags


    def __getBlockSize(self):
        '''
        Return the number of bytes of one Spectra data block.
        '''

        dtype0 = numpy.dtype([('real', '<i1'), ('imag', '<i1')])
        dtype1 = numpy.dtype([('real', '<i2'), ('imag', '<i2')])
        dtype2 = numpy.dtype([('real', '<i4'), ('imag', '<i4')])
        dtype3 = numpy.dtype([('real', '<i8'), ('imag', '<i8')])
        dtype4 = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
        dtype5 = numpy.dtype([('real', '<f8'), ('imag', '<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
        datatypeValueList = [1, 2, 4, 8, 4, 8]  # bytes per component
        # NOTE(review): default to 0 instead of NameError on unknown dtype.
        datatypeValue = 0
        for index in range(len(dtypeList)):
            if self.dataOutObj.dtype == dtypeList[index]:
                datatypeValue = datatypeValueList[index]
                break

        pts2write = self.dataOutObj.nHeights * self.dataOutObj.nFFTPoints

        # self spectra are stored as a single (real) value per point
        pts2write_SelfSpectra = int(self.dataOutObj.nChannels * pts2write)
        blocksize = (pts2write_SelfSpectra * datatypeValue)

        # cross spectra and DC channels store real+imag (factor 2)
        if self.dataOutObj.data_cspc is not None:
            pts2write_CrossSpectra = int(self.dataOutObj.nPairs * pts2write)
            blocksize += (pts2write_CrossSpectra * datatypeValue * 2)

        if self.dataOutObj.data_dc is not None:
            pts2write_DCchannels = int(self.dataOutObj.nChannels * self.dataOutObj.nHeights)
            blocksize += (pts2write_DCchannels * datatypeValue * 2)

        return blocksize

    def getDataHeader(self):

        """
        Build the first (long) header of a new file from the output object.

        Affected:
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.processingHeaderObj

        Return:
            None
        """

        self.systemHeaderObj = self.dataOutObj.systemHeaderObj.copy()
        self.systemHeaderObj.nChannels = self.dataOutObj.nChannels
        self.radarControllerHeaderObj = self.dataOutObj.radarControllerHeaderObj.copy()

        self.getBasicHeader()

        processingHeaderSize = 40  # bytes
        # NOTE(review): dtype 0 is labelled "Voltage" elsewhere; value kept
        # as-is -- confirm against the .pdata format spec.
        self.processingHeaderObj.dtype = 0
        self.processingHeaderObj.blockSize = self.__getBlockSize()
        self.processingHeaderObj.profilesPerBlock = self.dataOutObj.nFFTPoints
        self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
        self.processingHeaderObj.nWindows = 1  # could be 1 or self.dataOutObj.processingHeaderObj.nWindows
        self.processingHeaderObj.processFlags = self.__getProcessFlags()
        self.processingHeaderObj.nCohInt = self.dataOutObj.nCohInt  # needed to derive timeInterval
        self.processingHeaderObj.nIncohInt = self.dataOutObj.nIncohInt
        self.processingHeaderObj.totalSpectra = self.dataOutObj.nPairs + self.dataOutObj.nChannels

        if self.processingHeaderObj.totalSpectra > 0:
            # self spectra are encoded as (ch, ch) pairs, then the cross pairs
            channelList = []
            for channel in range(self.dataOutObj.nChannels):
                channelList.append(channel)
                channelList.append(channel)

            pairsList = []
            for pair in self.dataOutObj.pairsList:
                pairsList.append(pair[0])
                pairsList.append(pair[1])
            spectraComb = channelList + pairsList
            spectraComb = numpy.array(spectraComb, dtype="u1")
            self.processingHeaderObj.spectraComb = spectraComb
            sizeOfSpcComb = len(spectraComb)
            processingHeaderSize += sizeOfSpcComb

        if self.dataOutObj.code is not None:
            self.processingHeaderObj.code = self.dataOutObj.code
            self.processingHeaderObj.nCode = self.dataOutObj.nCode
            self.processingHeaderObj.nBaud = self.dataOutObj.nBaud
            nCodeSize = 4  # bytes
            nBaudSize = 4  # bytes
            codeSize = 4  # bytes
            sizeOfCode = int(nCodeSize + nBaudSize + codeSize * self.dataOutObj.nCode * self.dataOutObj.nBaud)
            processingHeaderSize += sizeOfCode

        if self.processingHeaderObj.nWindows != 0:
            self.processingHeaderObj.firstHeight = self.dataOutObj.heightList[0]
            self.processingHeaderObj.deltaHeight = self.dataOutObj.heightList[1] - self.dataOutObj.heightList[0]
            self.processingHeaderObj.nHeights = self.dataOutObj.nHeights
            self.processingHeaderObj.samplesWin = self.dataOutObj.nHeights
            sizeOfFirstHeight = 4
            sizeOfdeltaHeight = 4
            sizeOfnHeights = 4
            sizeOfWindows = (sizeOfFirstHeight + sizeOfdeltaHeight + sizeOfnHeights) * self.processingHeaderObj.nWindows
            processingHeaderSize += sizeOfWindows

        self.processingHeaderObj.size = processingHeaderSize
2360 2360
class SpectraHeisWriter():
    """
    Writes Heis spectra from a data-out object into per-day FITS files using
    the FITS helper class.
    """

    # running counter (unused here; kept for backward compatibility)
    i = 0

    def __init__(self, dataOutObj):

        self.wrObj = FITS()
        self.dataOutObj = dataOutObj

    @staticmethod
    def isNumber(value):
        """
        Check whether *value* can be converted to a number.

        Input:
            value : string (or any object) to test for float conversion

        Return:
            True  : the value is numeric
            False : it is not
        """
        # BUG FIX: this was declared as an instance method whose only
        # parameter was named "str", so self.isNumber(x) passed the instance
        # as the value and always returned False. @staticmethod restores the
        # intended behavior for both SpectraHeisWriter.isNumber(x) and
        # self.isNumber(x) call styles.
        try:
            float(value)
            return True
        except (TypeError, ValueError):
            return False

    def setup(self, wrpath):
        """Create the output root directory if needed and reset the file counter."""

        if not os.path.exists(wrpath):
            os.mkdir(wrpath)

        self.wrpath = wrpath
        self.setFile = 0

    def putData(self):
        """
        Build one FITS file (frequency column + one power column per channel)
        for the current block and write it under wrpath/D<year><doy>/.

        Return:
            1 : the file was written
        """
        # self.wrObj.writeHeader(nChannels=self.dataOutObj.nChannels, nFFTPoints=self.dataOutObj.nFFTPoints)
        #name = self.dataOutObj.utctime
        name = time.localtime(self.dataOutObj.utctime)
        ext = ".fits"
        subfolder = 'D%4.4d%3.3d' % (name.tm_year, name.tm_yday)

        doypath = os.path.join(self.wrpath, subfolder)
        if not os.path.exists(doypath):
            os.mkdir(doypath)
        self.setFile += 1
        # renamed local: "file" shadowed the builtin
        outfile = 'D%4.4d%3.3d%3.3d%s' % (name.tm_year, name.tm_yday, self.setFile, ext)

        filename = os.path.join(self.wrpath, subfolder, outfile)

        # frequency axis; assumes nHeights == nFFTPoints here -- TODO confirm
        freq = numpy.arange(-1*self.dataOutObj.nHeights/2., self.dataOutObj.nHeights/2.)/(2*self.dataOutObj.ippSeconds)

        fmt = str(self.dataOutObj.nFFTPoints) + 'E'
        col1 = self.wrObj.setColF(name="freq", format=fmt, array=freq)
        # one 10*log10 power column per channel
        col2 = self.wrObj.writeData(name="P_Ch1", format=fmt, data=10*numpy.log10(self.dataOutObj.data_spc[0, :]))
        col3 = self.wrObj.writeData(name="P_Ch2", format=fmt, data=10*numpy.log10(self.dataOutObj.data_spc[1, :]))
        col4 = self.wrObj.writeData(name="P_Ch3", format=fmt, data=10*numpy.log10(self.dataOutObj.data_spc[2, :]))
        col5 = self.wrObj.writeData(name="P_Ch4", format=fmt, data=10*numpy.log10(self.dataOutObj.data_spc[3, :]))
        col6 = self.wrObj.writeData(name="P_Ch5", format=fmt, data=10*numpy.log10(self.dataOutObj.data_spc[4, :]))
        col7 = self.wrObj.writeData(name="P_Ch6", format=fmt, data=10*numpy.log10(self.dataOutObj.data_spc[5, :]))
        col8 = self.wrObj.writeData(name="P_Ch7", format=fmt, data=10*numpy.log10(self.dataOutObj.data_spc[6, :]))
        col9 = self.wrObj.writeData(name="P_Ch8", format=fmt, data=10*numpy.log10(self.dataOutObj.data_spc[7, :]))

        n = self.dataOutObj.data_spc[6, :]
        a = self.wrObj.cFImage(n)
        b = self.wrObj.Ctable(col1, col2, col3, col4, col5, col6, col7, col8, col9)
        self.wrObj.CFile(a, b)
        self.wrObj.wFile(filename)
        return 1
2432 2432
class FITS:
    """
    Small convenience wrapper around pyfits used to assemble one FITS file:
    build float32 columns, group them into a table HDU, attach a primary
    image HDU, and write everything to disk.
    """

    name = None
    format = None
    array = None
    data = None
    thdulist = None

    def __init__(self):
        pass

    def setColF(self, name, format, array):
        """Build a float32 column from *array*; kept as self.col1 and returned."""
        self.name = name
        self.format = format
        self.array = array
        values = numpy.array([array], dtype=numpy.float32)
        self.col1 = pyfits.Column(name=name, format=format, array=values)
        return self.col1

    def writeHeader(self):
        pass

    def writeData(self, name, format, data):
        """Build a float32 column from *data*; kept as self.col2 and returned."""
        self.name = name
        self.format = format
        self.data = data
        values = numpy.array([data], dtype=numpy.float32)
        self.col2 = pyfits.Column(name=name, format=format, array=values)
        return self.col2

    def cFImage(self, n):
        """Wrap *n* in a primary HDU; kept as self.hdu and returned."""
        self.hdu = pyfits.PrimaryHDU(n)
        return self.hdu

    def Ctable(self, col1, col2, col3, col4, col5, col6, col7, col8, col9):
        """Group the nine columns into a new binary-table HDU."""
        self.cols = pyfits.ColDefs([col1, col2, col3, col4, col5, col6, col7, col8, col9])
        self.tbhdu = pyfits.new_table(self.cols)
        return self.tbhdu

    def CFile(self, hdu, tbhdu):
        """Collect the primary and table HDUs into the output HDU list."""
        self.thdulist = pyfits.HDUList([hdu, tbhdu])

    def wFile(self, filename):
        """Write the assembled HDU list to *filename*."""
        self.thdulist.writeto(filename)
@@ -1,444 +1,444
1 1 '''
2 2
3 3 $Author: dsuarez $
4 4 $Id: Processor.py 1 2012-11-12 18:56:07Z dsuarez $
5 5 '''
6 6 import os
7 7 import numpy
8 8 import datetime
9 9 import time
10 10
11 from JROData import *
12 from JRODataIO import *
13 from JROPlot import *
11 from jrodata import *
12 from jrodataIO import *
13 from jroplot import *
14 14
class ProcessingUnit:

    """
    Base class for data processing units.

    Provides the "call" method to run operations, which can be:
        - internal methods of the unit (callMethod)
        - Operation objects previously registered with "addOperation"
          (callObject)
    """
    # input data object (Voltage, Spectra or Correlation); must be set by
    # the concrete unit before call()/callMethod()/callObject() are used
    dataIn = None

    # output data object (Voltage, Spectra or Correlation)
    dataOut = None

    # registered Operation objects, keyed by their identifier
    objectDict = None

    def __init__(self):

        self.objectDict = {}

    def addOperation(self, object, objId):

        """
        Register *object* in self.objectDict under the key *objId* and return
        that identifier.

        Input:

            object : an Operation instance
            objId  : identifier used later to execute the operation

        Return:

            objId : the identifier, needed to execute the operation
        """

        # BUG FIX: the original assigned to self.object, which does not
        # exist; the registry is self.objectDict.
        self.objectDict[objId] = object

        return objId

    def operation(self, **kwargs):

        """
        Direct operation over the data (dataOut.data). Concrete units must
        override this and update the attributes of dataOut.

        Input:

            **kwargs : keyword arguments for the operation
        """

        if self.dataIn.isEmpty():
            return None

        raise ValueError("ImplementedError")

    def callMethod(self, name, **kwargs):

        """
        Execute the unit's own method *name* with the given keyword arguments.

        Input:
            name : name of the method to execute

            **kwargs : keyword arguments passed to the method
        """

        if self.dataIn.isEmpty():
            return None

        methodToCall = getattr(self, name)

        methodToCall(**kwargs)

    def callObject(self, objId, **kwargs):

        """
        Execute the registered operation identified by *objId* on dataOut.

        Input:

            objId : identifier of the operation to execute

            **kwargs : keyword arguments passed to the operation's run()

        Return:

            None
        """

        if self.dataIn.isEmpty():
            return None

        # BUG FIX: operations are stored in self.objectDict, not the
        # nonexistent self.objectList.
        object = self.objectDict[objId]

        object.run(self.dataOut, **kwargs)

    def call(self, operation, **kwargs):

        """
        Execute *operation* with the given arguments. Two kinds are
        supported:

        1. A method of this class:

            operation.type = "self"

        2. The "run" method of an Operation object (or a subclass),
           previously registered via "addOperation" and identified by
           operation.id:

            operation.type = "other"
        """
        if self.dataIn.isEmpty():
            return None

        if operation.type == 'self':
            self.callMethod(operation.name, **kwargs)
            return

        if operation.type == 'other':
            self.callObject(operation.id, **kwargs)
            return
145 145
class Operation():

    """
    Base class for the extra operations that can be attached to a
    ProcessingUnit when they need to accumulate prior information about the
    data being processed. Subclasses should preferably keep an accumulation
    buffer of their own.

    Example: coherent integration, which needs the n previous profiles
    (buffer).
    """

    # accumulation buffer for subclasses that need previous data
    __buffer = None
    # set once the subclass has configured itself
    __isConfig = False

    def __init__(self):
        pass

    def run(self, dataIn, **kwargs):

        """
        Perform the operation over dataIn.data and update dataIn's
        attributes accordingly. Must be overridden by subclasses.

        Input:

            dataIn : a JROData object

        Return:

            None

        Affected:
            __buffer : data reception buffer.
        """
        # Abstract: subclasses provide the actual implementation.
        raise ValueError("ImplementedError")
183 183
class VoltageProc(ProcessingUnit):
    """
    Processing unit for Voltage data: binds an input/output pair and offers
    channel selection over the output object.
    """

    def __init__(self):

        pass

    def setup(self, dataInObj=None, dataOutObj=None):
        """
        Bind the input object and create (or adopt) the output object.

        Input:
            dataInObj  : input Voltage object
            dataOutObj : output Voltage object; a new Voltage is created
                         when omitted

        Return:
            self.dataOutObj
        """

        self.dataInObj = dataInObj

        # BUG FIX: the original tested self.dataOutObj, which is never set
        # before this point and raised AttributeError on every call; the
        # parameter is what must be checked.
        if dataOutObj is None:
            dataOutObj = Voltage()

        self.dataOutObj = dataOutObj

        return self.dataOutObj

    def init(self):

        if self.dataInObj.isEmpty():
            return 0

        self.dataOutObj.copy(self.dataInObj)
        # No need to copy dataInObj's attributes on every init();
        # the copy should happen once per new data block.

    def selectChannels(self, channelList):

        if self.dataInObj.isEmpty():
            return 0

        self.selectChannelsByIndex(channelList)

    def selectChannelsByIndex(self, channelIndexList):
        """
        Select a block of data by channel, according to channelIndexList.

        Input:
            channelIndexList : flat list of channel indexes to select,
                               e.g. [2,3,7]

        Affected:
            self.dataOutObj.data
            self.dataOutObj.channelIndexList
            self.dataOutObj.channelList
            self.dataOutObj.nChannels

        Return:
            1 on success; raises ValueError on an invalid index
        """

        for channel in channelIndexList:
            if channel not in self.dataOutObj.channelIndexList:
                raise ValueError("The value %d in channelIndexList is not valid" % channel)

        nChannels = len(channelIndexList)

        # fancy-index the selected channels (rows)
        data = self.dataOutObj.data[channelIndexList, :]

        self.dataOutObj.data = data
        self.dataOutObj.channelIndexList = channelIndexList
        self.dataOutObj.channelList = [self.dataOutObj.channelList[i] for i in channelIndexList]
        self.dataOutObj.nChannels = nChannels

        return 1
251 251
class CohInt(Operation):
    """
    Coherent integration of voltage profiles, either by a fixed number of
    profiles (nCohInt) or by an integration time, with optional overlapping
    (sliding-window) accumulation.
    """

    __profIndex = 0
    __withOverapping = False

    __byTime = False
    __initime = None
    __lastdatatime = None
    __integrationtime = None

    __buffer = None

    __dataReady = False

    nCohInt = None

    def __init__(self):

        self.__isConfig = False

    def setup(self, nCohInt=None, timeInterval=None, overlapping=False):
        """
        Set the parameters of the integration class.

        Inputs:

            nCohInt : Number of coherent integrations
            timeInterval : Time of integration. If the parameter "nCohInt" is selected this one does not work
            overlapping : use a sliding window instead of disjoint groups
        """

        self.__initime = None
        self.__lastdatatime = 0
        self.__buffer = None
        self.__dataReady = False

        if nCohInt is None and timeInterval is None:
            raise ValueError("nCohInt or timeInterval should be specified ...")

        if nCohInt is not None:
            self.nCohInt = nCohInt
            self.__byTime = False
        else:
            # assumes timeInterval is given in minutes -- TODO confirm
            self.__integrationtime = timeInterval * 60.
            self.nCohInt = 9999
            self.__byTime = True

        if overlapping:
            self.__withOverapping = True
            self.__buffer = None
        else:
            self.__withOverapping = False
            self.__buffer = 0  # scalar accumulator

        self.__profIndex = 0

    def putData(self, data):

        """
        Add a profile to the __buffer and increase in one the __profileIndex
        """

        if not self.__withOverapping:
            self.__buffer += data
            self.__profIndex += 1
            return

        # Overlapping data
        nChannels, nHeis = data.shape
        data = numpy.reshape(data, (1, nChannels, nHeis))

        # If the buffer is empty then it takes the data value
        # BUG FIX: "== None" is elementwise once __buffer holds an array.
        if self.__buffer is None:
            self.__buffer = data
            self.__profIndex += 1
            return

        # If the buffer length is lower than nCohInt then stack the data value
        if self.__profIndex < self.nCohInt:
            self.__buffer = numpy.vstack((self.__buffer, data))
            self.__profIndex += 1
            return

        # Buffer full: drop the oldest profile and append the new one
        self.__buffer = numpy.roll(self.__buffer, -1, axis=0)
        self.__buffer[self.nCohInt-1] = data
        self.__profIndex = self.nCohInt
        return

    def pushData(self):
        """
        Return the sum of the last profiles and the profiles used in the sum.

        Affected:

            self.__profileIndex
        """

        if not self.__withOverapping:
            data = self.__buffer
            nCohInt = self.__profIndex

            self.__buffer = 0
            self.__profIndex = 0

            return data, nCohInt

        # Integration with Overlapping
        data = numpy.sum(self.__buffer, axis=0)
        nCohInt = self.__profIndex

        return data, nCohInt

    def byProfiles(self, data):

        self.__dataReady = False
        # BUG FIX: was "avg_data = None" (typo), leaving avgdata unbound
        # whenever the integration was not yet complete.
        avgdata = None
        nCohInt = None

        self.putData(data)

        if self.__profIndex == self.nCohInt:

            avgdata, nCohInt = self.pushData()
            self.__dataReady = True

        return avgdata, nCohInt

    def byTime(self, data, datatime):

        self.__dataReady = False
        # BUG FIX: was "avg_data = None" (typo), see byProfiles.
        avgdata = None
        nCohInt = None

        self.putData(data)

        if (datatime - self.__initime) >= self.__integrationtime:
            avgdata, nCohInt = self.pushData()
            self.nCohInt = nCohInt
            self.__dataReady = True

        return avgdata, nCohInt

    def integrate(self, data, datatime=None):
        """
        Feed one profile into the integrator.

        Return:
            (avgdata, avgdatatime) : avgdata is None until the integration
            is complete (check self.__dataReady / the return value).
        """

        if self.__initime is None:
            self.__initime = datatime

        # BUG FIX: byTime/byProfiles return (data, count) tuples; the
        # original assigned the tuple to a single name, so the "no data yet"
        # branch below could never trigger.
        if self.__byTime:
            avgdata, nCohInt = self.byTime(data, datatime)
        else:
            avgdata, nCohInt = self.byProfiles(data)

        self.__lastdatatime = datatime

        if avgdata is None:
            # integration not complete: keep __initime untouched
            return None, None

        avgdatatime = self.__initime

        # NOTE(review): deltatime is always 0 here because __lastdatatime was
        # just set to datatime above -- preserved as-is, looks suspicious.
        deltatime = datatime - self.__lastdatatime

        if not self.__withOverapping:
            self.__initime = datatime
        else:
            self.__initime += deltatime

        return avgdata, avgdatatime

    def run(self, dataOut, nCohInt=None, timeInterval=None, overlapping=False):

        if not self.__isConfig:
            self.setup(nCohInt, timeInterval, overlapping)
            self.__isConfig = True

        avgdata, avgdatatime = self.integrate(dataOut.data, dataOut.utctime)

        # BUG FIX: the original wrote to self.dataOutObj / dataOutObj,
        # neither of which exists in this class; the target object is the
        # dataOut argument.
        dataOut.flagNoData = True

        if self.__dataReady:
            dataOut.data = avgdata
            dataOut.timeInterval *= self.nCohInt
            dataOut.nCohInt *= self.nCohInt
            dataOut.utctime = avgdatatime
            dataOut.flagNoData = False
General Comments 0
You need to be logged in to leave comments. Login now