In graphics:...
Daniel Valdez - r192:4eb49bc6aa53
@@ -1,61 +1,81 @@
1 1 import mpldriver
2 2
3 3 class Figure:
4 4 axesList = None
5 width = None
6 height = None
5 7 def __init__(self):
6 8 pass
7 9
8 10 def init(self, idfigure, wintitle, width, height, nplots):
9 11 self.idfigure = idfigure
10 12 self.wintitle = wintitle
11 13 self.width = width
12 14 self.height = height
13 15 self.nplots = nplots
14 16 mpldriver.init(idfigure, wintitle, width, height)
15 17
16 18 self.axesList = []
17 19
18 20 def setTitle(self, title):
19 21 mpldriver.setTitle(self.idfigure, title)
20 22
21 23 def setTextFromAxes(self, title):
22 24 mpldriver.setTextFromAxes(self.idfigure, self.axesList[0].ax, title)
23 25
24 26 def makeAxes(self, nrow, ncol, xpos, ypos, colspan, rowspan):
25 27 ax = mpldriver.makeAxes(self.idfigure, nrow, ncol, xpos, ypos, colspan, rowspan)
26 28 axesObj = Axes(ax)
27 29 self.axesList.append(axesObj)
28 30
29 31 def draw(self):
30 32 mpldriver.draw(self.idfigure)
31 33
32 34 def run(self):
33 35 pass
34 36
35 37
36 38 class Axes:
37 39 firsttime = None
38 40 ax = None
41 mesh = None
39 42
40 43 def __init__(self, ax):
41 44 self.firsttime = True
42 45 self.ax = ax
46 self.mesh = None
43 47
44 48 def pline(self, x, y, xmin, xmax, ymin, ymax, xlabel, ylabel, title):
45 49
46 50 mpldriver.pline(ax=self.ax,
47 51 x=x,
48 52 y=y,
49 53 xmin=xmin,
50 54 xmax=xmax,
51 55 ymin=ymin,
52 56 ymax=ymax,
53 57 xlabel=xlabel,
54 58 ylabel=ylabel,
55 59 title=title,
56 60 firsttime=self.firsttime)
57 61
58 62 self.firsttime = False
59 63
60 def pcolor(self):
61 pass
64 def pcolor(self, x, y, z, xmin, xmax, ymin, ymax, zmin, zmax, xlabel, ylabel, title):
65 meshfromaxes=mpldriver.pcolor(ax=self.ax,
66 x=x,
67 y=y,
68 z=z,
69 xmin=xmin,
70 xmax=xmax,
71 ymin=ymin,
72 ymax=ymax,
73 zmin=zmin,
74 zmax=zmax,
75 xlabel=xlabel,
76 ylabel=ylabel,
77 title=title,
78 firsttime=self.firsttime,
79 mesh=self.mesh)
80 self.mesh = meshfromaxes
81 self.firsttime = False
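The change above turns Figure and Axes into thin wrappers around mpldriver: Axes now caches the QuadMesh returned by the driver (self.mesh) so that later pcolor calls only update the existing mesh instead of redrawing it. A minimal usage sketch, assuming the classes above live in a module importable as "figure" (hypothetical name) and using made-up sizes and random data:

    import numpy
    from figure import Figure   # hypothetical module name for the class shown above

    fig = Figure()
    fig.init(idfigure=1, wintitle="Test", width=800, height=600, nplots=1)
    fig.setTitle("RTI example")
    fig.makeAxes(nrow=1, ncol=1, xpos=0, ypos=0, colspan=1, rowspan=1)

    x = numpy.arange(100)
    y = numpy.arange(50)
    z = numpy.random.rand(50, 100)

    # first call creates the pcolormesh and its colorbar; later calls reuse the cached mesh
    fig.axesList[0].pcolor(x, y, z, 0, 100, 0, 50, 0, 1, "Time", "Range", "Power")
    fig.draw()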
@@ -1,50 +1,75 @@
1 1 import matplotlib
2 2 matplotlib.use("TKAgg")
3 3 import matplotlib.pyplot
4 4 import scitools.numpyutils
5 from mpl_toolkits.axes_grid1 import make_axes_locatable
5 6
6 7 def init(idfigure, wintitle, width, height):
7 8 matplotlib.pyplot.ioff()
8 9 fig = matplotlib.pyplot.figure(num=idfigure, facecolor="w")
9 10 fig.canvas.manager.set_window_title(wintitle)
10 11 fig.canvas.manager.resize(width,height)
11 12 matplotlib.pyplot.ion()
12 13
13 14 def setTextFromAxes(idfigure, ax, title):
14 15 fig = matplotlib.pyplot.figure(idfigure)
15 16 ax.annotate(title, xy=(.1, .99),
16 17 xycoords='figure fraction',
17 18 horizontalalignment='left', verticalalignment='top',
18 19 fontsize=10)
19 20
20 21 def setTitle(idfigure, title):
21 22 fig = matplotlib.pyplot.figure(idfigure)
22 23 fig.suptitle(title)
23 24
24 25 def makeAxes(idfigure, nrow, ncol, xpos, ypos, colspan, rowspan):
25 26 fig = matplotlib.pyplot.figure(idfigure)
26 27 ax = matplotlib.pyplot.subplot2grid((nrow, ncol), (xpos, ypos), colspan=colspan, rowspan=rowspan)
27 28 return ax
28 29
29 30 def pline(ax, x, y, xmin, xmax, ymin, ymax, xlabel, ylabel, title, firsttime):
30 31 if firsttime:
31 32 ax.plot(x, y)
32 33 ax.set_xlim([xmin,xmax])
33 34 ax.set_ylim([ymin,ymax])
34 35 ax.set_xlabel(xlabel, size=8)
35 36 ax.set_ylabel(ylabel, size=8)
36 37 ax.set_title(title, size=10)
37 38 matplotlib.pyplot.tight_layout()
38 39 else:
39 40 ax.lines[0].set_data(x,y)
40 41
41 42 def draw(idfigure):
42 43 fig = matplotlib.pyplot.figure(idfigure)
43 44 fig.canvas.draw()
44 45
45 def pcolor():
46 pass
46 def pcolor(ax, x, y, z, xmin, xmax, ymin, ymax, zmin, zmax, xlabel, ylabel, title, firsttime, mesh):
47 if firsttime:
48 divider = make_axes_locatable(ax)
49 ax_cb = divider.new_horizontal(size="5%", pad=0.05)
50 fig1 = ax.get_figure()
51 fig1.add_axes(ax_cb)
52
53 ax.set_xlim([xmin,xmax])
54 ax.set_ylim([ymin,ymax])
55 ax.set_xlabel(xlabel)
56 ax.set_ylabel(ylabel)
57 ax.set_title(title)
58
59 imesh=ax.pcolormesh(x,y,z,vmin=zmin,vmax=zmax)
60 matplotlib.pyplot.colorbar(imesh, cax=ax_cb)
61 ax_cb.yaxis.tick_right()
62 for tl in ax_cb.get_yticklabels():
63 tl.set_visible(True)
64 ax_cb.yaxis.tick_right()
65 matplotlib.pyplot.tight_layout()
66 return imesh
67 else:
68 tmp = z[0:-1,0:-1]
69 mesh.set_array(tmp.ravel())
70
71 return mesh
47 72
48 73
49 74
50 75 No newline at end of file
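The new pcolor driver follows the usual matplotlib pattern for live pseudocolor plots: build the QuadMesh and its colorbar once (using make_axes_locatable to reserve space for the colorbar), then push new data into the existing mesh with set_array instead of recreating the axes. A standalone sketch of that pattern, written against the older matplotlib this module targets (where pcolormesh accepts a C array the same size as x and y and drops the last row and column), with illustrative random data:

    import numpy
    import matplotlib.pyplot as plt
    from mpl_toolkits.axes_grid1 import make_axes_locatable

    x = numpy.arange(100)
    y = numpy.arange(50)
    z = numpy.random.rand(50, 100)

    fig, ax = plt.subplots()
    cax = make_axes_locatable(ax).new_horizontal(size="5%", pad=0.05)
    fig.add_axes(cax)

    mesh = ax.pcolormesh(x, y, z, vmin=0, vmax=1)   # first draw: create mesh + colorbar
    plt.colorbar(mesh, cax=cax)

    z = numpy.random.rand(50, 100)                  # later draws: update in place
    mesh.set_array(z[:-1, :-1].ravel())
    fig.canvas.draw()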
@@ -1,2473 +1,2473 @@
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JRODataIO.py 169 2012-11-19 21:57:03Z murco $
5 5 '''
6 6
7 7 import os, sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import time, datetime
13 13
14 14 from jrodata import *
15 15 from jroheaderIO import *
16 16
17 17 def isNumber(str):
18 18 """
19 19 Checks whether the characters that make up a string can be converted to a number.
20 20
21 21 Exceptions:
22 22 If a given string cannot be converted to a number
23 23 Input:
24 24 str, string to analyze to determine whether or not it can be converted to a number
25 25
26 26 Return:
27 27 True : if the string is numeric
28 28 False : if it is not a numeric string
29 29 """
30 30 try:
31 31 float( str )
32 32 return True
33 33 except:
34 34 return False
35 35
36 36 def isThisFileinRange(filename, startUTSeconds, endUTSeconds):
37 37 """
38 38 Determines whether or not a data file falls within the specified date range.
39 39
40 40 Inputs:
41 41 filename : full name of the data file in Jicamarca format (.r)
42 42
43 43 startUTSeconds : start date of the selected range, given in
44 44 seconds counted from 01/01/1970.
45 45 endUTSeconds : end date of the selected range, given in
46 46 seconds counted from 01/01/1970.
47 47
48 48 Return:
49 49 Boolean : returns True if the data file contains data within the specified
50 50 date range, otherwise returns False.
51 51
52 52 Exceptions:
53 53 If the file does not exist or cannot be opened
54 54 If the header cannot be read.
55 55
56 56 """
57 57 basicHeaderObj = BasicHeader()
58 58
59 59 try:
60 60 fp = open(filename,'rb')
61 61 except:
62 62 raise IOError, "The file %s can't be opened" %(filename)
63 63
64 64 sts = basicHeaderObj.read(fp)
65 65 fp.close()
66 66
67 67 if not(sts):
68 68 print "Skipping the file %s because it has not a valid header" %(filename)
69 69 return 0
70 70
71 71 if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
72 72 return 0
73 73
74 74 return 1
75 75
76 76 def getlastFileFromPath(path, ext):
77 77 """
78 78 Filters fileList, keeping only the entries that match the "PYYYYDDDSSS.ext" format,
79 79 and returns the last file of the remaining list.
80 80
81 81 Input:
82 82 fileList : list containing all the files (without path) inside a given folder
83 83 ext : extension of the files contained in the folder
84 84
85 85 Return:
86 86 The last file of the given folder, without its path.
87 87 """
88 88 validFilelist = []
89 89 fileList = os.listdir(path)
90 90
91 91 # 0 1234 567 89A BCDE
92 92 # H YYYY DDD SSS .ext
93 93
94 94 for file in fileList:
95 95 try:
96 96 year = int(file[1:5])
97 97 doy = int(file[5:8])
98 98
99 99 if (os.path.splitext(file)[-1].upper() != ext.upper()) : continue
100 100 except:
101 101 continue
102 102
103 103 validFilelist.append(file)
104 104
105 105 if validFilelist:
106 106 validFilelist = sorted( validFilelist, key=str.lower )
107 107 return validFilelist[-1]
108 108
109 109 return None
110 110
111 111 def checkForRealPath(path, year, doy, set, ext):
112 112 """
113 113 Since Linux is case sensitive, checkForRealPath finds the correct spelling of a path.
114 114 It tries several upper/lower case combinations of the names to determine
115 115 the exact path of a given file.
116 116
117 117 Example :
118 118 the correct file name is .../.../D2009307/P2009307367.ext
119 119
120 120 The function then tries the following combinations
121 121 .../.../x2009307/y2009307367.ext
122 122 .../.../x2009307/Y2009307367.ext
123 123 .../.../X2009307/y2009307367.ext
124 124 .../.../X2009307/Y2009307367.ext
125 125 where, in this case, the last letter combination is identical to the file being searched for
126 126
127 127 Return:
128 128 If it finds the right combination it returns the full path and the file name,
129 129 otherwise it returns None as the path and the last upper-case name combination
130 130 as the filename
131 131 """
132 132 filepath = None
133 133 find_flag = False
134 134 filename = None
135 135
136 136 if ext.lower() == ".r": #voltage
137 137 header1 = "dD"
138 138 header2 = "dD"
139 139 elif ext.lower() == ".pdata": #spectra
140 140 header1 = "dD"
141 141 header2 = "pP"
142 142 else:
143 143 return None, filename
144 144
145 145 for dir in header1: #loop over both possible cases of the directory letter "D"
146 146 for fil in header2: #loop over both possible cases of the file prefix letter
147 147 doypath = "%s%04d%03d" % ( dir, year, doy ) #build the directory name xYYYYDDD (x=d or x=D)
148 148 filename = "%s%04d%03d%03d%s" % ( fil, year, doy, set, ext ) #build the file name xYYYYDDDSSS.ext
149 149 filepath = os.path.join( path, doypath, filename ) #build the full path
150 150 if os.path.exists( filepath ): #check that it exists
151 151 find_flag = True
152 152 break
153 153 if find_flag:
154 154 break
155 155
156 156 if not(find_flag):
157 157 return None, filename
158 158
159 159 return filepath, filename
160 160
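Both helpers above rely on the Jicamarca naming convention xYYYYDDDSSS.ext (prefix letter, 4-digit year, 3-digit day of year, 3-digit set number), with the day folder named xYYYYDDD. A small sketch of how those names are built and parsed, using made-up values for year, doy and set:

    # illustrative only: builds and parses names like D2009307/D2009307367.r
    year, doy, set = 2009, 307, 367

    doypath  = "%s%04d%03d" % ("D", year, doy)                    # 'D2009307'
    filename = "%s%04d%03d%03d%s" % ("D", year, doy, set, ".r")   # 'D2009307367.r'

    # the slicing used elsewhere in this module to recover the fields
    parsed_year = int(filename[1:5])     # 2009
    parsed_doy  = int(filename[5:8])     # 307
    parsed_set  = int(filename[8:11])    # 367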
161 161 class JRODataIO:
162 162
163 163 c = 3E8
164 164
165 165 isConfig = False
166 166
167 167 basicHeaderObj = BasicHeader()
168 168
169 169 systemHeaderObj = SystemHeader()
170 170
171 171 radarControllerHeaderObj = RadarControllerHeader()
172 172
173 173 processingHeaderObj = ProcessingHeader()
174 174
175 175 online = 0
176 176
177 177 dtype = None
178 178
179 179 pathList = []
180 180
181 181 filenameList = []
182 182
183 183 filename = None
184 184
185 185 ext = None
186 186
187 187 flagNoMoreFiles = 0
188 188
189 189 flagIsNewFile = 1
190 190
191 191 flagTimeBlock = 0
192 192
193 193 flagIsNewBlock = 0
194 194
195 195 fp = None
196 196
197 197 firstHeaderSize = 0
198 198
199 199 basicHeaderSize = 24
200 200
201 201 versionFile = 1103
202 202
203 203 fileSize = None
204 204
205 205 ippSeconds = None
206 206
207 207 fileSizeByHeader = None
208 208
209 209 fileIndex = None
210 210
211 211 profileIndex = None
212 212
213 213 blockIndex = None
214 214
215 215 nTotalBlocks = None
216 216
217 217 maxTimeStep = 30
218 218
219 219 lastUTTime = None
220 220
221 221 datablock = None
222 222
223 223 dataOut = None
224 224
225 225 blocksize = None
226 226
227 227 def __init__(self):
228 228
229 229 raise ValueError, "Not implemented"
230 230
231 231 def run(self):
232 232
233 233 raise ValueError, "Not implemented"
234 234
235 def getOuput(self):
235 def getOutput(self):
236 236
237 237 return self.dataOut
238 238
239 239 class JRODataReader(JRODataIO):
240 240
241 241 nReadBlocks = 0
242 242
243 243 delay = 60 #number of seconds to wait for a new file
244 244
245 245 nTries = 3 #number of tries
246 246
247 247 nFiles = 3 #number of files to search
248 248
249 249
250 250 def __init__(self):
251 251
252 252 """
253 253
254 254 """
255 255
256 256 raise ValueError, "This method has not been implemented"
257 257
258 258
259 259 def createObjByDefault(self):
260 260 """
261 261
262 262 """
263 263 raise ValueError, "This method has not been implemented"
264 264
265 265 def getBlockDimension(self):
266 266
267 267 raise ValueError, "No implemented"
268 268
269 269 def __searchFilesOffLine(self,
270 270 path,
271 271 startDate,
272 272 endDate,
273 273 startTime=datetime.time(0,0,0),
274 274 endTime=datetime.time(23,59,59),
275 275 set=None,
276 276 expLabel="",
277 277 ext=".r"):
278 278 dirList = []
279 279 for thisPath in os.listdir(path):
280 280 if os.path.isdir(os.path.join(path,thisPath)):
281 281 dirList.append(thisPath)
282 282
283 283 if not(dirList):
284 284 return None, None
285 285
286 286 pathList = []
287 287 dateList = []
288 288
289 289 thisDate = startDate
290 290
291 291 while(thisDate <= endDate):
292 292 year = thisDate.timetuple().tm_year
293 293 doy = thisDate.timetuple().tm_yday
294 294
295 295 match = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy))
296 296 if len(match) == 0:
297 297 thisDate += datetime.timedelta(1)
298 298 continue
299 299
300 300 pathList.append(os.path.join(path,match[0],expLabel))
301 301 dateList.append(thisDate)
302 302 thisDate += datetime.timedelta(1)
303 303
304 304 filenameList = []
305 305 for index in range(len(pathList)):
306 306
307 307 thisPath = pathList[index]
308 308 fileList = glob.glob1(thisPath, "*%s" %ext)
309 309 fileList.sort()
310 310
311 311 #Search for data within the indicated time range
312 312 thisDate = dateList[index]
313 313 startDT = datetime.datetime.combine(thisDate, startTime)
314 314 endDT = datetime.datetime.combine(thisDate, endTime)
315 315
316 316 startUtSeconds = time.mktime(startDT.timetuple())
317 317 endUtSeconds = time.mktime(endDT.timetuple())
318 318
319 319 for file in fileList:
320 320
321 321 filename = os.path.join(thisPath,file)
322 322
323 323 if isThisFileinRange(filename, startUtSeconds, endUtSeconds):
324 324 filenameList.append(filename)
325 325
326 326 if not(filenameList):
327 327 return None, None
328 328
329 329 self.filenameList = filenameList
330 330
331 331 return pathList, filenameList
332 332
333 333 def __searchFilesOnLine(self, path, startDate=None, endDate=None, startTime=None, endTime=None, expLabel = "", ext = None):
334 334
335 335 """
336 336 Searches for the last file of the last folder (whether or not it is constrained by startDateTime) and
337 337 returns the file found, along with some additional data.
338 338
339 339 Input:
340 340 path : folder that contains the data files
341 341
342 342 startDate : start date. Rejects every directory where
343 343 file end time < startDate (datetime.date object)
344 344
345 345 endDate : end date. Rejects every directory where
346 346 file start time > endDate (datetime.date object)
347 347
348 348 startTime : start time. Rejects every file where
349 349 file end time < startTime (datetime.time object)
350 350
351 351 endTime : end time. Rejects every file where
352 352 file start time > endTime (datetime.time object)
353 353
354 354 expLabel : name of the sub-experiment (subfolder)
355 355
356 356 ext : extension of the files
357 357
358 358 Return:
359 359 directory : the directory where the found file is located
360 360 filename : the last file of the given folder
361 361 year : the year
362 362 doy : the day-of-year number
363 363 set : the set number of the file
364 364
365 365
366 366 """
367 367 dirList = []
368 368 pathList = []
369 369 directory = None
370 370
371 371 #Keep only the directories
372 372 for thisPath in os.listdir(path):
373 373 if os.path.isdir(os.path.join(path, thisPath)):
374 374 dirList.append(thisPath)
375 375
376 376 if not(dirList):
377 377 return None, None, None, None, None
378 378
379 379 dirList = sorted( dirList, key=str.lower )
380 380
381 381 if startDate:
382 382 startDateTime = datetime.datetime.combine(startDate, startTime)
383 383 thisDateTime = startDateTime
384 384 if endDate == None: endDateTime = startDateTime
385 385 else: endDateTime = datetime.datetime.combine(endDate, endTime)
386 386
387 387 while(thisDateTime <= endDateTime):
388 388 year = thisDateTime.timetuple().tm_year
389 389 doy = thisDateTime.timetuple().tm_yday
390 390
391 391 match = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy))
392 392 if len(match) == 0:
393 393 thisDateTime += datetime.timedelta(1)
394 394 continue
395 395
396 396 pathList.append(os.path.join(path,match[0], expLabel))
397 397 thisDateTime += datetime.timedelta(1)
398 398
399 399 if not(pathList):
400 400 print "\tNo files in range: %s - %s" %(startDateTime.ctime(), endDateTime.ctime())
401 401 return None, None, None, None, None
402 402
403 403 directory = pathList[0]
404 404
405 405 else:
406 406 directory = dirList[-1]
407 407 directory = os.path.join(path,directory)
408 408
409 409 filename = getlastFileFromPath(directory, ext)
410 410
411 411 if not(filename):
412 412 return None, None, None, None, None
413 413
414 414 if not(self.__verifyFile(os.path.join(directory, filename))):
415 415 return None, None, None, None, None
416 416
417 417 year = int( filename[1:5] )
418 418 doy = int( filename[5:8] )
419 419 set = int( filename[8:11] )
420 420
421 421 return directory, filename, year, doy, set
422 422
423 423 def setup(self,
424 424 path=None,
425 425 startDate=None,
426 426 endDate=None,
427 427 startTime=datetime.time(0,0,0),
428 428 endTime=datetime.time(23,59,59),
429 429 set=0,
430 430 expLabel = "",
431 431 ext = None,
432 432 online = False,
433 433 delay = 60):
434 434
435 435 if path == None:
436 436 raise ValueError, "The path is not valid"
437 437
438 438 if ext == None:
439 439 ext = self.ext
440 440
441 441 if online:
442 442 print "Searching files in online mode..."
443 443 doypath, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext)
444 444
445 445 if not(doypath):
446 446 for nTries in range( self.nTries ):
447 447 print '\tWaiting %0.2f sec for a valid file in %s: try %02d ...' % (self.delay, path, nTries+1)
448 448 time.sleep( self.delay )
449 449 doypath, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext)
450 450 if doypath:
451 451 break
452 452
453 453 if not(doypath):
454 454 print "There 'isn't valied files in %s" % path
455 455 return None
456 456
457 457 self.year = year
458 458 self.doy = doy
459 459 self.set = set - 1
460 460 self.path = path
461 461
462 462 else:
463 463 print "Searching files in offline mode ..."
464 464 pathList, filenameList = self.__searchFilesOffLine(path, startDate, endDate, startTime, endTime, set, expLabel, ext)
465 465
466 466 if not(pathList):
467 467 print "No *%s files into the folder %s \nfor the range: %s - %s"%(ext, path,
468 468 datetime.datetime.combine(startDate,startTime).ctime(),
469 469 datetime.datetime.combine(endDate,endTime).ctime())
470 470
471 471 sys.exit(-1)
472 472
473 473
474 474 self.fileIndex = -1
475 475 self.pathList = pathList
476 476 self.filenameList = filenameList
477 477
478 478 self.online = online
479 479 self.delay = delay
480 480 ext = ext.lower()
481 481 self.ext = ext
482 482
483 483 if not(self.setNextFile()):
484 484 if (startDate!=None) and (endDate!=None):
485 485 print "No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
486 486 elif startDate != None:
487 487 print "No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
488 488 else:
489 489 print "No files"
490 490
491 491 sys.exit(-1)
492 492
493 493 # self.updateDataHeader()
494 494
495 495 return self.dataOut
496 496
497 497 def __setNextFileOffline(self):
498 498
499 499 idFile = self.fileIndex
500 500
501 501 while (True):
502 502 idFile += 1
503 503 if not(idFile < len(self.filenameList)):
504 504 self.flagNoMoreFiles = 1
505 505 print "No more Files"
506 506 return 0
507 507
508 508 filename = self.filenameList[idFile]
509 509
510 510 if not(self.__verifyFile(filename)):
511 511 continue
512 512
513 513 fileSize = os.path.getsize(filename)
514 514 fp = open(filename,'rb')
515 515 break
516 516
517 517 self.flagIsNewFile = 1
518 518 self.fileIndex = idFile
519 519 self.filename = filename
520 520 self.fileSize = fileSize
521 521 self.fp = fp
522 522
523 523 print "Setting the file: %s"%self.filename
524 524
525 525 return 1
526 526
527 527 def __setNextFileOnline(self):
528 528 """
529 529 Searches, inside a specific folder, for the next file that has enough data to be read; if
530 530 no valid file is found it waits for a given amount of time and then searches among the next
531 531 n possible files.
532 532
533 533 Affected:
534 534 self.flagIsNewFile
535 535 self.filename
536 536 self.fileSize
537 537 self.fp
538 538 self.set
539 539 self.flagNoMoreFiles
540 540
541 541 Return:
542 542 0 : if, after searching for the next valid file, it could not be found
543 543 1 : if the file was opened successfully and is ready to be read
544 544
545 545 Exceptions:
546 546 If a given file cannot be opened
547 547 """
548 548 nFiles = 0
549 549 fileOk_flag = False
550 550 firstTime_flag = True
551 551
552 552 self.set += 1
553 553
554 554 #look for the first available file
555 555 file, filename = checkForRealPath( self.path, self.year, self.doy, self.set, self.ext )
556 556 if file:
557 557 if self.__verifyFile(file, False):
558 558 fileOk_flag = True
559 559
560 560 #if no file is found then wait and search again
561 561 if not(fileOk_flag):
562 562 for nFiles in range(self.nFiles+1): #search among the next self.nFiles+1 possible files
563 563
564 564 if firstTime_flag: #on the first pass the loop runs self.nTries times
565 565 tries = self.nTries
566 566 else:
567 567 tries = 1 #after the first pass it only tries once
568 568
569 569 for nTries in range( tries ):
570 570 if firstTime_flag:
571 571 print "\tWaiting %0.2f sec for the file \"%s\" , try %03d ..." % ( self.delay, filename, nTries+1 )
572 572 time.sleep( self.delay )
573 573 else:
574 574 print "\tSearching next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext)
575 575
576 576 file, filename = checkForRealPath( self.path, self.year, self.doy, self.set, self.ext )
577 577 if file:
578 578 if self.__verifyFile(file):
579 579 fileOk_flag = True
580 580 break
581 581
582 582 if fileOk_flag:
583 583 break
584 584
585 585 firstTime_flag = False
586 586
587 587 print "\tSkipping the file \"%s\" due to this file doesn't exist" % filename
588 588 self.set += 1
589 589
590 590 if nFiles == (self.nFiles-1): #if the requested file is not found, move to the next folder and search there
591 591 self.set = 0
592 592 self.doy += 1
593 593
594 594 if fileOk_flag:
595 595 self.fileSize = os.path.getsize( file )
596 596 self.filename = file
597 597 self.flagIsNewFile = 1
598 598 if self.fp != None: self.fp.close()
599 599 self.fp = open(file)
600 600 self.flagNoMoreFiles = 0
601 601 print 'Setting the file: %s' % file
602 602 else:
603 603 self.fileSize = 0
604 604 self.filename = None
605 605 self.flagIsNewFile = 0
606 606 self.fp = None
607 607 self.flagNoMoreFiles = 1
608 608 print 'No more Files'
609 609
610 610 return fileOk_flag
611 611
612 612
613 613 def setNextFile(self):
614 614 if self.fp != None:
615 615 self.fp.close()
616 616
617 617 if self.online:
618 618 newFile = self.__setNextFileOnline()
619 619 else:
620 620 newFile = self.__setNextFileOffline()
621 621
622 622 if not(newFile):
623 623 return 0
624 624
625 625 self.__readFirstHeader()
626 626 self.nReadBlocks = 0
627 627 return 1
628 628
629 629 def __setNewBlock(self):
630 630 if self.fp == None:
631 631 return 0
632 632
633 633 if self.flagIsNewFile:
634 634 return 1
635 635
636 636 self.lastUTTime = self.basicHeaderObj.utc
637 637 currentSize = self.fileSize - self.fp.tell()
638 638 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
639 639
640 640 if (currentSize >= neededSize):
641 641 self.__rdBasicHeader()
642 642 return 1
643 643
644 644 if not(self.setNextFile()):
645 645 return 0
646 646
647 647 deltaTime = self.basicHeaderObj.utc - self.lastUTTime #
648 648
649 649 self.flagTimeBlock = 0
650 650
651 651 if deltaTime > self.maxTimeStep:
652 652 self.flagTimeBlock = 1
653 653
654 654 return 1
655 655
656 656
657 657 def readNextBlock(self):
658 658 if not(self.__setNewBlock()):
659 659 return 0
660 660
661 661 if not(self.readBlock()):
662 662 return 0
663 663
664 664 return 1
665 665
666 666 def __rdProcessingHeader(self, fp=None):
667 667 if fp == None:
668 668 fp = self.fp
669 669
670 670 self.processingHeaderObj.read(fp)
671 671
672 672 def __rdRadarControllerHeader(self, fp=None):
673 673 if fp == None:
674 674 fp = self.fp
675 675
676 676 self.radarControllerHeaderObj.read(fp)
677 677
678 678 def __rdSystemHeader(self, fp=None):
679 679 if fp == None:
680 680 fp = self.fp
681 681
682 682 self.systemHeaderObj.read(fp)
683 683
684 684 def __rdBasicHeader(self, fp=None):
685 685 if fp == None:
686 686 fp = self.fp
687 687
688 688 self.basicHeaderObj.read(fp)
689 689
690 690
691 691 def __readFirstHeader(self):
692 692 self.__rdBasicHeader()
693 693 self.__rdSystemHeader()
694 694 self.__rdRadarControllerHeader()
695 695 self.__rdProcessingHeader()
696 696
697 697 self.firstHeaderSize = self.basicHeaderObj.size
698 698
699 699 datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
700 700 if datatype == 0:
701 701 datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
702 702 elif datatype == 1:
703 703 datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')])
704 704 elif datatype == 2:
705 705 datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')])
706 706 elif datatype == 3:
707 707 datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')])
708 708 elif datatype == 4:
709 709 datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')])
710 710 elif datatype == 5:
711 711 datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')])
712 712 else:
713 713 raise ValueError, 'Data type was not defined'
714 714
715 715 self.dtype = datatype_str
716 716 self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
717 717 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1)
718 718 # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
719 719 # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
720 720 self.getBlockDimension()
721 721
722 722
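A short worked sketch of the two formulas used in __readFirstHeader above: the datatype index is recovered as log2(processFlags & DATATYPE_MASK) - log2(DATATYPE_CHAR), and ippSeconds converts the inter-pulse period from kilometers (as stored in the radar controller header) to seconds with c = 3e8 m/s. The flag values below are assumptions made only for the example; the real constants come from PROCFLAG in jroheaderIO:

    import numpy

    DATATYPE_CHAR  = 0x40    # assumed bit for 1-byte samples (illustrative)
    DATATYPE_SHORT = 0x80    # assumed bit for 2-byte samples (illustrative)
    DATATYPE_MASK  = 0xFC0   # assumed mask covering the six datatype bits

    processFlags = DATATYPE_SHORT    # e.g. a file written with short integers

    datatype = int(numpy.log2(processFlags & DATATYPE_MASK) - numpy.log2(DATATYPE_CHAR))
    # datatype == 1  ->  numpy.dtype([('real','<i2'),('imag','<i2')])

    c = 3e8
    ipp_km = 150.0                        # inter-pulse period in km (illustrative)
    ippSeconds = 2 * 1000 * ipp_km / c    # 0.001 s, i.e. a 1 kHz PRF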
723 723 def __verifyFile(self, filename, msgFlag=True):
724 724 msg = None
725 725 try:
726 726 fp = open(filename, 'rb')
727 727 currentPosition = fp.tell()
728 728 except:
729 729 if msgFlag:
730 730 print "The file %s can't be opened" % (filename)
731 731 return False
732 732
733 733 neededSize = self.processingHeaderObj.blockSize + self.firstHeaderSize
734 734
735 735 if neededSize == 0:
736 736 basicHeaderObj = BasicHeader()
737 737 systemHeaderObj = SystemHeader()
738 738 radarControllerHeaderObj = RadarControllerHeader()
739 739 processingHeaderObj = ProcessingHeader()
740 740
741 741 try:
742 742 if not( basicHeaderObj.read(fp) ): raise ValueError
743 743 if not( systemHeaderObj.read(fp) ): raise ValueError
744 744 if not( radarControllerHeaderObj.read(fp) ): raise ValueError
745 745 if not( processingHeaderObj.read(fp) ): raise ValueError
746 746 data_type = int(numpy.log2((processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
747 747
748 748 neededSize = processingHeaderObj.blockSize + basicHeaderObj.size
749 749
750 750 except:
751 751 if msgFlag:
752 752 print "\tThe file %s is empty or it hasn't enough data" % filename
753 753
754 754 fp.close()
755 755 return False
756 756 else:
757 757 msg = "\tSkipping the file %s due to it hasn't enough data" %filename
758 758
759 759 fp.close()
760 760 fileSize = os.path.getsize(filename)
761 761 currentSize = fileSize - currentPosition
762 762 if currentSize < neededSize:
763 763 if msgFlag and (msg != None):
764 764 print msg #print"\tSkipping the file %s due to it hasn't enough data" %filename
765 765 return False
766 766
767 767 return True
768 768
769 769 def getData(self):
770 770 pass
771 771
772 772 def hasNotDataInBuffer(self):
773 773 pass
774 774
775 775 def readBlock(self):
776 776 pass
777 777
778 778 def run(self, **kwargs):
779 779
780 780 if not(self.isConfig):
781 781
782 782 # self.dataOut = dataOut
783 783 self.setup(**kwargs)
784 784 self.isConfig = True
785 785
786 786 self.getData()
787 787
788 788 class JRODataWriter(JRODataIO):
789 789
790 790 """
791 791 This class writes data to processed files (.r or .pdata). The data is always
792 792 written in blocks.
793 793 """
794 794
795 795 blockIndex = 0
796 796
797 797 path = None
798 798
799 799 setFile = None
800 800
801 801 profilesPerBlock = None
802 802
803 803 blocksPerFile = None
804 804
805 805 nWriteBlocks = 0
806 806
807 807 def __init__(self, dataOut=None):
808 808 raise ValueError, "Not implemented"
809 809
810 810
811 811 def hasAllDataInBuffer(self):
812 812 raise ValueError, "Not implemented"
813 813
814 814
815 815 def setBlockDimension(self):
816 816 raise ValueError, "Not implemented"
817 817
818 818
819 819 def writeBlock(self):
820 820 raise ValueError, "No implemented"
821 821
822 822
823 823 def putData(self):
824 824 raise ValueError, "No implemented"
825 825
826 826 def getDataHeader(self):
827 827 """
828 828 Gets a copy of the First Header
829 829
830 830 Affected:
831 831
832 832 self.basicHeaderObj
833 833 self.systemHeaderObj
834 834 self.radarControllerHeaderObj
835 835 self.processingHeaderObj
836 836
837 837 Return:
838 838 None
839 839 """
840 840
841 841 raise ValueError, "No implemented"
842 842
843 843 def getBasicHeader(self):
844 844
845 845 self.basicHeaderObj.size = self.basicHeaderSize #bytes
846 846 self.basicHeaderObj.version = self.versionFile
847 847 self.basicHeaderObj.dataBlock = self.nTotalBlocks
848 848
849 849 utc = numpy.floor(self.dataOut.utctime)
850 850 milisecond = (self.dataOut.utctime - utc)* 1000.0
851 851
852 852 self.basicHeaderObj.utc = utc
853 853 self.basicHeaderObj.miliSecond = milisecond
854 854 self.basicHeaderObj.timeZone = 0
855 855 self.basicHeaderObj.dstFlag = 0
856 856 self.basicHeaderObj.errorCount = 0
857 857
858 858 def __writeFirstHeader(self):
859 859 """
860 860 Writes the first header of the file, i.e. the Basic header plus the Long header (SystemHeader, RadarControllerHeader, ProcessingHeader)
861 861
862 862 Affected:
863 863 __dataType
864 864
865 865 Return:
866 866 None
867 867 """
868 868
869 869 # COMPUTE PARAMETERS
870 870
871 871 sizeLongHeader = self.systemHeaderObj.size + self.radarControllerHeaderObj.size + self.processingHeaderObj.size
872 872 self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader
873 873
874 874 self.basicHeaderObj.write(self.fp)
875 875 self.systemHeaderObj.write(self.fp)
876 876 self.radarControllerHeaderObj.write(self.fp)
877 877 self.processingHeaderObj.write(self.fp)
878 878
879 879 self.dtype = self.dataOut.dtype
880 880
881 881 def __setNewBlock(self):
882 882 """
883 883 If this is a new file it writes the First Header, otherwise it writes only the Basic Header
884 884
885 885 Return:
886 886 0 : if nothing could be written
887 887 1 : if the Basic or the First Header was written
888 888 """
889 889 if self.fp == None:
890 890 self.setNextFile()
891 891
892 892 if self.flagIsNewFile:
893 893 return 1
894 894
895 895 if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
896 896 self.basicHeaderObj.write(self.fp)
897 897 return 1
898 898
899 899 if not( self.setNextFile() ):
900 900 return 0
901 901
902 902 return 1
903 903
904 904
905 905 def writeNextBlock(self):
906 906 """
907 907 Selects the next data block and writes it to a file
908 908
909 909 Return:
910 910 0 : if the data block could not be written
911 911 1 : if the data block was written
912 912 """
913 913 if not( self.__setNewBlock() ):
914 914 return 0
915 915
916 916 self.writeBlock()
917 917
918 918 return 1
919 919
920 920 def setNextFile(self):
921 921 """
922 922 Determines the next file to be written
923 923
924 924 Affected:
925 925 self.filename
926 926 self.subfolder
927 927 self.fp
928 928 self.setFile
929 929 self.flagIsNewFile
930 930
931 931 Return:
932 932 0 : if the file cannot be written
933 933 1 : if the file is ready to be written
934 934 """
935 935 ext = self.ext
936 936 path = self.path
937 937
938 938 if self.fp != None:
939 939 self.fp.close()
940 940
941 941 timeTuple = time.localtime( self.dataOut.dataUtcTime)
942 942 subfolder = 'D%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
943 943
944 944 doypath = os.path.join( path, subfolder )
945 945 if not( os.path.exists(doypath) ):
946 946 os.mkdir(doypath)
947 947 self.setFile = -1 #initialize the set counter
948 948 else:
949 949 filesList = os.listdir( doypath )
950 950 if len( filesList ) > 0:
951 951 filesList = sorted( filesList, key=str.lower )
952 952 filen = filesList[-1]
953 953 # the filename must have the following format
954 954 # 0 1234 567 89A BCDE (hex)
955 955 # x YYYY DDD SSS .ext
956 956 if isNumber( filen[8:11] ):
957 957 self.setFile = int( filen[8:11] ) #initialize the set counter with the set of the last file
958 958 else:
959 959 self.setFile = -1
960 960 else:
961 961 self.setFile = -1 #initialize the set counter
962 962
963 963 setFile = self.setFile
964 964 setFile += 1
965 965
966 966 file = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
967 967 timeTuple.tm_year,
968 968 timeTuple.tm_yday,
969 969 setFile,
970 970 ext )
971 971
972 972 filename = os.path.join( path, subfolder, file )
973 973
974 974 fp = open( filename,'wb' )
975 975
976 976 self.blockIndex = 0
977 977
978 978 #saving attributes
979 979 self.filename = filename
980 980 self.subfolder = subfolder
981 981 self.fp = fp
982 982 self.setFile = setFile
983 983 self.flagIsNewFile = 1
984 984
985 985 self.getDataHeader()
986 986
987 987 print 'Writing the file: %s'%self.filename
988 988
989 989 self.__writeFirstHeader()
990 990
991 991 return 1
992 992
993 993 def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=None, set=0, ext=None):
994 994 """
995 995 Sets the format in which the data will be stored and writes the First Header
996 996
997 997 Inputs:
998 998 path : destination path where the new files will be written
999 999 format : format in which a file will be saved
1000 1000 set : the set number of the file
1001 1001
1002 1002 Return:
1003 1003 0 : if the setup was not successful
1004 1004 1 : if the setup was successful
1005 1005 """
1006 1006
1007 1007 if ext == None:
1008 1008 ext = self.ext
1009 1009
1010 1010 ext = ext.lower()
1011 1011
1012 1012 self.ext = ext
1013 1013
1014 1014 self.path = path
1015 1015
1016 1016 self.setFile = set - 1
1017 1017
1018 1018 self.blocksPerFile = blocksPerFile
1019 1019
1020 1020 self.profilesPerBlock = profilesPerBlock
1021 1021
1022 1022 self.dataOut = dataOut
1023 1023
1024 1024 if not(self.setNextFile()):
1025 1025 print "There isn't a next file"
1026 1026 return 0
1027 1027
1028 1028 self.setBlockDimension()
1029 1029
1030 1030 return 1
1031 1031
1032 1032 def run(self, dataOut, **kwargs):
1033 1033
1034 1034 if not(self.isConfig):
1035 1035
1036 1036 self.setup(dataOut, **kwargs)
1037 1037 self.isConfig = True
1038 1038
1039 1039 self.putData()
1040 1040
1041 1041 class VoltageReader(JRODataReader):
1042 1042 """
1043 1043 This class reads voltage data from files in rawdata format (.r). The data is always
1044 1044 read in blocks. The data read (a 3-dimensional array:
1045 1045 profiles*heights*channels) is stored in the "buffer" variable.
1046 1046
1047 1047 profiles * heights * channels
1048 1048
1049 1049 This class contains instances (objects) of the BasicHeader, SystemHeader,
1050 1050 RadarControllerHeader and Voltage classes. The first three are used to store the data
1051 1051 header information (metadata), and the fourth (Voltage) to obtain and store one profile of
1052 1052 data from the "buffer" each time the "getData" method is called.
1053 1053
1054 1054 Example:
1055 1055
1056 1056 dpath = "/home/myuser/data"
1057 1057
1058 1058 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
1059 1059
1060 1060 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
1061 1061
1062 1062 readerObj = VoltageReader()
1063 1063
1064 1064 readerObj.setup(dpath, startTime, endTime)
1065 1065
1066 1066 while(True):
1067 1067
1068 1068 #to get one profile
1069 1069 profile = readerObj.getData()
1070 1070
1071 1071 #print the profile
1072 1072 print profile
1073 1073
1074 1074 #If you want to see all datablock
1075 1075 print readerObj.datablock
1076 1076
1077 1077 if readerObj.flagNoMoreFiles:
1078 1078 break
1079 1079
1080 1080 """
1081 1081
1082 1082 ext = ".r"
1083 1083
1084 1084 optchar = "D"
1085 1085 dataOut = None
1086 1086
1087 1087
1088 1088 def __init__(self):
1089 1089 """
1090 1090 Initializer of the VoltageReader class for reading voltage data.
1091 1091
1092 1092 Input:
1093 1093 dataOut : object of the Voltage class. This object will be used to
1094 1094 store one profile of data every time a request is made
1095 1095 (getData). The profile will be obtained from the data buffer;
1096 1096 if the buffer is empty, a new read of a data block
1097 1097 will be performed.
1098 1098 If this parameter is not passed, one will be created internally.
1099 1099
1100 1100 Affected variables:
1101 1101 self.dataOut
1102 1102
1103 1103 Return:
1104 1104 None
1105 1105 """
1106 1106
1107 1107 self.isConfig = False
1108 1108
1109 1109 self.datablock = None
1110 1110
1111 1111 self.utc = 0
1112 1112
1113 1113 self.ext = ".r"
1114 1114
1115 1115 self.optchar = "D"
1116 1116
1117 1117 self.basicHeaderObj = BasicHeader()
1118 1118
1119 1119 self.systemHeaderObj = SystemHeader()
1120 1120
1121 1121 self.radarControllerHeaderObj = RadarControllerHeader()
1122 1122
1123 1123 self.processingHeaderObj = ProcessingHeader()
1124 1124
1125 1125 self.online = 0
1126 1126
1127 1127 self.fp = None
1128 1128
1129 1129 self.idFile = None
1130 1130
1131 1131 self.dtype = None
1132 1132
1133 1133 self.fileSizeByHeader = None
1134 1134
1135 1135 self.filenameList = []
1136 1136
1137 1137 self.filename = None
1138 1138
1139 1139 self.fileSize = None
1140 1140
1141 1141 self.firstHeaderSize = 0
1142 1142
1143 1143 self.basicHeaderSize = 24
1144 1144
1145 1145 self.pathList = []
1146 1146
1147 1147 self.filenameList = []
1148 1148
1149 1149 self.lastUTTime = 0
1150 1150
1151 1151 self.maxTimeStep = 30
1152 1152
1153 1153 self.flagNoMoreFiles = 0
1154 1154
1155 1155 self.set = 0
1156 1156
1157 1157 self.path = None
1158 1158
1159 1159 self.profileIndex = 9999
1160 1160
1161 1161 self.delay = 3 #seconds
1162 1162
1163 1163 self.nTries = 3 #number of tries
1164 1164
1165 1165 self.nFiles = 3 #number of files to search
1166 1166
1167 1167 self.nReadBlocks = 0
1168 1168
1169 1169 self.flagIsNewFile = 1
1170 1170
1171 1171 self.ippSeconds = 0
1172 1172
1173 1173 self.flagTimeBlock = 0
1174 1174
1175 1175 self.flagIsNewBlock = 0
1176 1176
1177 1177 self.nTotalBlocks = 0
1178 1178
1179 1179 self.blocksize = 0
1180 1180
1181 1181 self.dataOut = self.createObjByDefault()
1182 1182
1183 1183 def createObjByDefault(self):
1184 1184
1185 1185 dataObj = Voltage()
1186 1186
1187 1187 return dataObj
1188 1188
1189 1189 def __hasNotDataInBuffer(self):
1190 1190 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
1191 1191 return 1
1192 1192 return 0
1193 1193
1194 1194
1195 1195 def getBlockDimension(self):
1196 1196 """
1197 1197 Gets the number of points to read per data block
1198 1198
1199 1199 Affected:
1200 1200 self.blocksize
1201 1201
1202 1202 Return:
1203 1203 None
1204 1204 """
1205 1205 pts2read = self.processingHeaderObj.profilesPerBlock * self.processingHeaderObj.nHeights * self.systemHeaderObj.nChannels
1206 1206 self.blocksize = pts2read
1207 1207
1208 1208
1209 1209 def readBlock(self):
1210 1210 """
1211 1211 readBlock reads the data block from the current position of the file pointer
1212 1212 (self.fp) and updates all the parameters related to the data block
1213 1213 (metadata + data). The data read is stored in the buffer and the buffer counter
1214 1214 is reset to 0
1215 1215
1216 1216 Inputs:
1217 1217 None
1218 1218
1219 1219 Return:
1220 1220 None
1221 1221
1222 1222 Affected:
1223 1223 self.profileIndex
1224 1224 self.datablock
1225 1225 self.flagIsNewFile
1226 1226 self.flagIsNewBlock
1227 1227 self.nTotalBlocks
1228 1228
1229 1229 Exceptions:
1230 1230 If a block read is not a valid block
1231 1231 """
1232 1232
1233 1233 junk = numpy.fromfile( self.fp, self.dtype, self.blocksize )
1234 1234
1235 1235 try:
1236 1236 junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
1237 1237 except:
1238 1238 print "The read block (%3d) has not enough data" %self.nReadBlocks
1239 1239 return 0
1240 1240
1241 1241 junk = numpy.transpose(junk, (2,0,1))
1242 1242 self.datablock = junk['real'] + junk['imag']*1j
1243 1243
1244 1244 self.profileIndex = 0
1245 1245
1246 1246 self.flagIsNewFile = 0
1247 1247 self.flagIsNewBlock = 1
1248 1248
1249 1249 self.nTotalBlocks += 1
1250 1250 self.nReadBlocks += 1
1251 1251
1252 1252 return 1
1253 1253
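readBlock above leans on numpy structured dtypes: the samples are stored on disk as interleaved (real, imag) integer pairs, read as a flat record array, reshaped to (profiles, heights, channels), transposed to channel-first order and finally combined into a complex voltage array. A self-contained sketch of that conversion with small made-up dimensions and in-memory data instead of a file:

    import numpy

    profiles, heights, channels = 4, 8, 2
    dtype = numpy.dtype([('real', '<i2'), ('imag', '<i2')])

    # stand-in for numpy.fromfile(self.fp, self.dtype, self.blocksize)
    junk = numpy.zeros(profiles * heights * channels, dtype=dtype)
    junk['real'] = 1
    junk['imag'] = -1

    junk = junk.reshape((profiles, heights, channels))
    junk = numpy.transpose(junk, (2, 0, 1))           # -> (channels, profiles, heights)
    datablock = junk['real'] + junk['imag'] * 1j      # complex voltages

    print datablock.shape                             # (2, 4, 8)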
1254 1254
1255 1255 def getData(self):
1256 1256 """
1257 1257 getData obtains one data unit from the read buffer and copies it to the "Voltage" class
1258 1258 together with all its associated parameters (metadata). When there is no data in the read
1259 1259 buffer, a new read of the data blocks must be done using "readNextBlock"
1260 1260
1261 1261 It also increments the buffer counter by 1.
1262 1262
1263 1263 Return:
1264 1264 data : returns one profile of voltages (heights * channels) copied from the
1265 1265 buffer. If there are no more files to read it returns None.
1266 1266
1267 1267 Affected variables:
1268 1268 self.dataOut
1269 1269 self.profileIndex
1270 1270
1271 1271 Affected:
1272 1272 self.dataOut
1273 1273 self.profileIndex
1274 1274 self.flagTimeBlock
1275 1275 self.flagIsNewBlock
1276 1276 """
1277 1277 if self.flagNoMoreFiles: return 0
1278 1278
1279 1279 self.flagTimeBlock = 0
1280 1280 self.flagIsNewBlock = 0
1281 1281
1282 1282 if self.__hasNotDataInBuffer():
1283 1283
1284 1284 if not( self.readNextBlock() ):
1285 1285 return 0
1286 1286
1287 1287 # self.updateDataHeader()
1288 1288
1289 1289 if self.flagNoMoreFiles == 1:
1290 1290 print 'Process finished'
1291 1291 return 0
1292 1292
1293 1293 #data is a 3-dimensional numpy array (profiles, heights and channels)
1294 1294
1295 1295 if self.datablock is None:
1296 1296 self.dataOut.flagNoData = True
1297 1297 return 0
1298 1298
1299 1299 self.dataOut.data = self.datablock[:,self.profileIndex,:]
1300 1300
1301 1301 self.dataOut.dtype = self.dtype
1302 1302
1303 1303 self.dataOut.nChannels = self.systemHeaderObj.nChannels
1304 1304
1305 1305 self.dataOut.nHeights = self.processingHeaderObj.nHeights
1306 1306
1307 1307 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
1308 1308
1309 1309 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
1310 1310
1311 1311 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
1312 1312
1313 1313 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
1314 1314
1315 1315 self.dataOut.channelIndexList = range(self.systemHeaderObj.nChannels)
1316 1316
1317 1317 self.dataOut.flagTimeBlock = self.flagTimeBlock
1318 1318
1319 1319 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000. + self.profileIndex * self.ippSeconds
1320 1320
1321 1321 self.dataOut.ippSeconds = self.ippSeconds
1322 1322
1323 1323 self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt
1324 1324
1325 1325 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
1326 1326
1327 1327 self.dataOut.flagShiftFFT = False
1328 1328
1329 1329 if self.processingHeaderObj.code != None:
1330 1330 self.dataOut.nCode = self.processingHeaderObj.nCode
1331 1331
1332 1332 self.dataOut.nBaud = self.processingHeaderObj.nBaud
1333 1333
1334 1334 self.dataOut.code = self.processingHeaderObj.code
1335 1335
1336 1336 self.profileIndex += 1
1337 1337
1338 1338 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
1339 1339
1340 1340 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
1341 1341
1342 1342 self.dataOut.flagNoData = False
1343 1343
1344 1344 # print self.profileIndex, self.dataOut.utctime
1345 1345 # if self.profileIndex == 800:
1346 1346 # a=1
1347 1347
1348 1348 return self.dataOut.data
1349 1349
1350 1350
1351 1351 class VoltageWriter(JRODataWriter):
1352 1352 """
1353 1353 This class writes voltage data to processed files (.r). The data is always
1354 1354 written in blocks.
1355 1355 """
1356 1356
1357 1357 ext = ".r"
1358 1358
1359 1359 optchar = "D"
1360 1360
1361 1361 shapeBuffer = None
1362 1362
1363 1363
1364 1364 def __init__(self):
1365 1365 """
1366 1366 Initializer of the VoltageWriter class for writing voltage data.
1367 1367
1368 1368 Affected:
1369 1369 self.dataOut
1370 1370
1371 1371 Return: None
1372 1372 """
1373 1373
1374 1374 self.nTotalBlocks = 0
1375 1375
1376 1376 self.profileIndex = 0
1377 1377
1378 1378 self.isConfig = False
1379 1379
1380 1380 self.fp = None
1381 1381
1382 1382 self.flagIsNewFile = 1
1383 1383
1384 1384 self.nTotalBlocks = 0
1385 1385
1386 1386 self.flagIsNewBlock = 0
1387 1387
1388 1388 self.flagNoMoreFiles = 0
1389 1389
1390 1390 self.setFile = None
1391 1391
1392 1392 self.dtype = None
1393 1393
1394 1394 self.path = None
1395 1395
1396 1396 self.noMoreFiles = 0
1397 1397
1398 1398 self.filename = None
1399 1399
1400 1400 self.basicHeaderObj = BasicHeader()
1401 1401
1402 1402 self.systemHeaderObj = SystemHeader()
1403 1403
1404 1404 self.radarControllerHeaderObj = RadarControllerHeader()
1405 1405
1406 1406 self.processingHeaderObj = ProcessingHeader()
1407 1407
1408 1408 def hasAllDataInBuffer(self):
1409 1409 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
1410 1410 return 1
1411 1411 return 0
1412 1412
1413 1413
1414 1414 def setBlockDimension(self):
1415 1415 """
1416 1416 Gets the dimensional shapes of the data sub-blocks that make up a block
1417 1417
1418 1418 Affected:
1419 1419 self.shape_spc_Buffer
1420 1420 self.shape_cspc_Buffer
1421 1421 self.shape_dc_Buffer
1422 1422
1423 1423 Return: None
1424 1424 """
1425 1425 self.shapeBuffer = (self.processingHeaderObj.profilesPerBlock,
1426 1426 self.processingHeaderObj.nHeights,
1427 1427 self.systemHeaderObj.nChannels)
1428 1428
1429 1429 self.datablock = numpy.zeros((self.systemHeaderObj.nChannels,
1430 1430 self.processingHeaderObj.profilesPerBlock,
1431 1431 self.processingHeaderObj.nHeights),
1432 1432 dtype=numpy.dtype('complex'))
1433 1433
1434 1434
1435 1435 def writeBlock(self):
1436 1436 """
1437 1437 Writes the buffer to the designated file
1438 1438
1439 1439 Affected:
1440 1440 self.profileIndex
1441 1441 self.flagIsNewFile
1442 1442 self.flagIsNewBlock
1443 1443 self.nTotalBlocks
1444 1444 self.blockIndex
1445 1445
1446 1446 Return: None
1447 1447 """
1448 1448 data = numpy.zeros( self.shapeBuffer, self.dtype )
1449 1449
1450 1450 junk = numpy.transpose(self.datablock, (1,2,0))
1451 1451
1452 1452 data['real'] = junk.real
1453 1453 data['imag'] = junk.imag
1454 1454
1455 1455 data = data.reshape( (-1) )
1456 1456
1457 1457 data.tofile( self.fp )
1458 1458
1459 1459 self.datablock.fill(0)
1460 1460
1461 1461 self.profileIndex = 0
1462 1462 self.flagIsNewFile = 0
1463 1463 self.flagIsNewBlock = 1
1464 1464
1465 1465 self.blockIndex += 1
1466 1466 self.nTotalBlocks += 1
1467 1467
1468 1468 def putData(self):
1469 1469 """
1470 1470 Fills a data block and then writes it to a file
1471 1471
1472 1472 Affected:
1473 1473 self.flagIsNewBlock
1474 1474 self.profileIndex
1475 1475
1476 1476 Return:
1477 1477 0 : if there is no data or there are no more files that can be written
1478 1478 1 : if the data of one block was written to a file
1479 1479 """
1480 1480 if self.dataOut.flagNoData:
1481 1481 return 0
1482 1482
1483 1483 self.flagIsNewBlock = 0
1484 1484
1485 1485 if self.dataOut.flagTimeBlock:
1486 1486
1487 1487 self.datablock.fill(0)
1488 1488 self.profileIndex = 0
1489 1489 self.setNextFile()
1490 1490
1491 1491 if self.profileIndex == 0:
1492 1492 self.getBasicHeader()
1493 1493
1494 1494 self.datablock[:,self.profileIndex,:] = self.dataOut.data
1495 1495
1496 1496 self.profileIndex += 1
1497 1497
1498 1498 if self.hasAllDataInBuffer():
1499 1499 #if self.flagIsNewFile:
1500 1500 self.writeNextBlock()
1501 1501 # self.getDataHeader()
1502 1502
1503 1503 if self.flagNoMoreFiles:
1504 1504 #print 'Process finished'
1505 1505 return 0
1506 1506
1507 1507 return 1
1508 1508
1509 1509 def __getProcessFlags(self):
1510 1510
1511 1511 processFlags = 0
1512 1512
1513 1513 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
1514 1514 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
1515 1515 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
1516 1516 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
1517 1517 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
1518 1518 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
1519 1519
1520 1520 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
1521 1521
1522 1522
1523 1523
1524 1524 datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
1525 1525 PROCFLAG.DATATYPE_SHORT,
1526 1526 PROCFLAG.DATATYPE_LONG,
1527 1527 PROCFLAG.DATATYPE_INT64,
1528 1528 PROCFLAG.DATATYPE_FLOAT,
1529 1529 PROCFLAG.DATATYPE_DOUBLE]
1530 1530
1531 1531
1532 1532 for index in range(len(dtypeList)):
1533 1533 if self.dataOut.dtype == dtypeList[index]:
1534 1534 dtypeValue = datatypeValueList[index]
1535 1535 break
1536 1536
1537 1537 processFlags += dtypeValue
1538 1538
1539 1539 if self.dataOut.flagDecodeData:
1540 1540 processFlags += PROCFLAG.DECODE_DATA
1541 1541
1542 1542 if self.dataOut.flagDeflipData:
1543 1543 processFlags += PROCFLAG.DEFLIP_DATA
1544 1544
1545 1545 if self.dataOut.code != None:
1546 1546 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
1547 1547
1548 1548 if self.dataOut.nCohInt > 1:
1549 1549 processFlags += PROCFLAG.COHERENT_INTEGRATION
1550 1550
1551 1551 return processFlags
1552 1552
1553 1553
1554 1554 def __getBlockSize(self):
1555 1555 '''
1556 1556 This method determines the number of bytes in a data block of type Voltage
1557 1557 '''
1558 1558
1559 1559 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
1560 1560 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
1561 1561 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
1562 1562 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
1563 1563 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
1564 1564 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
1565 1565
1566 1566 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
1567 1567 datatypeValueList = [1,2,4,8,4,8]
1568 1568 for index in range(len(dtypeList)):
1569 1569 if self.dataOut.dtype == dtypeList[index]:
1570 1570 datatypeValue = datatypeValueList[index]
1571 1571 break
1572 1572
1573 1573 blocksize = int(self.dataOut.nHeights * self.dataOut.nChannels * self.dataOut.nProfiles * datatypeValue * 2)
1574 1574
1575 1575 return blocksize
1576 1576
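A quick worked example of the block-size arithmetic above, with assumed dimensions. datatypeValue is the size in bytes of one real or imaginary part, and the factor of 2 accounts for each complex sample storing both parts:

    # hypothetical dimensions for a voltage block of short-int samples
    nHeights, nChannels, nProfiles = 1000, 8, 128
    datatypeValue = 2                        # '<i2': 2 bytes per part

    blocksize = nHeights * nChannels * nProfiles * datatypeValue * 2
    print blocksize                          # 4096000 bytes per data block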
1577 1577 def getDataHeader(self):
1578 1578
1579 1579 """
1580 1580 Gets a copy of the First Header
1581 1581
1582 1582 Affected:
1583 1583 self.systemHeaderObj
1584 1584 self.radarControllerHeaderObj
1585 1585 self.dtype
1586 1586
1587 1587 Return:
1588 1588 None
1589 1589 """
1590 1590
1591 1591 self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
1592 1592 self.systemHeaderObj.nChannels = self.dataOut.nChannels
1593 1593 self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()
1594 1594
1595 1595 self.getBasicHeader()
1596 1596
1597 1597 processingHeaderSize = 40 # bytes
1598 1598 self.processingHeaderObj.dtype = 0 # Voltage
1599 1599 self.processingHeaderObj.blockSize = self.__getBlockSize()
1600 1600 self.processingHeaderObj.profilesPerBlock = self.profilesPerBlock
1601 1601 self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
1602 1602 self.processingHeaderObj.nWindows = 1 #could be 1 or self.dataOut.processingHeaderObj.nWindows
1603 1603 self.processingHeaderObj.processFlags = self.__getProcessFlags()
1604 1604 self.processingHeaderObj.nCohInt = self.dataOut.nCohInt
1605 1605 self.processingHeaderObj.nIncohInt = 1 # when the source data is of type Voltage
1606 1606 self.processingHeaderObj.totalSpectra = 0 # when the source data is of type Voltage
1607 1607
1608 1608 if self.dataOut.code != None:
1609 1609 self.processingHeaderObj.code = self.dataOut.code
1610 1610 self.processingHeaderObj.nCode = self.dataOut.nCode
1611 1611 self.processingHeaderObj.nBaud = self.dataOut.nBaud
1612 1612 codesize = int(8 + 4 * self.dataOut.nCode * self.dataOut.nBaud)
1613 1613 processingHeaderSize += codesize
1614 1614
1615 1615 if self.processingHeaderObj.nWindows != 0:
1616 1616 self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
1617 1617 self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
1618 1618 self.processingHeaderObj.nHeights = self.dataOut.nHeights
1619 1619 self.processingHeaderObj.samplesWin = self.dataOut.nHeights
1620 1620 processingHeaderSize += 12
1621 1621
1622 1622 self.processingHeaderObj.size = processingHeaderSize
1623 1623
1624 1624 class SpectraReader(JRODataReader):
1625 1625 """
1626 1626 This class reads spectra data from processed files (.pdata). The data is always
1627 1627 read in blocks. The data read (3-dimensional arrays)
1628 1628 is stored in three buffers, for the Self Spectra, the Cross Spectra and the DC Channels.
1629 1629
1630 1630 pairsOfEqualChannels * heights * profiles (Self Spectra)
1631 1631 pairsOfDifferentChannels * heights * profiles (Cross Spectra)
1632 1632 channels * heights (DC Channels)
1633 1633
1634 1634 This class contains instances (objects) of the BasicHeader, SystemHeader,
1635 1635 RadarControllerHeader and Spectra classes. The first three are used to store the data
1636 1636 header information (metadata), and the fourth (Spectra) to obtain and store one block of
1637 1637 data from the "buffer" each time the "getData" method is called.
1638 1638
1639 1639 Example:
1640 1640 dpath = "/home/myuser/data"
1641 1641
1642 1642 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
1643 1643
1644 1644 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
1645 1645
1646 1646 readerObj = SpectraReader()
1647 1647
1648 1648 readerObj.setup(dpath, startTime, endTime)
1649 1649
1650 1650 while(True):
1651 1651
1652 1652 readerObj.getData()
1653 1653
1654 1654 print readerObj.data_spc
1655 1655
1656 1656 print readerObj.data_cspc
1657 1657
1658 1658 print readerObj.data_dc
1659 1659
1660 1660 if readerObj.flagNoMoreFiles:
1661 1661 break
1662 1662
1663 1663 """
1664 1664
1665 1665 pts2read_SelfSpectra = 0
1666 1666
1667 1667 pts2read_CrossSpectra = 0
1668 1668
1669 1669 pts2read_DCchannels = 0
1670 1670
1671 1671 ext = ".pdata"
1672 1672
1673 1673 optchar = "P"
1674 1674
1675 1675 dataOut = None
1676 1676
1677 1677 nRdChannels = None
1678 1678
1679 1679 nRdPairs = None
1680 1680
1681 1681 rdPairList = []
1682 1682
1683 1683
1684 1684 def __init__(self):
1685 1685 """
1686 1686 Initializer of the SpectraReader class for reading spectra data.
1687 1687
1688 1688 Inputs:
1689 1689 dataOut : object of the Spectra class. This object will be used to
1690 1690 store one profile of data every time a request is made
1691 1691 (getData). The profile will be obtained from the data buffer;
1692 1692 if the buffer is empty, a new read of a data block
1693 1693 will be performed.
1694 1694 If this parameter is not passed, one will be created internally.
1695 1695
1696 1696 Affected:
1697 1697 self.dataOut
1698 1698
1699 1699 Return : None
1700 1700 """
1701 1701
1702 1702 self.isConfig = False
1703 1703
1704 1704 self.pts2read_SelfSpectra = 0
1705 1705
1706 1706 self.pts2read_CrossSpectra = 0
1707 1707
1708 1708 self.pts2read_DCchannels = 0
1709 1709
1710 1710 self.datablock = None
1711 1711
1712 1712 self.utc = None
1713 1713
1714 1714 self.ext = ".pdata"
1715 1715
1716 1716 self.optchar = "P"
1717 1717
1718 1718 self.basicHeaderObj = BasicHeader()
1719 1719
1720 1720 self.systemHeaderObj = SystemHeader()
1721 1721
1722 1722 self.radarControllerHeaderObj = RadarControllerHeader()
1723 1723
1724 1724 self.processingHeaderObj = ProcessingHeader()
1725 1725
1726 1726 self.online = 0
1727 1727
1728 1728 self.fp = None
1729 1729
1730 1730 self.idFile = None
1731 1731
1732 1732 self.dtype = None
1733 1733
1734 1734 self.fileSizeByHeader = None
1735 1735
1736 1736 self.filenameList = []
1737 1737
1738 1738 self.filename = None
1739 1739
1740 1740 self.fileSize = None
1741 1741
1742 1742 self.firstHeaderSize = 0
1743 1743
1744 1744 self.basicHeaderSize = 24
1745 1745
1746 1746 self.pathList = []
1747 1747
1748 1748 self.lastUTTime = 0
1749 1749
1750 1750 self.maxTimeStep = 30
1751 1751
1752 1752 self.flagNoMoreFiles = 0
1753 1753
1754 1754 self.set = 0
1755 1755
1756 1756 self.path = None
1757 1757
1758 1758 self.delay = 3 #seconds
1759 1759
1760 1760 self.nTries = 3 #number of retries
1761 1761
1762 1762 self.nFiles = 3 #number of files for searching
1763 1763
1764 1764 self.nReadBlocks = 0
1765 1765
1766 1766 self.flagIsNewFile = 1
1767 1767
1768 1768 self.ippSeconds = 0
1769 1769
1770 1770 self.flagTimeBlock = 0
1771 1771
1772 1772 self.flagIsNewBlock = 0
1773 1773
1774 1774 self.nTotalBlocks = 0
1775 1775
1776 1776 self.blocksize = 0
1777 1777
1778 1778 self.dataOut = self.createObjByDefault()
1779 1779
1780 1780
1781 1781 def createObjByDefault(self):
1782 1782
1783 1783 dataObj = Spectra()
1784 1784
1785 1785 return dataObj
1786 1786
1787 1787 def __hasNotDataInBuffer(self):
1788 1788 return 1
1789 1789
1790 1790
1791 1791 def getBlockDimension(self):
1792 1792 """
1793 1793 Computes the number of points to read for each data block
1794 1794
1795 1795 Affected:
1796 1796 self.nRdChannels
1797 1797 self.nRdPairs
1798 1798 self.pts2read_SelfSpectra
1799 1799 self.pts2read_CrossSpectra
1800 1800 self.pts2read_DCchannels
1801 1801 self.blocksize
1802 1802 self.dataOut.nChannels
1803 1803 self.dataOut.nPairs
1804 1804
1805 1805 Return:
1806 1806 None
1807 1807 """
1808 1808 self.nRdChannels = 0
1809 1809 self.nRdPairs = 0
1810 1810 self.rdPairList = []
1811 1811
1812 1812 for i in range(0, self.processingHeaderObj.totalSpectra*2, 2):
1813 1813 if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i+1]:
1814 1814 self.nRdChannels = self.nRdChannels + 1 #pair of equal channels
1815 1815 else:
1816 1816 self.nRdPairs = self.nRdPairs + 1 #pair of different channels
1817 1817 self.rdPairList.append((self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1]))
1818 1818
1819 1819 pts2read = self.processingHeaderObj.nHeights * self.processingHeaderObj.profilesPerBlock
1820 1820
1821 1821 self.pts2read_SelfSpectra = int(self.nRdChannels * pts2read)
1822 1822 self.blocksize = self.pts2read_SelfSpectra
1823 1823
1824 1824 if self.processingHeaderObj.flag_cspc:
1825 1825 self.pts2read_CrossSpectra = int(self.nRdPairs * pts2read)
1826 1826 self.blocksize += self.pts2read_CrossSpectra
1827 1827
1828 1828 if self.processingHeaderObj.flag_dc:
1829 1829 self.pts2read_DCchannels = int(self.systemHeaderObj.nChannels * self.processingHeaderObj.nHeights)
1830 1830 self.blocksize += self.pts2read_DCchannels
1831 1831
1832 1832 # self.blocksize = self.pts2read_SelfSpectra + self.pts2read_CrossSpectra + self.pts2read_DCchannels
1833 1833
1834 1834
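The arithmetic in getBlockDimension above can be checked with a small standalone sketch; the header values below (profilesPerBlock, nHeights, spectraComb) are made up for illustration only:

    # Hypothetical ProcessingHeader values, for illustration only
    profilesPerBlock = 128
    nHeights = 100
    spectraComb = [0, 0, 1, 1, 0, 1]    # (0,0) and (1,1) are self spectra, (0,1) is a cross spectrum

    nRdChannels = 0
    nRdPairs = 0
    for i in range(0, len(spectraComb), 2):
        if spectraComb[i] == spectraComb[i+1]:
            nRdChannels += 1            # pair of equal channels -> self spectrum
        else:
            nRdPairs += 1               # pair of different channels -> cross spectrum

    pts2read = nHeights * profilesPerBlock            # 12800 points per channel/pair
    pts2read_SelfSpectra = nRdChannels * pts2read     # 2 * 12800 = 25600 real values
    pts2read_CrossSpectra = nRdPairs * pts2read       # 1 * 12800 = 12800 complex values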
1835 1835 def readBlock(self):
1836 1836 """
1837 1837 Reads the data block from the current position of the file pointer (self.fp) and
1838 1838 updates all the parameters related to that data block (metadata + data). The data
1839 1839 read is stored in the buffer and the buffer counter is reset to 0.
1840 1840
1841 1841
1842 1842 Return: None
1843 1843
1844 1844 Affected variables:
1845 1845
1846 1846 self.flagIsNewFile
1847 1847 self.flagIsNewBlock
1848 1848 self.nTotalBlocks
1849 1849 self.data_spc
1850 1850 self.data_cspc
1851 1851 self.data_dc
1852 1852
1853 1853 Exceptions:
1854 1854 If a block that was read is not a valid block
1855 1855 """
1856 1856 blockOk_flag = False
1857 1857 fpointer = self.fp.tell()
1858 1858
1859 1859 spc = numpy.fromfile( self.fp, self.dtype[0], self.pts2read_SelfSpectra )
1860 1860 spc = spc.reshape( (self.nRdChannels, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #reshape into a 3D array
1861 1861
1862 1862 if self.processingHeaderObj.flag_cspc:
1863 1863 cspc = numpy.fromfile( self.fp, self.dtype, self.pts2read_CrossSpectra )
1864 1864 cspc = cspc.reshape( (self.nRdPairs, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #reshape into a 3D array
1865 1865
1866 1866 if self.processingHeaderObj.flag_dc:
1867 1867 dc = numpy.fromfile( self.fp, self.dtype, self.pts2read_DCchannels ) #int(self.processingHeaderObj.nHeights*self.systemHeaderObj.nChannels) )
1868 1868 dc = dc.reshape( (self.systemHeaderObj.nChannels, self.processingHeaderObj.nHeights) ) #reshape into a 2D array
1869 1869
1870 1870
1871 1871 if not(self.processingHeaderObj.shif_fft):
1872 1872 spc = numpy.roll( spc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #shifts to the right along axis 2 by profilesPerBlock/2 positions
1873 1873
1874 1874 if self.processingHeaderObj.flag_cspc:
1875 1875 cspc = numpy.roll( cspc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #shifts to the right along axis 2 by profilesPerBlock/2 positions
1876 1876
1877 1877
1878 1878 spc = numpy.transpose( spc, (0,2,1) )
1879 1879 self.data_spc = spc
1880 1880
1881 1881 if self.processingHeaderObj.flag_cspc:
1882 1882 cspc = numpy.transpose( cspc, (0,2,1) )
1883 1883 self.data_cspc = cspc['real'] + cspc['imag']*1j
1884 1884 else:
1885 1885 self.data_cspc = None
1886 1886
1887 1887 if self.processingHeaderObj.flag_dc:
1888 1888 self.data_dc = dc['real'] + dc['imag']*1j
1889 1889 else:
1890 1890 self.data_dc = None
1891 1891
1892 1892 self.flagIsNewFile = 0
1893 1893 self.flagIsNewBlock = 1
1894 1894
1895 1895 self.nTotalBlocks += 1
1896 1896 self.nReadBlocks += 1
1897 1897
1898 1898 return 1
1899 1899
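Note that the numpy.roll by profilesPerBlock/2 applied when shif_fft is not set is just the standard FFT shift done by hand; a minimal sanity check of that equivalence (assuming an even number of profiles, values made up) would be:

    import numpy
    nProfiles = 8
    spc = numpy.arange(2 * 3 * nProfiles, dtype=float).reshape((2, 3, nProfiles))
    rolled = numpy.roll(spc, nProfiles // 2, axis=2)       # what readBlock does
    shifted = numpy.fft.fftshift(spc, axes=(2,))           # the equivalent fftshift
    assert numpy.allclose(rolled, shifted)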
1900 1900
1901 1901 def getData(self):
1902 1902 """
1903 1903 Copies the read buffer into the "Spectra" class, together with all the parameters
1904 1904 associated with it (metadata). When there is no data left in the read buffer, a new
1905 1905 read of the data blocks has to be done using "readNextBlock".
1906 1906
1907 1907 Return:
1908 1908 0 : if there are no more files available
1909 1909 1 : if the buffer was copied successfully
1910 1910
1911 1911 Affected:
1912 1912 self.dataOut
1913 1913
1914 1914 self.flagTimeBlock
1915 1915 self.flagIsNewBlock
1916 1916 """
1917 1917
1918 1918 if self.flagNoMoreFiles: return 0
1919 1919
1920 1920 self.flagTimeBlock = 0
1921 1921 self.flagIsNewBlock = 0
1922 1922
1923 1923 if self.__hasNotDataInBuffer():
1924 1924
1925 1925 if not( self.readNextBlock() ):
1926 1926 return 0
1927 1927
1928 1928 # self.updateDataHeader()
1929 1929
1930 1930 if self.flagNoMoreFiles == 1:
1931 1931 print 'Process finished'
1932 1932 return 0
1933 1933
1934 1934 #data is a 3-dimensional numpy array (profiles, heights and channels)
1935 1935
1936 1936 if self.data_dc == None:
1937 1937 self.dataOut.flagNoData = True
1938 1938 return 0
1939 1939
1940 1940
1941 1941 self.dataOut.data_spc = self.data_spc
1942 1942
1943 1943 self.dataOut.data_cspc = self.data_cspc
1944 1944
1945 1945 self.dataOut.data_dc = self.data_dc
1946 1946
1947 1947 self.dataOut.flagTimeBlock = self.flagTimeBlock
1948 1948
1949 1949 self.dataOut.flagNoData = False
1950 1950
1951 1951 self.dataOut.dtype = self.dtype
1952 1952
1953 1953 self.dataOut.nChannels = self.nRdChannels
1954 1954
1955 1955 self.dataOut.nPairs = self.nRdPairs
1956 1956
1957 1957 self.dataOut.pairsList = self.rdPairList
1958 1958
1959 1959 self.dataOut.nHeights = self.processingHeaderObj.nHeights
1960 1960
1961 1961 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
1962 1962
1963 1963 self.dataOut.nFFTPoints = self.processingHeaderObj.profilesPerBlock
1964 1964
1965 1965 self.dataOut.nIncohInt = self.processingHeaderObj.nIncohInt
1966 1966
1967 1967
1968 1968 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
1969 1969
1970 1970 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
1971 1971
1972 1972 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
1973 1973
1974 1974 self.dataOut.channelIndexList = range(self.systemHeaderObj.nChannels)
1975 1975
1976 1976 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000.#+ self.profileIndex * self.ippSeconds
1977 1977
1978 1978 self.dataOut.ippSeconds = self.ippSeconds
1979 1979
1980 1980 self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt * self.processingHeaderObj.nIncohInt * self.dataOut.nFFTPoints
1981 1981
1982 1982 self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft
1983 1983
1984 1984 # self.profileIndex += 1
1985 1985
1986 1986 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
1987 1987
1988 1988 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
1989 1989
1990 1990 return self.dataOut.data_spc
1991 1991
1992 1992
1993 1993 class SpectraWriter(JRODataWriter):
1994 1994
1995 1995 """
1996 1996 This class writes spectra data to processed files (.pdata). Data is always
1997 1997 written in blocks.
1998 1998 """
1999 1999
2000 2000 ext = ".pdata"
2001 2001
2002 2002 optchar = "P"
2003 2003
2004 2004 shape_spc_Buffer = None
2005 2005
2006 2006 shape_cspc_Buffer = None
2007 2007
2008 2008 shape_dc_Buffer = None
2009 2009
2010 2010 data_spc = None
2011 2011
2012 2012 data_cspc = None
2013 2013
2014 2014 data_dc = None
2015 2015
2016 2016 # dataOut = None
2017 2017
2018 2018 def __init__(self):
2019 2019 """
2020 2020 Constructor of the SpectraWriter class for writing spectra data.
2021 2021
2022 2022 Affected:
2023 2023 self.dataOut
2024 2024 self.basicHeaderObj
2025 2025 self.systemHeaderObj
2026 2026 self.radarControllerHeaderObj
2027 2027 self.processingHeaderObj
2028 2028
2029 2029 Return: None
2030 2030 """
2031 2031
2032 2032 self.isConfig = False
2033 2033
2034 2034 self.nTotalBlocks = 0
2035 2035
2036 2036 self.data_spc = None
2037 2037
2038 2038 self.data_cspc = None
2039 2039
2040 2040 self.data_dc = None
2041 2041
2042 2042 self.fp = None
2043 2043
2044 2044 self.flagIsNewFile = 1
2045 2045
2046 2046 self.nTotalBlocks = 0
2047 2047
2048 2048 self.flagIsNewBlock = 0
2049 2049
2050 2050 self.flagNoMoreFiles = 0
2051 2051
2052 2052 self.setFile = None
2053 2053
2054 2054 self.dtype = None
2055 2055
2056 2056 self.path = None
2057 2057
2058 2058 self.noMoreFiles = 0
2059 2059
2060 2060 self.filename = None
2061 2061
2062 2062 self.basicHeaderObj = BasicHeader()
2063 2063
2064 2064 self.systemHeaderObj = SystemHeader()
2065 2065
2066 2066 self.radarControllerHeaderObj = RadarControllerHeader()
2067 2067
2068 2068 self.processingHeaderObj = ProcessingHeader()
2069 2069
2070 2070
2071 2071 def hasAllDataInBuffer(self):
2072 2072 return 1
2073 2073
2074 2074
2075 2075 def setBlockDimension(self):
2076 2076 """
2077 2077 Computes the dimensional shapes of the data sub-blocks that make up a block
2078 2078
2079 2079 Affected:
2080 2080 self.shape_spc_Buffer
2081 2081 self.shape_cspc_Buffer
2082 2082 self.shape_dc_Buffer
2083 2083
2084 2084 Return: None
2085 2085 """
2086 2086 self.shape_spc_Buffer = (self.dataOut.nChannels,
2087 2087 self.processingHeaderObj.nHeights,
2088 2088 self.processingHeaderObj.profilesPerBlock)
2089 2089
2090 2090 self.shape_cspc_Buffer = (self.dataOut.nPairs,
2091 2091 self.processingHeaderObj.nHeights,
2092 2092 self.processingHeaderObj.profilesPerBlock)
2093 2093
2094 2094 self.shape_dc_Buffer = (self.dataOut.nChannels,
2095 2095 self.processingHeaderObj.nHeights)
2096 2096
2097 2097
2098 2098 def writeBlock(self):
2099 2099 """
2100 2100 Writes the buffer to the designated file
2101 2101
2102 2102 Affected:
2103 2103 self.data_spc
2104 2104 self.data_cspc
2105 2105 self.data_dc
2106 2106 self.flagIsNewFile
2107 2107 self.flagIsNewBlock
2108 2108 self.nTotalBlocks
2109 2109 self.nWriteBlocks
2110 2110
2111 2111 Return: None
2112 2112 """
2113 2113
2114 2114 spc = numpy.transpose( self.data_spc, (0,2,1) )
2115 2115 if not( self.processingHeaderObj.shif_fft ):
2116 2116 spc = numpy.roll( spc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #shifts to the right along axis 2 by profilesPerBlock/2 positions
2117 2117 data = spc.reshape((-1))
2118 2118 data.tofile(self.fp)
2119 2119
2120 2120 if self.data_cspc != None:
2121 2121 data = numpy.zeros( self.shape_cspc_Buffer, self.dtype )
2122 2122 cspc = numpy.transpose( self.data_cspc, (0,2,1) )
2123 2123 if not( self.processingHeaderObj.shif_fft ):
2124 2124 cspc = numpy.roll( cspc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #shifts to the right along axis 2 by profilesPerBlock/2 positions
2125 2125 data['real'] = cspc.real
2126 2126 data['imag'] = cspc.imag
2127 2127 data = data.reshape((-1))
2128 2128 data.tofile(self.fp)
2129 2129
2130 2130 if self.data_dc != None:
2131 2131 data = numpy.zeros( self.shape_dc_Buffer, self.dtype )
2132 2132 dc = self.data_dc
2133 2133 data['real'] = dc.real
2134 2134 data['imag'] = dc.imag
2135 2135 data = data.reshape((-1))
2136 2136 data.tofile(self.fp)
2137 2137
2138 2138 self.data_spc.fill(0)
2139 2139 if self.data_dc != None: self.data_dc.fill(0)
2140 2140 if self.data_cspc != None:
2141 2141 self.data_cspc.fill(0)
2142 2142
2143 2143 self.flagIsNewFile = 0
2144 2144 self.flagIsNewBlock = 1
2145 2145 self.nTotalBlocks += 1
2146 2146 self.nWriteBlocks += 1
2147 2147 self.blockIndex += 1
2148 2148
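The real/imag split above relies on a structured numpy dtype; a minimal sketch of the same pattern with made-up shapes (1 pair, 3 heights, 4 profiles) is shown below. The final flat array is what writeBlock hands to data.tofile(self.fp):

    import numpy
    dtype = numpy.dtype([('real', '<f4'), ('imag', '<f4')])    # one of the dtypes used by this writer
    cspc = numpy.ones((1, 3, 4)) + 1j * numpy.ones((1, 3, 4))  # illustrative complex cross spectra
    data = numpy.zeros(cspc.shape, dtype)
    data['real'] = cspc.real
    data['imag'] = cspc.imag
    flat = data.reshape((-1))                                   # ready for flat.tofile(fp)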
2149 2149
2150 2150 def putData(self):
2151 2151 """
2152 2152 Sets a data block and then writes it to a file
2153 2153
2154 2154 Affected:
2155 2155 self.data_spc
2156 2156 self.data_cspc
2157 2157 self.data_dc
2158 2158
2159 2159 Return:
2160 2160 0 : if there is no data or no more files can be written
2161 2161 1 : if the data of one block was written to a file
2162 2162 """
2163 2163
2164 2164 if self.dataOut.flagNoData:
2165 2165 return 0
2166 2166
2167 2167 self.flagIsNewBlock = 0
2168 2168
2169 2169 if self.dataOut.flagTimeBlock:
2170 2170 self.data_spc.fill(0)
2171 2171 self.data_cspc.fill(0)
2172 2172 self.data_dc.fill(0)
2173 2173 self.setNextFile()
2174 2174
2175 2175 if self.flagIsNewFile == 0:
2176 2176 self.getBasicHeader()
2177 2177
2178 2178 self.data_spc = self.dataOut.data_spc
2179 2179 self.data_cspc = self.dataOut.data_cspc
2180 2180 self.data_dc = self.dataOut.data_dc
2181 2181
2182 2182 # #self.processingHeaderObj.dataBlocksPerFile)
2183 2183 if self.hasAllDataInBuffer():
2184 2184 # self.getDataHeader()
2185 2185 self.writeNextBlock()
2186 2186
2187 2187 if self.flagNoMoreFiles:
2188 2188 #print 'Process finished'
2189 2189 return 0
2190 2190
2191 2191 return 1
2192 2192
2193 2193
2194 2194 def __getProcessFlags(self):
2195 2195
2196 2196 processFlags = 0
2197 2197
2198 2198 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
2199 2199 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
2200 2200 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
2201 2201 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
2202 2202 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
2203 2203 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
2204 2204
2205 2205 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
2206 2206
2207 2207
2208 2208
2209 2209 datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
2210 2210 PROCFLAG.DATATYPE_SHORT,
2211 2211 PROCFLAG.DATATYPE_LONG,
2212 2212 PROCFLAG.DATATYPE_INT64,
2213 2213 PROCFLAG.DATATYPE_FLOAT,
2214 2214 PROCFLAG.DATATYPE_DOUBLE]
2215 2215
2216 2216
2217 2217 for index in range(len(dtypeList)):
2218 2218 if self.dataOut.dtype == dtypeList[index]:
2219 2219 dtypeValue = datatypeValueList[index]
2220 2220 break
2221 2221
2222 2222 processFlags += dtypeValue
2223 2223
2224 2224 if self.dataOut.flagDecodeData:
2225 2225 processFlags += PROCFLAG.DECODE_DATA
2226 2226
2227 2227 if self.dataOut.flagDeflipData:
2228 2228 processFlags += PROCFLAG.DEFLIP_DATA
2229 2229
2230 2230 if self.dataOut.code != None:
2231 2231 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
2232 2232
2233 2233 if self.dataOut.nIncohInt > 1:
2234 2234 processFlags += PROCFLAG.INCOHERENT_INTEGRATION
2235 2235
2236 2236 if self.dataOut.data_dc != None:
2237 2237 processFlags += PROCFLAG.SAVE_CHANNELS_DC
2238 2238
2239 2239 return processFlags
2240 2240
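Since the process flags are independent bits, the returned value is simply a sum of the PROCFLAG constants that apply; for example (assuming the PROCFLAG container already used in the method above), decoded float data with incoherent integration and saved DC channels would give:

    # Illustrative combination only
    processFlags = (PROCFLAG.DATATYPE_FLOAT +
                    PROCFLAG.DECODE_DATA +
                    PROCFLAG.INCOHERENT_INTEGRATION +
                    PROCFLAG.SAVE_CHANNELS_DC)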
2241 2241
2242 2242 def __getBlockSize(self):
2243 2243 '''
2244 2244 This method determines the number of bytes of a Spectra-type data block
2245 2245 '''
2246 2246
2247 2247 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
2248 2248 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
2249 2249 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
2250 2250 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
2251 2251 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
2252 2252 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
2253 2253
2254 2254 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
2255 2255 datatypeValueList = [1,2,4,8,4,8]
2256 2256 for index in range(len(dtypeList)):
2257 2257 if self.dataOut.dtype == dtypeList[index]:
2258 2258 datatypeValue = datatypeValueList[index]
2259 2259 break
2260 2260
2261 2261
2262 2262 pts2write = self.dataOut.nHeights * self.dataOut.nFFTPoints
2263 2263
2264 2264 pts2write_SelfSpectra = int(self.dataOut.nChannels * pts2write)
2265 2265 blocksize = (pts2write_SelfSpectra*datatypeValue)
2266 2266
2267 2267 if self.dataOut.data_cspc != None:
2268 2268 pts2write_CrossSpectra = int(self.dataOut.nPairs * pts2write)
2269 2269 blocksize += (pts2write_CrossSpectra*datatypeValue*2)
2270 2270
2271 2271 if self.dataOut.data_dc != None:
2272 2272 pts2write_DCchannels = int(self.dataOut.nChannels * self.dataOut.nHeights)
2273 2273 blocksize += (pts2write_DCchannels*datatypeValue*2)
2274 2274
2275 2275 blocksize = blocksize #* datatypeValue * 2 #FIX THIS
2276 2276
2277 2277 return blocksize
2278 2278
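As a worked example of the byte count above (hypothetical values: nChannels=2, nPairs=1, nHeights=100, nFFTPoints=128, float32 samples so datatypeValue=4):

    pts2write = 100 * 128                  # 12800 points per channel/pair
    blocksize = 2 * pts2write * 4          # self spectra, real values      -> 102400 bytes
    blocksize += 1 * pts2write * 4 * 2     # cross spectra, complex values  -> +102400 bytes
    blocksize += 2 * 100 * 4 * 2           # DC channels, complex values    -> +1600 bytes
    # blocksize == 206400 bytes per data block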
2279 2279 def getDataHeader(self):
2280 2280
2281 2281 """
2282 2282 Gets a copy of the First Header
2283 2283
2284 2284 Affected:
2285 2285 self.systemHeaderObj
2286 2286 self.radarControllerHeaderObj
2287 2287 self.dtype
2288 2288
2289 2289 Return:
2290 2290 None
2291 2291 """
2292 2292
2293 2293 self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
2294 2294 self.systemHeaderObj.nChannels = self.dataOut.nChannels
2295 2295 self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()
2296 2296
2297 2297 self.getBasicHeader()
2298 2298
2299 2299 processingHeaderSize = 40 # bytes
2300 2300 self.processingHeaderObj.dtype = 0 # Voltage
2301 2301 self.processingHeaderObj.blockSize = self.__getBlockSize()
2302 2302 self.processingHeaderObj.profilesPerBlock = self.dataOut.nFFTPoints
2303 2303 self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
2304 2304 self.processingHeaderObj.nWindows = 1 #could be 1 or self.dataOut.processingHeaderObj.nWindows
2305 2305 self.processingHeaderObj.processFlags = self.__getProcessFlags()
2306 2306 self.processingHeaderObj.nCohInt = self.dataOut.nCohInt # required to compute the timeInterval value
2307 2307 self.processingHeaderObj.nIncohInt = self.dataOut.nIncohInt
2308 2308 self.processingHeaderObj.totalSpectra = self.dataOut.nPairs + self.dataOut.nChannels
2309 2309
2310 2310 if self.processingHeaderObj.totalSpectra > 0:
2311 2311 channelList = []
2312 2312 for channel in range(self.dataOut.nChannels):
2313 2313 channelList.append(channel)
2314 2314 channelList.append(channel)
2315 2315
2316 2316 pairsList = []
2317 2317 for pair in self.dataOut.pairsList:
2318 2318 pairsList.append(pair[0])
2319 2319 pairsList.append(pair[1])
2320 2320 spectraComb = channelList + pairsList
2321 2321 spectraComb = numpy.array(spectraComb,dtype="u1")
2322 2322 self.processingHeaderObj.spectraComb = spectraComb
2323 2323 sizeOfSpcComb = len(spectraComb)
2324 2324 processingHeaderSize += sizeOfSpcComb
2325 2325
2326 2326 if self.dataOut.code != None:
2327 2327 self.processingHeaderObj.code = self.dataOut.code
2328 2328 self.processingHeaderObj.nCode = self.dataOut.nCode
2329 2329 self.processingHeaderObj.nBaud = self.dataOut.nBaud
2330 2330 nCodeSize = 4 # bytes
2331 2331 nBaudSize = 4 # bytes
2332 2332 codeSize = 4 # bytes
2333 2333 sizeOfCode = int(nCodeSize + nBaudSize + codeSize * self.dataOut.nCode * self.dataOut.nBaud)
2334 2334 processingHeaderSize += sizeOfCode
2335 2335
2336 2336 if self.processingHeaderObj.nWindows != 0:
2337 2337 self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
2338 2338 self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
2339 2339 self.processingHeaderObj.nHeights = self.dataOut.nHeights
2340 2340 self.processingHeaderObj.samplesWin = self.dataOut.nHeights
2341 2341 sizeOfFirstHeight = 4
2342 2342 sizeOfdeltaHeight = 4
2343 2343 sizeOfnHeights = 4
2344 2344 sizeOfWindows = (sizeOfFirstHeight + sizeOfdeltaHeight + sizeOfnHeights)*self.processingHeaderObj.nWindows
2345 2345 processingHeaderSize += sizeOfWindows
2346 2346
2347 2347 self.processingHeaderObj.size = processingHeaderSize
2348 2348
2349 2349 class SpectraHeisWriter():
2350 2350
2351 2351 i=0
2352 2352
2353 2353 def __init__(self, dataOut):
2354 2354
2355 2355 self.wrObj = FITS()
2356 2356 self.dataOut = dataOut
2357 2357
2358 2358 def isNumber(str):
2359 2359 """
2360 2360 Checks whether the set of characters that make up a string can be converted to a number.
2361 2361
2362 2362 Exceptions:
2363 2363 If a given string cannot be converted to a number
2364 2364 Input:
2365 2365 str, string to analyze to determine whether it can be converted to a number or not
2366 2366
2367 2367 Return:
2368 2368 True : if the string is numeric
2369 2369 False : if the string is not numeric
2370 2370 """
2371 2371 try:
2372 2372 float( str )
2373 2373 return True
2374 2374 except:
2375 2375 return False
2376 2376
2377 2377 def setup(self, wrpath,):
2378 2378
2379 2379 if not(os.path.exists(wrpath)):
2380 2380 os.mkdir(wrpath)
2381 2381
2382 2382 self.wrpath = wrpath
2383 2383 self.setFile = 0
2384 2384
2385 2385 def putData(self):
2386 2386 # self.wrObj.writeHeader(nChannels=self.dataOut.nChannels, nFFTPoints=self.dataOut.nFFTPoints)
2387 2387 #name = self.dataOut.utctime
2388 2388 name= time.localtime( self.dataOut.utctime)
2389 2389 ext=".fits"
2390 2390 #folder='D%4.4d%3.3d'%(name.tm_year,name.tm_yday)
2391 2391 subfolder = 'D%4.4d%3.3d' % (name.tm_year,name.tm_yday)
2392 2392
2393 2393 doypath = os.path.join( self.wrpath, subfolder )
2394 2394 if not( os.path.exists(doypath) ):
2395 2395 os.mkdir(doypath)
2396 2396 self.setFile += 1
2397 2397 file = 'D%4.4d%3.3d%3.3d%s' % (name.tm_year,name.tm_yday,self.setFile,ext)
2398 2398
2399 2399 filename = os.path.join(self.wrpath,subfolder, file)
2400 2400
2401 2401 # print self.dataOut.ippSeconds
2402 2402 freq=numpy.arange(-1*self.dataOut.nHeights/2.,self.dataOut.nHeights/2.)/(2*self.dataOut.ippSeconds)
2403 2403
2404 2404 col1=self.wrObj.setColF(name="freq", format=str(self.dataOut.nFFTPoints)+'E', array=freq)
2405 2405 col2=self.wrObj.writeData(name="P_Ch1",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[0,:]))
2406 2406 col3=self.wrObj.writeData(name="P_Ch2",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[1,:]))
2407 2407 col4=self.wrObj.writeData(name="P_Ch3",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[2,:]))
2408 2408 col5=self.wrObj.writeData(name="P_Ch4",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[3,:]))
2409 2409 col6=self.wrObj.writeData(name="P_Ch5",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[4,:]))
2410 2410 col7=self.wrObj.writeData(name="P_Ch6",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[5,:]))
2411 2411 col8=self.wrObj.writeData(name="P_Ch7",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[6,:]))
2412 2412 col9=self.wrObj.writeData(name="P_Ch8",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[7,:]))
2413 2413 #n=numpy.arange((100))
2414 2414 n=self.dataOut.data_spc[6,:]
2415 2415 a=self.wrObj.cFImage(n)
2416 2416 b=self.wrObj.Ctable(col1,col2,col3,col4,col5,col6,col7,col8,col9)
2417 2417 self.wrObj.CFile(a,b)
2418 2418 self.wrObj.wFile(filename)
2419 2419 return 1
2420 2420
2421 2421 class FITS:
2422 2422
2423 2423 name=None
2424 2424 format=None
2425 2425 array =None
2426 2426 data =None
2427 2427 thdulist=None
2428 2428
2429 2429 def __init__(self):
2430 2430
2431 2431 pass
2432 2432
2433 2433 def setColF(self,name,format,array):
2434 2434 self.name=name
2435 2435 self.format=format
2436 2436 self.array=array
2437 2437 a1=numpy.array([self.array],dtype=numpy.float32)
2438 2438 self.col1 = pyfits.Column(name=self.name, format=self.format, array=a1)
2439 2439 return self.col1
2440 2440
2441 2441 # def setColP(self,name,format,data):
2442 2442 # self.name=name
2443 2443 # self.format=format
2444 2444 # self.data=data
2445 2445 # a2=numpy.array([self.data],dtype=numpy.float32)
2446 2446 # self.col2 = pyfits.Column(name=self.name, format=self.format, array=a2)
2447 2447 # return self.col2
2448 2448
2449 2449 def writeHeader(self,):
2450 2450 pass
2451 2451
2452 2452 def writeData(self,name,format,data):
2453 2453 self.name=name
2454 2454 self.format=format
2455 2455 self.data=data
2456 2456 a2=numpy.array([self.data],dtype=numpy.float32)
2457 2457 self.col2 = pyfits.Column(name=self.name, format=self.format, array=a2)
2458 2458 return self.col2
2459 2459
2460 2460 def cFImage(self,n):
2461 2461 self.hdu= pyfits.PrimaryHDU(n)
2462 2462 return self.hdu
2463 2463
2464 2464 def Ctable(self,col1,col2,col3,col4,col5,col6,col7,col8,col9):
2465 2465 self.cols=pyfits.ColDefs( [col1,col2,col3,col4,col5,col6,col7,col8,col9])
2466 2466 self.tbhdu = pyfits.new_table(self.cols)
2467 2467 return self.tbhdu
2468 2468
2469 2469 def CFile(self,hdu,tbhdu):
2470 2470 self.thdulist=pyfits.HDUList([hdu,tbhdu])
2471 2471
2472 2472 def wFile(self,filename):
2473 2473 self.thdulist.writeto(filename) No newline at end of file
@@ -1,85 +1,182
1 1 import numpy
2 2 import datetime
3 3 from graphics.figure import *
4 4
5 class SpectraPlot(Figure):
6 __isConfig = None
7
8 def __init__(self):
9 self.__isConfig = False
10 self.width = 850
11 self.height = 800
12
13 def getSubplots(self):
14 ncol = int(numpy.sqrt(self.nplots)+0.9)
15 nrow = int(self.nplots*1./ncol + 0.9)
16 return nrow, ncol
17
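This heuristic packs the nplots panels into a near-square grid; a standalone version of the same rounding (example results in the comment) behaves like this:

    import numpy
    def subplots(nplots):
        ncol = int(numpy.sqrt(nplots) + 0.9)
        nrow = int(nplots * 1. / ncol + 0.9)
        return nrow, ncol
    # subplots(4) -> (2, 2), subplots(6) -> (2, 3), subplots(7) -> (3, 3)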
18
19 def setAxesWithOutProfiles(self, nrow, ncol):
20 colspan = 1
21 rowspan = 1
22 counter = 0
23
24 for y in range(nrow):
25 for x in range(ncol):
26 if counter < self.nplots:
27 # plt.subplot2grid((nrow, ncol), (y, x), colspan=colspan, rowspan=rowspan)
28 self.makeAxes(nrow, ncol, y, x, colspan, rowspan)
29 counter += 1
30
31 def setAxesWithProfiles(self, nrow, ncol):
32 colspan = 1
33 rowspan = 1
34 factor = 2
35 ncol = ncol*factor
36 counter = 0
37
38 for y in range(nrow):
39 for x in range(ncol):
40 if counter < self.nplots*factor:
41 # plt.subplot2grid((nrow, ncol), (y, x), colspan=colspan, rowspan=rowspan)
42 self.makeAxes(nrow, ncol, y, x, colspan, rowspan)
43 counter += 1
44
45 def setup(self, idfigure, wintitle, width, height, nplots, profile):
46 self.init(idfigure, wintitle, width, height, nplots)
47
48 nrow,ncol = self.getSubplots()
49
50 if profile:
51 self.setAxesWithProfiles(nrow, ncol)
52 else:
53 self.setAxesWithOutProfiles(nrow, ncol)
54
55 def run(self, dataOut, idfigure, wintitle="", channelList=None, xmin=None, xmax=None, ymin=None, ymax=None, zmin=None, zmax=None, profile=False):
56 if dataOut.isEmpty():
57 return None
58
59 if channelList == None:
60 channelList = dataOut.channelList
61
62 nplots = len(channelList)
63
64 z = 10.*numpy.log10(dataOut.data_spc[channelList,:,:])
65
66 y = dataOut.heightList
67
68 x = numpy.arange(dataOut.nFFTPoints)
69
70 if not self.__isConfig:
71 self.setup(idfigure=idfigure,
72 wintitle=wintitle,
73 width=self.width,
74 height=self.height,
75 nplots=nplots,
76 profile=profile)
77
78 if xmin == None: self.xmin = numpy.min(x)
79 if xmax == None: self.xmax = numpy.max(x)
80 if ymin == None: self.ymin = numpy.min(y)
81 if ymax == None: self.ymax = numpy.max(y)
82 if zmin == None: self.zmin = 0
83 if zmax == None: self.zmax = 90
84
85 self.__isConfig = True
86
87 ylabel = "Range[Km]"
88
89 xlabel = "m/s"
90
91 for i in range(len(self.axesList)):
92 title = "Channel %d"%i
93 axes = self.axesList[i]
94 z2 = z[i,:,:]
95 axes.pcolor(x, y, z2, self.xmin, self.xmax, self.ymin, self.ymax, self.zmin, self.zmax, xlabel, ylabel, title)
96
97
98 self.draw()
99
100
101
102
103
5 104 class Scope(Figure):
6 105 __isConfig = None
7 width = None
8 height = None
9 106
10 107 def __init__(self):
11 108 self.__isConfig = False
12 109 self.width = 850
13 110 self.height = 800
14 111
15 112 def getSubplots(self):
16 113 nrow = self.nplots
17 114 ncol = 3
18 115 return nrow, ncol
19 116
20 117 def setup(self, idfigure, wintitle, width, height, nplots):
21 118 self.init(idfigure, wintitle, width, height, nplots)
22 119
23 120 nrow,ncol = self.getSubplots()
24 121 colspan = 3
25 122 rowspan = 1
26 123
27 124 for i in range(nplots):
28 125 self.makeAxes(nrow, ncol, i, 0, colspan, rowspan)
29 126
30 127
31 128
32 129 def run(self, dataOut, idfigure, wintitle="", channelList=None, xmin=None, xmax=None, ymin=None, ymax=None):
33 130
34 131 if dataOut.isEmpty():
35 132 return None
36 133
37 134 if channelList == None:
38 135 channelList = dataOut.channelList
39 136
40 137 nplots = len(channelList)
41 138
42 139 y = dataOut.data[channelList,:] * numpy.conjugate(dataOut.data[channelList,:])
43 140 y = y.real
44 141
45 142 x = dataOut.heightList
46 143
47 144 if not self.__isConfig:
48 145 self.setup(idfigure=idfigure,
49 wintitle="Figura 1",
146 wintitle=wintitle,
50 147 width=self.width,
51 148 height=self.height,
52 149 nplots=nplots)
53 150
54 151 if xmin == None: self.xmin = numpy.min(x)
55 152 if xmax == None: self.xmax = numpy.max(x)
56 153 if ymin == None: self.ymin = numpy.min(y)
57 154 if ymax == None: self.ymax = numpy.max(y)
58 155
59 156 self.__isConfig = True
60 157
61 158
62 159
63 160 thisDatetime = datetime.datetime.fromtimestamp(dataOut.utctime)
64 161 dateTime = "%s"%(thisDatetime.strftime("%d-%b-%Y %H:%M:%S"))
65 162 date = "%s"%(thisDatetime.strftime("%d-%b-%Y"))
66 163 figuretitle = "Scope: " + dateTime
67 164
68 165 self.setTitle(title=figuretitle)
69 166
70 167 # self.setTextFromAxes(title=figuretitle)
71 168
72 169 ylabel = "Intensity"
73 170
74 171 xlabel = "Range[Km]"
75 172
76 173 for i in range(len(self.axesList)):
77 174 title = "Channel %d"%i
78 175 axes = self.axesList[i]
79 176 y2 = y[i,:]
80 177 axes.pline(x, y2, self.xmin, self.xmax, self.ymin, self.ymax, xlabel, ylabel, title)
81 178
82 179 self.draw()
83 180
84 181
85 182 No newline at end of file
@@ -1,435 +1,575
1 1 '''
2 2
3 3 $Author: dsuarez $
4 4 $Id: Processor.py 1 2012-11-12 18:56:07Z dsuarez $
5 5 '''
6 6 import os
7 7 import numpy
8 8 import datetime
9 9 import time
10 10
11 11 from jrodata import *
12 12 from jrodataIO import *
13 13 from jroplot import *
14 14
15 15 class ProcessingUnit:
16 16
17 17 """
18 18 This is the base class for data processing.
19 19
20 20 It provides the "call" method to invoke operations. Operations can be:
21 21 - Internal methods (callMethod)
22 22 - Objects of the Operation class (callObject). Before being called, these objects
23 23 have to be added with the "addOperation" method.
24 24
25 25 """
26 26 # input data object (Voltage, Spectra or Correlation)
27 27 dataIn = None
28 28
29 29 # output data object (Voltage, Spectra or Correlation)
30 30 dataOut = None
31 31
32 32
33 33 objectDict = None
34 34
35 35 def __init__(self):
36 36
37 37 self.objectDict = {}
38 38
39 39 def addOperation(self, object, objId):
40 40
41 41 """
42 42 Adds the object "object" to the object dictionary "self.objectDict" and returns the
43 43 identifier associated with this object.
44 44
45 45 Input:
46 46
47 47 object : object of the "Operation" class
48 48
49 49 Return:
50 50
51 51 objId : identifier of the object, needed to execute the operation
52 52 """
53 53
54 54 self.objectDict[objId] = object
55 55
56 56 return objId
57 57
58 58 def operation(self, **kwargs):
59 59
60 60 """
61 61 Direct operation on the data (dataOut.data). The attribute values of the dataOut
62 62 object need to be updated.
63 63
64 64 Input:
65 65
66 66 **kwargs : dictionary of arguments of the function to execute
67 67 """
68 68
69 69 raise NotImplementedError, "This method must be implemented by the subclass"
70 70
71 71 def callMethod(self, name, **kwargs):
72 72
73 73 """
74 74 Executes the method named "name" of this class, passing it the arguments **kwargs.
75 75
76 76 Input:
77 77 name : name of the method to execute
78 78
79 79 **kwargs : dictionary with the argument names and values of the function to execute.
80 80
81 81 """
82 82
83 83 methodToCall = getattr(self, name)
84 84
85 85 methodToCall(**kwargs)
86 86
87 87 def callObject(self, objId, **kwargs):
88 88
89 89 """
90 90 Executes the operation associated with the object identifier "objId"
91 91
92 92 Input:
93 93
94 94 objId : identifier of the object to execute
95 95
96 96 **kwargs : dictionary with the argument names and values of the function to execute.
97 97
98 98 Return:
99 99
100 100 None
101 101 """
102 102
103 103 object = self.objectDict[objId]
104 104
105 105 object.run(self.dataOut, **kwargs)
106 106
107 107 def call(self, operationConf, **kwargs):
108 108
109 109 """
110 110 Executes the operation "operationConf.name" with the arguments "**kwargs". The
111 111 operation can be of two types:
112 112
113 113 1. A method of this class itself:
114 114
115 115 operation.type = "self"
116 116
117 117 2. The "run" method of an object of the Operation class (or of a class derived from it):
118 118 operation.type = "other".
119 119
120 120 This Operation object must have been added beforehand with the "addOperation"
121 121 method and identified with operation.id,
122 122
123 123
124 124 that is, with the id of the operation.
125 125
126 126 Input:
127 127
128 128 operationConf : operation configuration object with the attributes: name, type and id.
129 129
130 130 """
131 131 if self.dataIn.isEmpty():
132 132 return None
133 133
134 134 if operationConf.type == 'self':
135 135 self.callMethod(operationConf.name, **kwargs)
136 136 return
137 137
138 138 if operationConf.type == 'other':
139 139 self.callObject(operationConf.id, **kwargs)
140 140 return
141 141
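For orientation, here is a minimal sketch of how the two dispatch paths are meant to be wired, assuming the VoltageProc and CohInt classes defined further below in this module (readerObj is hypothetical):

    procUnit = VoltageProc()                      # a ProcessingUnit subclass
    cohIntObj = CohInt()                          # an Operation subclass
    procUnit.addOperation(cohIntObj, objId=1)
    # procUnit.setInput(readerObj.getOutput())    # readerObj: any object exposing dataOut
    # operationConf.type == "self"  -> procUnit.callMethod("init")
    # operationConf.type == "other" -> procUnit.callObject(1, nCohInt=100)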
142 142 def setInput(self, dataIn):
143 143
144 144 self.dataIn = dataIn
145 145
146 146 def getOutput(self):
147 147
148 148 return self.dataOut
149 149
150 150 class Operation():
151 151
152 152 """
153 153 Base class for defining the additional operations that can be added to the ProcessingUnit
154 154 class and that need to accumulate information from previously processed data. Preferably,
155 155 use an accumulation buffer inside this class.
156 156
157 157 Example: coherent integration, which needs the information of the n previous profiles (buffer)
158 158
159 159 """
160 160
161 161 __buffer = None
162 162 __isConfig = False
163 163
164 164 def __init__(self):
165 165
166 166 pass
167 167
168 168 def run(self, dataIn, **kwargs):
169 169
170 170 """
171 171 Performs the required operations on dataIn.data and updates the attributes of the dataIn object.
172 172
173 173 Input:
174 174
175 175 dataIn : object of the JROData class
176 176
177 177 Return:
178 178
179 179 None
180 180
181 181 Affected:
182 182 __buffer : data reception buffer.
183 183
184 184 """
185 185
186 186 raise NotImplementedError, "This method must be implemented by the subclass"
187 187
188 188 class VoltageProc(ProcessingUnit):
189 189
190 190
191 191 def __init__(self):
192 192
193 193 self.objectDict = {}
194 194 self.dataOut = Voltage()
195 195
196 196 def init(self):
197 197
198 198 self.dataOut.copy(self.dataIn)
199 199 # There is no need to copy the dataIn attributes on every init();
200 200 # the copy should be done once per new data block
201 201
202 202 def selectChannels(self, channelList):
203 203
204 204 if self.dataIn.isEmpty():
205 205 return 0
206 206
207 207 self.selectChannelsByIndex(channelList)
208 208
209 209 def selectChannelsByIndex(self, channelIndexList):
210 210 """
211 211 Selects a block of data by channels, according to channelIndexList
212 212
213 213 Input:
214 214 channelIndexList : plain list of channel indexes to select, e.g. [2,3,7]
215 215
216 216 Affected:
217 217 self.dataOut.data
218 218 self.dataOut.channelIndexList
219 219 self.dataOut.nChannels
220 220 self.dataOut.m_ProcessingHeader.totalSpectra
221 221 self.dataOut.systemHeaderObj.numChannels
222 222 self.dataOut.m_ProcessingHeader.blockSize
223 223
224 224 Return:
225 225 None
226 226 """
227 227
228 228 for channel in channelIndexList:
229 229 if channel not in self.dataOut.channelIndexList:
230 230 raise ValueError, "The value %d in channelIndexList is not valid" %channel
231 231
232 232 nChannels = len(channelIndexList)
233 233
234 234 data = self.dataOut.data[channelIndexList,:]
235 235
236 236 self.dataOut.data = data
237 237 self.dataOut.channelIndexList = channelIndexList
238 238 self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
239 239 self.dataOut.nChannels = nChannels
240 240
241 241 return 1
242 242
243 243 class CohInt(Operation):
244 244
245 245 __profIndex = 0
246 246 __withOverapping = False
247 247
248 248 __byTime = False
249 249 __initime = None
250 250 __lastdatatime = None
251 251 __integrationtime = None
252 252
253 253 __buffer = None
254 254
255 255 __dataReady = False
256 256
257 257 nCohInt = None
258 258
259 259
260 260 def __init__(self):
261 261
262 262 self.__isConfig = False
263 263
264 264 def setup(self, nCohInt=None, timeInterval=None, overlapping=False):
265 265 """
266 266 Set the parameters of the integration class.
267 267
268 268 Inputs:
269 269
270 270 nCohInt : Number of coherent integrations
271 271 timeInterval : Time of integration. If the parameter "nCohInt" is selected this one does not work
272 272 overlapping :
273 273
274 274 """
275 275
276 276 self.__initime = None
277 277 self.__lastdatatime = 0
278 278 self.__buffer = None
279 279 self.__dataReady = False
280 280
281 281
282 282 if nCohInt == None and timeInterval == None:
283 283 raise ValueError, "nCohInt or timeInterval should be specified ..."
284 284
285 285 if nCohInt != None:
286 286 self.nCohInt = nCohInt
287 287 self.__byTime = False
288 288 else:
289 289 self.__integrationtime = timeInterval * 60. #if (type(timeInterval)!=integer) -> change this line
290 290 self.nCohInt = 9999
291 291 self.__byTime = True
292 292
293 293 if overlapping:
294 294 self.__withOverapping = True
295 295 self.__buffer = None
296 296 else:
297 297 self.__withOverapping = False
298 298 self.__buffer = 0
299 299
300 300 self.__profIndex = 0
301 301
302 302 def putData(self, data):
303 303
304 304 """
305 305 Add a profile to the __buffer and increase in one the __profileIndex
306 306
307 307 """
308 308
309 309 if not self.__withOverapping:
310 310 self.__buffer += data
311 311 self.__profIndex += 1
312 312 return
313 313
314 314 #Overlapping data
315 315 nChannels, nHeis = data.shape
316 316 data = numpy.reshape(data, (1, nChannels, nHeis))
317 317
318 318 #If the buffer is empty then it takes the data value
319 319 if self.__buffer == None:
320 320 self.__buffer = data
321 321 self.__profIndex += 1
322 322 return
323 323
324 324 #If the buffer length is lower than nCohInt then stack the data value
325 325 if self.__profIndex < self.nCohInt:
326 326 self.__buffer = numpy.vstack((self.__buffer, data))
327 327 self.__profIndex += 1
328 328 return
329 329
330 330 #If the buffer length is equal to nCohInt then replacing the last buffer value with the data value
331 331 self.__buffer = numpy.roll(self.__buffer, -1, axis=0)
332 332 self.__buffer[self.nCohInt-1] = data
333 333 self.__profIndex = self.nCohInt
334 334 return
335 335
336 336
337 337 def pushData(self):
338 338 """
339 339 Return the sum of the last profiles and the profiles used in the sum.
340 340
341 341 Affected:
342 342
343 343 self.__profileIndex
344 344
345 345 """
346 346
347 347 if not self.__withOverapping:
348 348 data = self.__buffer
349 349 nCohInt = self.__profIndex
350 350
351 351 self.__buffer = 0
352 352 self.__profIndex = 0
353 353
354 354 return data, nCohInt
355 355
356 356 #Integration with Overlapping
357 357 data = numpy.sum(self.__buffer, axis=0)
358 358 nCohInt = self.__profIndex
359 359
360 360 return data, nCohInt
361 361
362 362 def byProfiles(self, data):
363 363
364 364 self.__dataReady = False
365 365 avgdata = None
366 366 nCohInt = None
367 367
368 368 self.putData(data)
369 369
370 370 if self.__profIndex == self.nCohInt:
371 371
372 372 avgdata, nCohInt = self.pushData()
373 373 self.__dataReady = True
374 374
375 375 return avgdata
376 376
377 377 def byTime(self, data, datatime):
378 378
379 379 self.__dataReady = False
380 380 avgdata = None
381 381 nCohInt = None
382 382
383 383 self.putData(data)
384 384
385 385 if (datatime - self.__initime) >= self.__integrationtime:
386 386 avgdata, nCohInt = self.pushData()
387 387 self.nCohInt = nCohInt
388 388 self.__dataReady = True
389 389
390 390 return avgdata
391 391
392 392 def integrate(self, data, datatime=None):
393 393
394 394 if self.__initime == None:
395 395 self.__initime = datatime
396 396
397 397 if self.__byTime:
398 398 avgdata = self.byTime(data, datatime)
399 399 else:
400 400 avgdata = self.byProfiles(data)
401 401
402 402
403 403 self.__lastdatatime = datatime
404 404
405 405 if avgdata == None:
406 406 return None, None
407 407
408 408 avgdatatime = self.__initime
409 409
410 410 deltatime = datatime -self.__lastdatatime
411 411
412 412 if not self.__withOverapping:
413 413 self.__initime = datatime
414 414 else:
415 415 self.__initime += deltatime
416 416
417 417 return avgdata, avgdatatime
418 418
419 419 def run(self, dataOut, nCohInt=None, timeInterval=None, overlapping=False):
420 420
421 421 if not self.__isConfig:
422 422 self.setup(nCohInt, timeInterval, overlapping)
423 423 self.__isConfig = True
424 424
425 425 avgdata, avgdatatime = self.integrate(dataOut.data, dataOut.utctime)
426 426
427 427 # dataOut.timeInterval *= nCohInt
428 428 dataOut.flagNoData = True
429 429
430 430 if self.__dataReady:
431 431 dataOut.data = avgdata
432 432 dataOut.timeInterval *= self.nCohInt
433 433 dataOut.nCohInt *= self.nCohInt
434 434 dataOut.utctime = avgdatatime
435 dataOut.flagNoData = False No newline at end of file
435 dataOut.flagNoData = False
436
437
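A minimal usage sketch of CohInt in profile-counting mode (values made up; this assumes the Python 2 / era-appropriate numpy environment the rest of this module targets):

    import numpy
    cohIntObj = CohInt()
    cohIntObj.setup(nCohInt=10, timeInterval=None, overlapping=False)
    profile = numpy.ones((2, 4), dtype=complex)        # assumed 2 channels x 4 heights
    for n in range(10):
        avgdata, avgdatatime = cohIntObj.integrate(profile, datatime=n)
    # integrate() returns (None, None) until nCohInt profiles have been accumulated;
    # on the 10th call avgdata holds the coherent sum of the 10 profiles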
438 class SpectraProc(ProcessingUnit):
439
440 def __init__(self):
441 self.objectDict = {}
442 self.buffer = None
443 self.firstdatatime = None
444 self.profIndex = 0
445 self.dataOut = Spectra()
446
447 def init(self, nFFTPoints=None, pairsList=None):
448 if self.dataIn.type == "Spectra":
449 self.dataOut.copy(self.dataIn)
450 return
451
452 if self.dataIn.type == "Voltage":
453
454 if nFFTPoints == None:
455 raise ValueError, "This SpectraProc.setup() need nFFTPoints input variable"
456
457 if pairsList == None:
458 nPairs = 0
459 else:
460 nPairs = len(pairsList)
461
462 self.dataOut.nFFTPoints = nFFTPoints
463 self.dataOut.pairsList = pairsList
464 self.dataOut.nPairs = nPairs
465
466 if self.buffer == None:
467 self.buffer = numpy.zeros((self.dataIn.nChannels,
468 self.dataOut.nFFTPoints,
469 self.dataIn.nHeights),
470 dtype='complex')
471
472
473 self.buffer[:,self.profIndex,:] = self.dataIn.data
474 self.profIndex += 1
475
476 if self.firstdatatime == None:
477 self.firstdatatime = self.dataIn.utctime
478
479 if self.profIndex == self.dataOut.nFFTPoints:
480 self.__updateObjFromInput()
481 self.__getFft()
482
483 self.dataOut.flagNoData = False
484
485 self.buffer = None
486 self.firstdatatime = None
487 self.profIndex = 0
488
489 return
490
491 raise ValueError, "The object type %s is not valid"%(self.dataIn.type)
492
493 def __updateObjFromInput(self):
494
495 self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()
496 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()
497 self.dataOut.channelList = self.dataIn.channelList
498 self.dataOut.heightList = self.dataIn.heightList
499 self.dataOut.dtype = self.dataIn.dtype
500 self.dataOut.nHeights = self.dataIn.nHeights
501 self.dataOut.nChannels = self.dataIn.nChannels
502 self.dataOut.nBaud = self.dataIn.nBaud
503 self.dataOut.nCode = self.dataIn.nCode
504 self.dataOut.code = self.dataIn.code
505 self.dataOut.nProfiles = self.dataOut.nFFTPoints
506 self.dataOut.channelIndexList = self.dataIn.channelIndexList
507 self.dataOut.flagTimeBlock = self.dataIn.flagTimeBlock
508 self.dataOut.utctime = self.firstdatatime
509 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData #assuming the data is already decoded
510 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData #assuming the data is already deflipped
511 self.dataOut.flagShiftFFT = self.dataIn.flagShiftFFT
512 self.dataOut.nCohInt = self.dataIn.nCohInt
513 self.dataOut.nIncohInt = 1
514 self.dataOut.ippSeconds = self.dataIn.ippSeconds
515 self.dataOut.timeInterval = self.dataIn.timeInterval*self.dataOut.nFFTPoints
516
517 def __getFft(self):
518 """
519 Converts Voltage values into Spectra
520
521 Affected:
522 self.dataOut.data_spc
523 self.dataOut.data_cspc
524 self.dataOut.data_dc
525 self.dataOut.heightList
526 self.dataOut.m_BasicHeader
527 self.dataOut.m_ProcessingHeader
528 self.dataOut.radarControllerHeaderObj
529 self.dataOut.systemHeaderObj
530 self.profIndex
531 self.buffer
532 self.dataOut.flagNoData
533 self.dataOut.dtype
534 self.dataOut.nPairs
535 self.dataOut.nChannels
536 self.dataOut.nProfiles
537 self.dataOut.systemHeaderObj.numChannels
538 self.dataOut.m_ProcessingHeader.totalSpectra
539 self.dataOut.m_ProcessingHeader.profilesPerBlock
540 self.dataOut.m_ProcessingHeader.numHeights
541 self.dataOut.m_ProcessingHeader.spectraComb
542 self.dataOut.m_ProcessingHeader.shif_fft
543 """
544 fft_volt = numpy.fft.fft(self.buffer,axis=1)
545 dc = fft_volt[:,0,:]
546
547 #self-spectra computation
548 fft_volt = numpy.fft.fftshift(fft_volt,axes=(1,))
549 spc = fft_volt * numpy.conjugate(fft_volt)
550 spc = spc.real
551
552 blocksize = 0
553 blocksize += dc.size
554 blocksize += spc.size
555
556 cspc = None
557 pairIndex = 0
558 if self.dataOut.pairsList != None:
559 #cross-spectra computation
560 cspc = numpy.zeros((self.dataOut.nPairs, self.dataOut.nFFTPoints, self.dataOut.nHeights), dtype='complex')
561 for pair in self.dataOut.pairsList:
562 cspc[pairIndex,:,:] = numpy.abs(fft_volt[pair[0],:,:] * numpy.conjugate(fft_volt[pair[1],:,:]))
563 pairIndex += 1
564 blocksize += cspc.size
565
566 self.dataOut.data_spc = spc
567 self.dataOut.data_cspc = cspc
568 self.dataOut.data_dc = dc
569 self.dataOut.blockSize = blocksize
570
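In essence, for each channel and height __getFft takes nFFTPoints voltage profiles, keeps bin 0 of the unshifted FFT as the DC value, and forms the shifted power spectrum; a one-channel, one-height sketch with made-up samples:

    import numpy
    volts = numpy.random.randn(8) + 1j * numpy.random.randn(8)   # 8 voltage profiles
    fft_volt = numpy.fft.fft(volts)
    dc = fft_volt[0]                                              # DC component, taken before the shift
    fft_volt = numpy.fft.fftshift(fft_volt)
    spc = (fft_volt * numpy.conjugate(fft_volt)).real             # self (power) spectrum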
571
572 class IncohInt(Operation):
573
574 def __init__(self):
575 pass No newline at end of file
@@ -1,80 +1,103
1 1 """
2 2 $Author$
3 3 $Id$
4 4
5 5 """
6 6 import datetime
7 7 from controller import *
8 8 from model import *
9 9
10 10
11 11 class Test():
12 12 def __init__(self):
13 13 self.createObjects()
14 14 self.run()
15 15
16 16 def createObjects(self):
17 17
18 18 self.upConfig = controller.UPConf(id=1, name="voltageproc", type="voltage")
19 19
20 20 opConf = self.upConfig.addOperation(name="init", priority=0)
21 21
22 22 opConf1 = self.upConfig.addOperation(name="CohInt", priority=1, type="other")
23 23 opConf1.addParameter(name="nCohInt", value=100)
24 24
25 25 opConf2 = self.upConfig.addOperation(name="Scope", priority=2, type="other")
26 26 opConf2.addParameter(name="idfigure", value=1)
27 27
28
29 self.upConfigSpc = controller.UPConf(id=2, name="spectraproc", type="spectra")
30 opConf = self.upConfigSpc.addOperation(name="init", priority=0)
31 opConf.addParameter(name="nFFTPoints", value=8)
32
33 opConf3 = self.upConfigSpc.addOperation(name="SpectraPlot", priority=1, type="other")
34 opConf3.addParameter(name="idfigure", value=2)
35
28 36 # opConf = self.upConfig.addOperation(name="selectChannels", priority=3)
29 37 # opConf.addParameter(name="channelList", value=[0,1])
30 38
31 39
32 40 #########################################
33 41 self.objR = jrodataIO.VoltageReader()
34 42 self.objP = jroprocessing.VoltageProc()
43 self.objSpc = jroprocessing.SpectraProc()
35 44
36 45 self.objInt = jroprocessing.CohInt()
37 46
38 47 self.objP.addOperation(self.objInt, opConf1.id)
39 48
40 49 self.objScope = jroplot.Scope()
41 50
42 51 self.objP.addOperation(self.objScope, opConf2.id)
43 52
53 self.objSpcPlot = jroplot.SpectraPlot()
54
55 self.objSpc.addOperation(self.objSpcPlot, opConf3.id)
56
44 57 self.connect(self.objR, self.objP)
45 58
59 self.connect(self.objP, self.objSpc)
60
46 61 def connect(self, obj1, obj2):
47 obj2.dataIn = obj1.dataOut
62 obj2.setInput(obj1.getOutput())
48 63
49 64 def run(self):
50 65
51 66 while(True):
52 67 self.objR.run(path="/Users/dsuarez/Remote/Meteors",
53 68 startDate=datetime.date(2012,1,1),
54 69 endDate=datetime.date(2012,12,30),
55 70 startTime=datetime.time(0,0,0),
56 71 endTime=datetime.time(23,59,59),
57 72 set=0,
58 73 expLabel = "",
59 74 ext = None,
60 75 online = False)
61 76
62 77 for opConf in self.upConfig.getOperationObjList():
63 78 kwargs={}
64 79 for parm in opConf.getParameterObjList():
65 80 kwargs[parm.name]=parm.value
66 81
67 82 self.objP.call(opConf,**kwargs)
83
84 ############################
85 for opConfSpc in self.upConfigSpc.getOperationObjList():
86 kwargs={}
87 for parm in opConfSpc.getParameterObjList():
88 kwargs[parm.name]=parm.value
89
90 self.objSpc.call(opConfSpc,**kwargs)
68 91
69 92 if self.objR.flagNoMoreFiles:
70 93 break
71 94
72 95 if self.objR.flagIsNewBlock:
73 96 print 'Block No %04d, Time: %s' %(self.objR.nTotalBlocks,
74 97 datetime.datetime.fromtimestamp(self.objR.basicHeaderObj.utc + self.objR.basicHeaderObj.miliSecond/1000.0),)
75 98
76 99
77 100
78 101
79 102 if __name__ == "__main__":
80 103 Test() No newline at end of file