-Grouping of the Voltage and Spectra classes into the JROData module...
Miguel Valdez -
r137:6bce11d32050
@@ -1,76 +1,171
1 1 '''
2 2
3 3 $Author$
4 4 $Id$
5 5 '''
6 6
7 7 import os, sys
8 8 import copy
9 9 import numpy
10 10
11 11 path = os.path.split(os.getcwd())[0]
12 12 sys.path.append(path)
13 13
14 from IO.JROHeader import SystemHeader, RadarControllerHeader
14 from IO.JROHeaderIO import SystemHeader, RadarControllerHeader
15 15
16 16 class JROData:
17 17
18 18 # m_BasicHeader = BasicHeader()
19 19 # m_ProcessingHeader = ProcessingHeader()
20 20
21 21 systemHeaderObj = SystemHeader()
22 22
23 23 radarControllerHeaderObj = RadarControllerHeader()
24 24
25 25 # data = None
26 26
27 27 type = None
28 28
29 29 dtype = None
30 30
31 31 nChannels = None
32 32
33 33 nHeights = None
34 34
35 35 nProfiles = None
36 36
37 37 heightList = None
38 38
39 39 channelList = None
40 40
41 41 channelIndexList = None
42 42
43 43 flagNoData = False
44 44
45 45 flagTimeBlock = False
46 46
47 47 dataUtcTime = None
48 48
49 49 nCode = None
50 50
51 51 nBaud = None
52 52
53 53 code = None
54 54
55 55 flagDecodeData = True #assume the data is already decoded
56 56
57 57 flagDeflipData = True #assume the data has not been flipped
58 58
59 59 flagShiftFFT = False
60 60
61 61
62 62 def __init__(self):
63 63
64 64 raise ValueError, "This class has not been implemented"
65 65
66 66 def copy(self, inputObj=None):
67 67
68 68 if inputObj == None:
69 69 return copy.deepcopy(self)
70 70
71 71 for key in inputObj.__dict__.keys():
72 72 self.__dict__[key] = inputObj.__dict__[key]
73 73
74 74 def deepcopy(self):
75 75
76 return copy.deepcopy(self) No newline at end of file
76 return copy.deepcopy(self)
77
78 class Voltage(JROData):
79
80 nCohInt = None
81
82 data = None
83
84 def __init__(self):
85 '''
86 Constructor
87 '''
88
89 self.m_RadarControllerHeader = RadarControllerHeader()
90
91 self.m_SystemHeader = SystemHeader()
92
93 self.type = "Voltage"
94
95 #data is a 2-dimensional numpy array (channels, heights)
96 self.data = None
97
98 self.dtype = None
99
100 self.nChannels = 0
101
102 self.nHeights = 0
103
104 self.nProfiles = None
105
106 self.heightList = None
107
108 self.channelList = None
109
110 self.channelIndexList = None
111
112 self.flagNoData = True
113
114 self.flagTimeBlock = False
115
116 self.dataUtcTime = None
117
118 self.nCohInt = None
119
120 class Spectra(JROData):
121
122 data_spc = None
123
124 data_cspc = None
125
126 data_dc = None
127
128 nFFTPoints = None
129
130 nPairs = None
131
132 pairsList = None
133
134 nIncohInt = None
135
136 def __init__(self):
137 '''
138 Constructor
139 '''
140
141 self.m_RadarControllerHeader = RadarControllerHeader()
142
143 self.m_SystemHeader = SystemHeader()
144
145 self.type = "Spectra"
146
147 #data is a 2-dimensional numpy array (channels, heights)
148 # self.data = None
149
150 self.dtype = None
151
152 self.nChannels = 0
153
154 self.nHeights = 0
155
156 self.nProfiles = None
157
158 self.heightList = None
159
160 self.channelList = None
161
162 self.channelIndexList = None
163
164 self.flagNoData = True
165
166 self.flagTimeBlock = False
167
168 self.dataUtcTime = None
169
170 self.nIncohInt = None
171 No newline at end of file
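A minimal sketch of how the grouped container classes might be used after this refactor. The import path Data.JROData matches the imports changed later in this changeset; the printed defaults come from the class initializers above, and the snippet itself is illustrative, not part of the commit.

    from Data.JROData import Voltage, Spectra

    voltageObj = Voltage()
    print voltageObj.type          # "Voltage"
    print voltageObj.nChannels     # 0 until a reader fills it in
    print voltageObj.flagNoData    # True until getData() copies a profile

    spectraObj = Spectra()
    print spectraObj.type          # "Spectra"
    print spectraObj.nFFTPoints    # None until set by SpectraReader.getData()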
@@ -1,757 +1,757
1 1 '''
2 2
3 3 $Author$
4 4 $Id$
5 5 '''
6 6
7 7 import os, sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import time, datetime
13 13
14 14 path = os.path.split(os.getcwd())[0]
15 15 sys.path.append(path)
16 16
17 from JROHeader import *
17 from JROHeaderIO import *
18 18 from Data.JROData import JROData
19 19
20 20 def isNumber(str):
21 21 """
22 22     Checks whether the set of characters composing a string can be converted to a number.
23 23 
24 24     Exceptions:
25 25         If a given string cannot be converted to a number
26 26     Input:
27 27         str, string to analyze to determine whether or not it can be converted to a number
28 28 
29 29     Return:
30 30         True : if the string is numeric
31 31         False : if the string is not numeric
32 32 """
33 33 try:
34 34 float( str )
35 35 return True
36 36 except:
37 37 return False
38 38
39 39 def isThisFileinRange(filename, startUTSeconds, endUTSeconds):
40 40 """
41 41     This function determines whether or not a data file falls within the specified date range.
42 42 
43 43     Inputs:
44 44         filename : full name of the data file in Jicamarca format (.r)
45 45 
46 46         startUTSeconds : start date of the selected range. The date is given in
47 47                          seconds counted from 01/01/1970.
48 48         endUTSeconds : end date of the selected range. The date is given in
49 49                        seconds counted from 01/01/1970.
50 50 
51 51     Return:
52 52         Boolean : returns True if the data file contains data within the specified
53 53                   date range, otherwise returns False.
54 54 
55 55     Exceptions:
56 56         If the file does not exist or cannot be opened
57 57         If the header cannot be read.
58 58
59 59 """
60 60 basicHeaderObj = BasicHeader()
61 61
62 62 try:
63 63 fp = open(filename,'rb')
64 64 except:
65 65 raise IOError, "The file %s can't be opened" %(filename)
66 66
67 67 sts = basicHeaderObj.read(fp)
68 68 fp.close()
69 69
70 70 if not(sts):
71 71     print "Skipping the file %s because it does not have a valid header" %(filename)
72 72 return 0
73 73
74 74 if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
75 75 return 0
76 76
77 77 return 1
78 78
79 79
80 80
81 81
82 82 class JRODataIO:
83 83
84 84 c = 3E8
85 85
86 86 basicHeaderObj = BasicHeader()
87 87
88 88 systemHeaderObj = SystemHeader()
89 89
90 90 radarControllerHeaderObj = RadarControllerHeader()
91 91
92 92 processingHeaderObj = ProcessingHeader()
93 93
94 94 online = 0
95 95
96 96 dtype = None
97 97
98 98 pathList = []
99 99
100 100 filenameList = []
101 101
102 102 filename = None
103 103
104 104 ext = None
105 105
106 106 flagNoMoreFiles = 0
107 107
108 108 flagIsNewFile = 1
109 109
110 110 flagTimeBlock = 0
111 111
112 112 flagIsNewBlock = 0
113 113
114 114 fp = None
115 115
116 116 firstHeaderSize = 0
117 117
118 118 basicHeaderSize = 24
119 119
120 120 versionFile = 1103
121 121
122 122 fileSize = None
123 123
124 124 ippSeconds = None
125 125
126 126 fileSizeByHeader = None
127 127
128 128 fileIndex = None
129 129
130 130 profileIndex = None
131 131
132 132 blockIndex = None
133 133
134 134 nTotalBlocks = None
135 135
136 136 maxTimeStep = 30
137 137
138 138 lastUTTime = None
139 139
140 140 datablock = None
141 141
142 142 dataOutObj = None
143 143
144 144 blocksize = None
145 145
146 146 def __init__(self):
147 147 pass
148 148
149 149 class JRODataReader(JRODataIO):
150 150
151 151 nReadBlocks = 0
152 152
153 153 def __init__(self):
154 154
155 155 pass
156 156
157 157 def createObjByDefault(self):
158 158 """
159 159
160 160 """
161 161 raise ValueError, "This method has not been implemented"
162 162
163 163 def getBlockDimension(self):
164 164
165 165 raise ValueError, "No implemented"
166 166
167 167 def __searchFilesOffLine(self,
168 168 path,
169 169 startDate,
170 170 endDate,
171 171 startTime=datetime.time(0,0,0),
172 172 endTime=datetime.time(23,59,59),
173 173 set=None,
174 174 expLabel="",
175 175 ext=".r"):
176 176 dirList = []
177 177 for thisPath in os.listdir(path):
178 178 if os.path.isdir(os.path.join(path,thisPath)):
179 179 dirList.append(thisPath)
180 180
181 181 if not(dirList):
182 182 return None, None
183 183
184 184 pathList = []
185 185 dateList = []
186 186
187 187 thisDate = startDate
188 188
189 189 while(thisDate <= endDate):
190 190 year = thisDate.timetuple().tm_year
191 191 doy = thisDate.timetuple().tm_yday
192 192
193 193 match = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy))
194 194 if len(match) == 0:
195 195 thisDate += datetime.timedelta(1)
196 196 continue
197 197
198 198 pathList.append(os.path.join(path,match[0],expLabel))
199 199 dateList.append(thisDate)
200 200 thisDate += datetime.timedelta(1)
201 201
202 202 filenameList = []
203 203 for index in range(len(pathList)):
204 204
205 205 thisPath = pathList[index]
206 206 fileList = glob.glob1(thisPath, "*%s" %ext)
207 207 fileList.sort()
208 208
209 209     #Search for data within the specified time range
210 210 thisDate = dateList[index]
211 211 startDT = datetime.datetime.combine(thisDate, startTime)
212 212 endDT = datetime.datetime.combine(thisDate, endTime)
213 213
214 214 startUtSeconds = time.mktime(startDT.timetuple())
215 215 endUtSeconds = time.mktime(endDT.timetuple())
216 216
217 217 for file in fileList:
218 218
219 219 filename = os.path.join(thisPath,file)
220 220
221 221 if isThisFileinRange(filename, startUtSeconds, endUtSeconds):
222 222 filenameList.append(filename)
223 223
224 224 if not(filenameList):
225 225 return None, None
226 226
227 227 self.filenameList = filenameList
228 228
229 229 return pathList, filenameList
230 230
231 231 def setup(self,dataOutObj=None,
232 232 path=None,
233 233 startDate=None,
234 234 endDate=None,
235 235 startTime=datetime.time(0,0,0),
236 236 endTime=datetime.time(23,59,59),
237 237 set=0,
238 238 expLabel = "",
239 239 ext = None,
240 240 online = 0):
241 241
242 242 if path == None:
243 243 raise ValueError, "The path is not valid"
244 244
245 245 if ext == None:
246 246 ext = self.ext
247 247
248 248 if dataOutObj == None:
249 249 dataOutObj = self.createObjByDefault()
250 250
251 251 self.dataOutObj = dataOutObj
252 252
253 253 if online:
254 254 pass
255 255
256 256 else:
257 257 print "Searching file in offline mode"
258 258 pathList, filenameList = self.__searchFilesOffLine(path, startDate, endDate, startTime, endTime, set, expLabel, ext)
259 259 if not(pathList):
260 260 print "No files in range: %s - %s"%(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
261 261 return None
262 262 self.fileIndex = -1
263 263 self.pathList = pathList
264 264 self.filenameList = filenameList
265 265
266 266 self.online = online
267 267 ext = ext.lower()
268 268 self.ext = ext
269 269
270 270 if not(self.setNextFile()):
271 271 if (startDate!=None) and (endDate!=None):
272 272 print "No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
273 273 elif startDate != None:
274 274 print "No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
275 275 else:
276 276 print "No files"
277 277
278 278 return None
279 279
280 280 # self.updateDataHeader()
281 281
282 282 return self.dataOutObj
283 283
284 284 def __setNextFileOffline(self):
285 285 idFile = self.fileIndex
286 286
287 287 while (True):
288 288 idFile += 1
289 289 if not(idFile < len(self.filenameList)):
290 290 self.flagNoMoreFiles = 1
291 291 print "No more Files"
292 292 return 0
293 293
294 294 filename = self.filenameList[idFile]
295 295
296 296 if not(self.__verifyFile(filename)):
297 297 continue
298 298
299 299 fileSize = os.path.getsize(filename)
300 300 fp = open(filename,'rb')
301 301 break
302 302
303 303 self.flagIsNewFile = 1
304 304 self.fileIndex = idFile
305 305 self.filename = filename
306 306 self.fileSize = fileSize
307 307 self.fp = fp
308 308
309 309 print "Setting the file: %s"%self.filename
310 310
311 311 return 1
312 312
313 313
314 314
315 315 def setNextFile(self):
316 316 if self.fp != None:
317 317 self.fp.close()
318 318
319 319 if self.online:
320 320 newFile = self.__setNextFileOnline()
321 321 else:
322 322 newFile = self.__setNextFileOffline()
323 323
324 324 if not(newFile):
325 325 return 0
326 326
327 327 self.__readFirstHeader()
328 328 self.nReadBlocks = 0
329 329 return 1
330 330
331 331 def __setNewBlock(self):
332 332 if self.fp == None:
333 333 return 0
334 334
335 335 if self.flagIsNewFile:
336 336 return 1
337 337
338 338 self.lastUTTime = self.basicHeaderObj.utc
339 339 currentSize = self.fileSize - self.fp.tell()
340 340 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
341 341
342 342 if (currentSize >= neededSize):
343 343 self.__rdBasicHeader()
344 344 return 1
345 345
346 346 if not(self.setNextFile()):
347 347 return 0
348 348
349 349 deltaTime = self.basicHeaderObj.utc - self.lastUTTime #
350 350
351 351 self.flagTimeBlock = 0
352 352
353 353 if deltaTime > self.maxTimeStep:
354 354 self.flagTimeBlock = 1
355 355
356 356 return 1
357 357
358 358
359 359 def readNextBlock(self):
360 360 if not(self.__setNewBlock()):
361 361 return 0
362 362
363 363 if not(self.readBlock()):
364 364 return 0
365 365
366 366 return 1
367 367
368 368 def __rdProcessingHeader(self, fp=None):
369 369 if fp == None:
370 370 fp = self.fp
371 371
372 372 self.processingHeaderObj.read(fp)
373 373
374 374 def __rdRadarControllerHeader(self, fp=None):
375 375 if fp == None:
376 376 fp = self.fp
377 377
378 378 self.radarControllerHeaderObj.read(fp)
379 379
380 380 def __rdSystemHeader(self, fp=None):
381 381 if fp == None:
382 382 fp = self.fp
383 383
384 384 self.systemHeaderObj.read(fp)
385 385
386 386 def __rdBasicHeader(self, fp=None):
387 387 if fp == None:
388 388 fp = self.fp
389 389
390 390 self.basicHeaderObj.read(fp)
391 391
392 392
393 393 def __readFirstHeader(self):
394 394 self.__rdBasicHeader()
395 395 self.__rdSystemHeader()
396 396 self.__rdRadarControllerHeader()
397 397 self.__rdProcessingHeader()
398 398
399 399 self.firstHeaderSize = self.basicHeaderObj.size
400 400
401 401 datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
402 402 if datatype == 0:
403 403 datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
404 404 elif datatype == 1:
405 405 datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')])
406 406 elif datatype == 2:
407 407 datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')])
408 408 elif datatype == 3:
409 409 datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')])
410 410 elif datatype == 4:
411 411 datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')])
412 412 elif datatype == 5:
413 413 datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')])
414 414 else:
415 415 raise ValueError, 'Data type was not defined'
416 416
417 417 self.dtype = datatype_str
418 418 self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
419 419 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1)
420 420 # self.dataOutObj.channelList = numpy.arange(self.systemHeaderObj.numChannels)
421 421 # self.dataOutObj.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
422 422 self.getBlockDimension()
423 423
424 424
425 425 def __verifyFile(self, filename, msgFlag=True):
426 426 msg = None
427 427 try:
428 428 fp = open(filename, 'rb')
429 429 currentPosition = fp.tell()
430 430 except:
431 431 if msgFlag:
432 432 print "The file %s can't be opened" % (filename)
433 433 return False
434 434
435 435 neededSize = self.processingHeaderObj.blockSize + self.firstHeaderSize
436 436
437 437 if neededSize == 0:
438 438 basicHeaderObj = BasicHeader()
439 439 systemHeaderObj = SystemHeader()
440 440 radarControllerHeaderObj = RadarControllerHeader()
441 441 processingHeaderObj = ProcessingHeader()
442 442
443 443 try:
444 444 if not( basicHeaderObj.read(fp) ): raise ValueError
445 445 if not( systemHeaderObj.read(fp) ): raise ValueError
446 446 if not( radarControllerHeaderObj.read(fp) ): raise ValueError
447 447 if not( processingHeaderObj.read(fp) ): raise ValueError
448 448 data_type = int(numpy.log2((processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
449 449
450 450 neededSize = processingHeaderObj.blockSize + basicHeaderObj.size
451 451
452 452 except:
453 453 if msgFlag:
454 454     print "\tThe file %s is empty or does not have enough data" % filename
455 455
456 456 fp.close()
457 457 return False
458 458 else:
459 459     msg = "\tSkipping the file %s because it does not have enough data" %filename
460 460
461 461 fp.close()
462 462 fileSize = os.path.getsize(filename)
463 463 currentSize = fileSize - currentPosition
464 464 if currentSize < neededSize:
465 465 if msgFlag and (msg != None):
466 466     print msg #print "\tSkipping the file %s because it does not have enough data" %filename
467 467 return False
468 468
469 469 return True
470 470
471 471 def getData():
472 472 pass
473 473
474 474 def hasNotDataInBuffer():
475 475 pass
476 476
477 477 def readBlock():
478 478 pass
479 479
480 480 class JRODataWriter(JRODataIO):
481 481
482 482 """
483 483 Esta clase permite escribir datos a archivos procesados (.r o ,pdata). La escritura
484 484 de los datos siempre se realiza por bloques.
485 485 """
486 486
487 487 blockIndex = 0
488 488
489 489 path = None
490 490
491 491 setFile = None
492 492
493 493 profilesPerBlock = None
494 494
495 495 blocksPerFile = None
496 496
497 497 nWriteBlocks = 0
498 498
499 499 def __init__(self, dataOutObj=None):
500 500 raise ValueError, "Not implemented"
501 501
502 502
503 503 def hasAllDataInBuffer(self):
504 504 raise ValueError, "Not implemented"
505 505
506 506
507 507 def setBlockDimension(self):
508 508 raise ValueError, "Not implemented"
509 509
510 510
511 511 def writeBlock(self):
512 512 raise ValueError, "No implemented"
513 513
514 514
515 515 def putData(self):
516 516 raise ValueError, "No implemented"
517 517
518 518
519 519 def __writeFirstHeader(self):
520 520 """
521 521     Writes the first header of the file, i.e. the Basic header and the Long header (SystemHeader, RadarControllerHeader, ProcessingHeader)
522 522
523 523 Affected:
524 524 __dataType
525 525
526 526 Return:
527 527 None
528 528 """
529 529
530 530     # COMPUTE PARAMETERS
531 531
532 532 sizeLongHeader = self.systemHeaderObj.size + self.radarControllerHeaderObj.size + self.processingHeaderObj.size
533 533 self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader
534 534
535 535 self.__writeBasicHeader()
536 536 self.__wrSystemHeader()
537 537 self.__wrRadarControllerHeader()
538 538 self.__wrProcessingHeader()
539 539 self.dtype = self.dataOutObj.dtype
540 540
541 541
542 542 def __writeBasicHeader(self, fp=None):
543 543 """
544 544     Writes only the Basic header to the created file
545 545
546 546 Return:
547 547 None
548 548 """
549 549 if fp == None:
550 550 fp = self.fp
551 551
552 552 self.basicHeaderObj.write(fp)
553 553
554 554
555 555 def __wrSystemHeader(self, fp=None):
556 556 """
557 557     Writes only the System header to the created file
558 558
559 559 Return:
560 560 None
561 561 """
562 562 if fp == None:
563 563 fp = self.fp
564 564
565 565 self.systemHeaderObj.write(fp)
566 566
567 567
568 568 def __wrRadarControllerHeader(self, fp=None):
569 569 """
570 570     Writes only the RadarController header to the created file
571 571
572 572 Return:
573 573 None
574 574 """
575 575 if fp == None:
576 576 fp = self.fp
577 577
578 578 self.radarControllerHeaderObj.write(fp)
579 579
580 580
581 581 def __wrProcessingHeader(self, fp=None):
582 582 """
583 583     Writes only the Processing header to the created file
584 584
585 585 Return:
586 586 None
587 587 """
588 588 if fp == None:
589 589 fp = self.fp
590 590
591 591 self.processingHeaderObj.write(fp)
592 592
593 593
594 594 def setNextFile(self):
595 595 """
596 596     Determines the next file to be written
597 597
598 598 Affected:
599 599 self.filename
600 600 self.subfolder
601 601 self.fp
602 602 self.setFile
603 603 self.flagIsNewFile
604 604
605 605 Return:
606 606         0 : if the file cannot be written
607 607         1 : if the file is ready to be written
608 608 """
609 609 ext = self.ext
610 610 path = self.path
611 611
612 612 if self.fp != None:
613 613 self.fp.close()
614 614
615 615 timeTuple = time.localtime( self.dataOutObj.dataUtcTime)
616 616 subfolder = 'D%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
617 617
618 618 doypath = os.path.join( path, subfolder )
619 619 if not( os.path.exists(doypath) ):
620 620 os.mkdir(doypath)
621 621     self.setFile = -1 #initialize the set counter
622 622 else:
623 623 filesList = os.listdir( doypath )
624 624 if len( filesList ) > 0:
625 625 filesList = sorted( filesList, key=str.lower )
626 626 filen = filesList[-1]
627 627     # the filename should have the following format
628 628 # 0 1234 567 89A BCDE (hex)
629 629 # x YYYY DDD SSS .ext
630 630 if isNumber( filen[8:11] ):
631 631     self.setFile = int( filen[8:11] ) #initialize the set counter to the set number of the last file
632 632 else:
633 633 self.setFile = -1
634 634 else:
635 635     self.setFile = -1 #initialize the set counter
636 636
637 637 setFile = self.setFile
638 638 setFile += 1
639 639
640 640 file = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
641 641 timeTuple.tm_year,
642 642 timeTuple.tm_yday,
643 643 setFile,
644 644 ext )
645 645
646 646 filename = os.path.join( path, subfolder, file )
647 647
648 648 fp = open( filename,'wb' )
649 649
650 650 self.blockIndex = 0
651 651
652 652     #save the attributes
653 653 self.filename = filename
654 654 self.subfolder = subfolder
655 655 self.fp = fp
656 656 self.setFile = setFile
657 657 self.flagIsNewFile = 1
658 658
659 659 self.getDataHeader()
660 660
661 661 print 'Writing the file: %s'%self.filename
662 662
663 663 self.__writeFirstHeader()
664 664
665 665 return 1
666 666
667 667
668 668 def __setNewBlock(self):
669 669 """
670 670 Si es un nuevo file escribe el First Header caso contrario escribe solo el Basic Header
671 671
672 672 Return:
673 673 0 : si no pudo escribir nada
674 674 1 : Si escribio el Basic el First Header
675 675 """
676 676 if self.fp == None:
677 677 self.setNextFile()
678 678
679 679 if self.flagIsNewFile:
680 680 return 1
681 681
682 682 if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
683 683 self.__writeBasicHeader()
684 684 return 1
685 685
686 686 if not( self.setNextFile() ):
687 687 return 0
688 688
689 689 return 1
690 690
691 691
692 692 def writeNextBlock(self):
693 693 """
694 694     Selects the next data block and writes it to a file
695 695
696 696 Return:
697 697         0 : if the data block could not be written
698 698         1 : if the data block was written
699 699 """
700 700 if not( self.__setNewBlock() ):
701 701 return 0
702 702
703 703 self.writeBlock()
704 704
705 705 return 1
706 706
707 707
708 708 def getDataHeader(self):
709 709     """Gets a copy of the First Header.
710 710     Affected: self.basicHeaderObj, self.systemHeaderObj, self.radarControllerHeaderObj, self.processingHeaderObj, self.dtype
711 711     Return: None
712 712 """
713 713
714 714 raise ValueError, "No implemented"
715 715
716 716 def setup(self, path, blocksPerFile, profilesPerBlock=None, set=0, ext=None):
717 717 """
718 718     Sets the format in which the data will be saved and writes the First Header
719 719 
720 720     Inputs:
721 721         path : destination path where the created files will be written
722 722         format : format in which a file will be saved
723 723         set : the set number of the file
724 724 
725 725     Return:
726 726         0 : if the setup was not successful
727 727         1 : if the setup was successful
728 728 """
729 729
730 730 if ext == None:
731 731 ext = self.ext
732 732
733 733 ext = ext.lower()
734 734
735 735 self.ext = ext
736 736
737 737 self.path = path
738 738
739 739 self.setFile = set - 1
740 740
741 741 self.blocksPerFile = blocksPerFile
742 742
743 743 self.profilesPerBlock = profilesPerBlock
744 744
745 745 if not(self.setNextFile()):
746 746     print "There is no next file"
747 747 return 0
748 748
749 749 self.setBlockDimension()
750 750
751 751 return 1
752 752
753 753
754 754
755 755
756 756
757 757
1 NO CONTENT: file renamed from schainpy2/IO/JROHeader.py to schainpy2/IO/JROHeaderIO.py
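The datatype decoding in __readFirstHeader above maps a bit pattern from the processing header to a numpy structured dtype and derives the inter-pulse period in seconds from the radar controller header. Below is a standalone sketch of that logic, under the assumption that the index has already been extracted as log2(processFlags & PROCFLAG.DATATYPE_MASK) - log2(PROCFLAG.DATATYPE_CHAR); PROCFLAG lives in JROHeaderIO and is not shown here, and the helper names are illustrative only.

    import numpy

    # same table as the if/elif chain in __readFirstHeader
    DATATYPE_TABLE = {
        0: numpy.dtype([('real','<i1'),('imag','<i1')]),   # char
        1: numpy.dtype([('real','<i2'),('imag','<i2')]),   # short
        2: numpy.dtype([('real','<i4'),('imag','<i4')]),   # long
        3: numpy.dtype([('real','<i8'),('imag','<i8')]),   # int64
        4: numpy.dtype([('real','<f4'),('imag','<f4')]),   # float
        5: numpy.dtype([('real','<f8'),('imag','<f8')]),   # double
    }

    def selectDtype(datatypeIndex):
        # hypothetical helper mirroring the reader's selection logic
        if datatypeIndex not in DATATYPE_TABLE:
            raise ValueError, 'Data type was not defined'
        return DATATYPE_TABLE[datatypeIndex]

    def ippToSeconds(ipp, c=3E8):
        # as in the reader: 2 * 1000 * ipp / c, which is the round-trip time
        # if ipp is expressed in kilometers (an assumption about the header units)
        return 2 * 1000 * ipp / c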
@@ -1,775 +1,777
1 1 '''
2 2
3 3 $Author$
4 4 $Id$
5 5 '''
6 6
7 7 import os, sys
8 8 import numpy
9 9 import glob
10 10 import fnmatch
11 11 import time, datetime
12 12
13 13 path = os.path.split(os.getcwd())[0]
14 14 sys.path.append(path)
15 15
16 from IO.JROHeader import *
17 from Data.Spectra import Spectra
18
16 from JROHeaderIO import *
19 17 from JRODataIO import JRODataReader
20 18 from JRODataIO import JRODataWriter
21 from JRODataIO import isNumber
22 19
20 from Data.JROData import Spectra
23 21
24 22 class SpectraReader(JRODataReader):
25 23 """
26 24     This class reads spectra data from processed files (.pdata). Data is always
27 25     read in blocks. The data read (a 3-dimensional array) is stored in three
28 26     buffers: Self Spectra, Cross Spectra and DC Channel.
29 27 
30 28     pairsOfEqualChannels * heights * profiles (Self Spectra)
31 29     pairsOfDifferentChannels * heights * profiles (Cross Spectra)
32 30     channels * heights (DC Channels)
33 31 
34 32     This class contains instances (objects) of the BasicHeader, SystemHeader,
35 33     RadarControllerHeader and Spectra classes. The first three are used to store the
36 34     data header information (metadata), and the fourth (Spectra) to obtain and store a
37 35     block of data from the "buffer" each time the "getData" method is executed.
38 36
39 37 Example:
40 38 dpath = "/home/myuser/data"
41 39
42 40 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
43 41
44 42 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
45 43
46 44 readerObj = SpectraReader()
47 45
48 46 readerObj.setup(dpath, startTime, endTime)
49 47
50 48 while(True):
51 49
52 50 readerObj.getData()
53 51
54 print readerObj.dataOutObj.data
52 print readerObj.data_spc
53
54 print readerObj.data_cspc
55
56 print readerObj.data_dc
55 57
56 58 if readerObj.flagNoMoreFiles:
57 59 break
58 60
59 61 """
60 62
61 63 pts2read_SelfSpectra = 0
62 64
63 65 pts2read_CrossSpectra = 0
64 66
65 67 pts2read_DCchannels = 0
66 68
67 69 ext = ".pdata"
68 70
69 71 optchar = "P"
70 72
71 73 dataOutObj = None
72 74
73 75 nRdChannels = None
74 76
75 77 nRdPairs = None
76 78
77 79 rdPairList = []
78 80
79 81
80 82 def __init__(self, dataOutObj=None):
81 83 """
82 84         Initializer of the SpectraReader class for reading spectra data.
83 85
84 86 Inputs:
85 87             dataOutObj : object of the Spectra class. This object will be used to
86 88                          store one data profile each time a request is made
87 89                          (getData). The profile will be obtained from the data buffer;
88 90                          if the buffer is empty, a new read of a data block
89 91                          will be performed.
90 92                          If this parameter is not passed, one will be created internally.
91 93
92 94 Affected:
93 95 self.dataOutObj
94 96
95 97 Return : None
96 98 """
97 99
98 100 self.pts2read_SelfSpectra = 0
99 101
100 102 self.pts2read_CrossSpectra = 0
101 103
102 104 self.pts2read_DCchannels = 0
103 105
104 106 self.datablock = None
105 107
106 108 self.utc = None
107 109
108 110 self.ext = ".pdata"
109 111
110 112 self.optchar = "P"
111 113
112 114 self.basicHeaderObj = BasicHeader()
113 115
114 116 self.systemHeaderObj = SystemHeader()
115 117
116 118 self.radarControllerHeaderObj = RadarControllerHeader()
117 119
118 120 self.processingHeaderObj = ProcessingHeader()
119 121
120 122 self.online = 0
121 123
122 124 self.fp = None
123 125
124 126 self.idFile = None
125 127
126 128 self.dtype = None
127 129
128 130 self.fileSizeByHeader = None
129 131
130 132 self.filenameList = []
131 133
132 134 self.filename = None
133 135
134 136 self.fileSize = None
135 137
136 138 self.firstHeaderSize = 0
137 139
138 140 self.basicHeaderSize = 24
139 141
140 142 self.pathList = []
141 143
142 144 self.lastUTTime = 0
143 145
144 146 self.maxTimeStep = 30
145 147
146 148 self.flagNoMoreFiles = 0
147 149
148 150 self.set = 0
149 151
150 152 self.path = None
151 153
152 154 self.delay = 3 #seconds
153 155
154 156         self.nTries = 3 #number of retries
155 157
156 158 self.nFiles = 3 #number of files for searching
157 159
158 160 self.nReadBlocks = 0
159 161
160 162 self.flagIsNewFile = 1
161 163
162 164 self.ippSeconds = 0
163 165
164 166 self.flagTimeBlock = 0
165 167
166 168 self.flagIsNewBlock = 0
167 169
168 170 self.nTotalBlocks = 0
169 171
170 172 self.blocksize = 0
171 173
172 174
173 175 def createObjByDefault(self):
174 176
175 177 dataObj = Spectra()
176 178
177 179 return dataObj
178 180
179 181 def __hasNotDataInBuffer(self):
180 182 return 1
181 183
182 184
183 185 def getBlockDimension(self):
184 186 """
185 187         Gets the number of points to read for each data block
186 188
187 189 Affected:
188 190 self.nRdChannels
189 191 self.nRdPairs
190 192 self.pts2read_SelfSpectra
191 193 self.pts2read_CrossSpectra
192 194 self.pts2read_DCchannels
193 195 self.blocksize
194 196 self.dataOutObj.nChannels
195 197 self.dataOutObj.nPairs
196 198
197 199 Return:
198 200 None
199 201 """
200 202 self.nRdChannels = 0
201 203 self.nRdPairs = 0
202 204 self.rdPairList = []
203 205
204 206 for i in range(0, self.processingHeaderObj.totalSpectra*2, 2):
205 207 if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i+1]:
206 208                 self.nRdChannels = self.nRdChannels + 1 #pair of equal channels
207 209             else:
208 210                 self.nRdPairs = self.nRdPairs + 1 #pair of different channels
209 211 self.rdPairList.append((self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1]))
210 212
211 213 pts2read = self.processingHeaderObj.nHeights * self.processingHeaderObj.profilesPerBlock
212 214
213 215 self.pts2read_SelfSpectra = int(self.nRdChannels * pts2read)
214 216 self.blocksize = self.pts2read_SelfSpectra
215 217
216 218 if self.processingHeaderObj.flag_cspc:
217 219 self.pts2read_CrossSpectra = int(self.nRdPairs * pts2read)
218 220 self.blocksize += self.pts2read_CrossSpectra
219 221
220 222 if self.processingHeaderObj.flag_dc:
221 223 self.pts2read_DCchannels = int(self.systemHeaderObj.nChannels * self.processingHeaderObj.nHeights)
222 224 self.blocksize += self.pts2read_DCchannels
223 225
224 226 # self.blocksize = self.pts2read_SelfSpectra + self.pts2read_CrossSpectra + self.pts2read_DCchannels
225 227
226 228
227 229 def readBlock(self):
228 230 """
229 231         Reads the data block from the current position of the file pointer
230 232         (self.fp) and updates all parameters related to the data block
231 233         (metadata + data). The data read is stored in the buffer and the buffer
232 234         counter is reset to 0
233 235
234 236 Return: None
235 237
236 238         Affected variables:
237 239
238 240 self.flagIsNewFile
239 241 self.flagIsNewBlock
240 242 self.nTotalBlocks
241 243 self.data_spc
242 244 self.data_cspc
243 245 self.data_dc
244 246
245 247 Exceptions:
246 248             If a block that was read is not a valid block
247 249 """
248 250 blockOk_flag = False
249 251 fpointer = self.fp.tell()
250 252
251 253 spc = numpy.fromfile( self.fp, self.dtype[0], self.pts2read_SelfSpectra )
252 254         spc = spc.reshape( (self.nRdChannels, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #reshape into a 3D array
253 255
254 256 if self.processingHeaderObj.flag_cspc:
255 257 cspc = numpy.fromfile( self.fp, self.dtype, self.pts2read_CrossSpectra )
256 258             cspc = cspc.reshape( (self.nRdPairs, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #reshape into a 3D array
257 259
258 260 if self.processingHeaderObj.flag_dc:
259 261 dc = numpy.fromfile( self.fp, self.dtype, self.pts2read_DCchannels ) #int(self.processingHeaderObj.nHeights*self.systemHeaderObj.nChannels) )
260 262             dc = dc.reshape( (self.systemHeaderObj.nChannels, self.processingHeaderObj.nHeights) ) #reshape into a 2D array
261 263
262 264
263 265 if not(self.processingHeaderObj.shif_fft):
264 266             spc = numpy.roll( spc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #shift a given number of positions to the right along axis 2
265 267
266 268 if self.processingHeaderObj.flag_cspc:
267 269                 cspc = numpy.roll( cspc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #shift a given number of positions to the right along axis 2
268 270
269 271
270 272 spc = numpy.transpose( spc, (0,2,1) )
271 273 self.data_spc = spc
272 274
273 275 if self.processingHeaderObj.flag_cspc:
274 276 cspc = numpy.transpose( cspc, (0,2,1) )
275 277 self.data_cspc = cspc['real'] + cspc['imag']*1j
276 278 else:
277 279 self.data_cspc = None
278 280
279 281 if self.processingHeaderObj.flag_dc:
280 282 self.data_dc = dc['real'] + dc['imag']*1j
281 283 else:
282 284 self.data_dc = None
283 285
284 286 self.flagIsNewFile = 0
285 287 self.flagIsNewBlock = 1
286 288
287 289 self.nTotalBlocks += 1
288 290 self.nReadBlocks += 1
289 291
290 292 return 1
291 293
292 294
293 295 def getData(self):
294 296 """
295 297         Copies the read buffer to the "Spectra" class,
296 298         with all its associated parameters (metadata). When there is no data in the read
297 299         buffer, a new read of the data blocks is required using "readNextBlock"
298 300
299 301 Return:
300 302             0 : if there are no more files available
301 303             1 : if the buffer was copied successfully
302 304
303 305 Affected:
304 306 self.dataOutObj
305 307
306 308 self.flagTimeBlock
307 309 self.flagIsNewBlock
308 310 """
309 311
310 312 if self.flagNoMoreFiles: return 0
311 313
312 314 self.flagTimeBlock = 0
313 315 self.flagIsNewBlock = 0
314 316
315 317 if self.__hasNotDataInBuffer():
316 318
317 319 if not( self.readNextBlock() ):
318 320 return 0
319 321
320 322 # self.updateDataHeader()
321 323
322 324 if self.flagNoMoreFiles == 1:
323 325 print 'Process finished'
324 326 return 0
325 327
326 328         #data is a 3-dimensional numpy array (profiles, heights and channels)
327 329
328 330 if self.data_dc == None:
329 331 self.dataOutObj.flagNoData = True
330 332 return 0
331 333
332 334
333 335 self.dataOutObj.data_spc = self.data_spc
334 336
335 337 self.dataOutObj.data_cspc = self.data_cspc
336 338
337 339 self.dataOutObj.data_dc = self.data_dc
338 340
339 341 self.dataOutObj.flagTimeBlock = self.flagTimeBlock
340 342
341 343 self.dataOutObj.flagNoData = False
342 344
343 345 self.dataOutObj.dtype = self.dtype
344 346
345 347 self.dataOutObj.nChannels = self.nRdChannels
346 348
347 349 self.dataOutObj.nPairs = self.nRdPairs
348 350
349 351 self.dataOutObj.pairsList = self.rdPairList
350 352
351 353 self.dataOutObj.nHeights = self.processingHeaderObj.nHeights
352 354
353 355 self.dataOutObj.nProfiles = self.processingHeaderObj.profilesPerBlock
354 356
355 357 self.dataOutObj.nFFTPoints = self.processingHeaderObj.profilesPerBlock
356 358
357 359 self.dataOutObj.nIncohInt = self.processingHeaderObj.nIncohInt
358 360
359 361
360 362 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
361 363
362 364 self.dataOutObj.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
363 365
364 366 self.dataOutObj.channelList = range(self.systemHeaderObj.nChannels)
365 367
366 368 self.dataOutObj.channelIndexList = range(self.systemHeaderObj.nChannels)
367 369
368 370 self.dataOutObj.dataUtcTime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000.#+ self.profileIndex * self.ippSeconds
369 371
370 372 self.dataOutObj.flagShiftFFT = self.processingHeaderObj.shif_fft
371 373
372 374 # self.profileIndex += 1
373 375
374 376 self.dataOutObj.systemHeaderObj = self.systemHeaderObj.copy()
375 377
376 378 self.dataOutObj.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
377 379
378 return 1
380 return self.data_spc
379 381
380 382
381 383 class SpectraWriter(JRODataWriter):
382 384
383 385 """
384 386     This class writes spectra data to processed files (.pdata). Data is always
385 387     written in blocks.
386 388 """
387 389
388 390 ext = ".pdata"
389 391
390 392 optchar = "P"
391 393
392 394 shape_spc_Buffer = None
393 395
394 396 shape_cspc_Buffer = None
395 397
396 398 shape_dc_Buffer = None
397 399
398 400 data_spc = None
399 401
400 402 data_cspc = None
401 403
402 404 data_dc = None
403 405
404 406 wrPairList = []
405 407
406 408 nWrPairs = 0
407 409
408 410 nWrChannels = 0
409 411
410 412 # dataOutObj = None
411 413
412 414 def __init__(self, dataOutObj=None):
413 415 """
414 416         Initializer of the SpectraWriter class for writing spectra data.
415 417
416 418 Affected:
417 419 self.dataOutObj
418 420 self.basicHeaderObj
419 421 self.systemHeaderObj
420 422 self.radarControllerHeaderObj
421 423 self.processingHeaderObj
422 424
423 425 Return: None
424 426 """
425 427 if dataOutObj == None:
426 428 dataOutObj = Spectra()
427 429
428 430 if not( isinstance(dataOutObj, Spectra) ):
429 431             raise ValueError, "in SpectraWriter, dataOutObj must be a Spectra class object"
430 432
431 433 self.dataOutObj = dataOutObj
432 434
433 435 self.nTotalBlocks = 0
434 436
435 437 self.nWrChannels = self.dataOutObj.nChannels
436 438
437 439 # if len(pairList) > 0:
438 440 # self.wrPairList = pairList
439 441 #
440 442 # self.nWrPairs = len(pairList)
441 443
442 444 self.wrPairList = self.dataOutObj.pairsList
443 445
444 446 self.nWrPairs = self.dataOutObj.nPairs
445 447
446 448
447 449
448 450
449 451
450 452 # self.data_spc = None
451 453 # self.data_cspc = None
452 454 # self.data_dc = None
453 455
454 456 # self.fp = None
455 457
456 458 # self.flagIsNewFile = 1
457 459 #
458 460 # self.nTotalBlocks = 0
459 461 #
460 462 # self.flagIsNewBlock = 0
461 463 #
462 464 # self.flagNoMoreFiles = 0
463 465 #
464 466 # self.setFile = None
465 467 #
466 468 # self.dtype = None
467 469 #
468 470 # self.path = None
469 471 #
470 472 # self.noMoreFiles = 0
471 473 #
472 474 # self.filename = None
473 475 #
474 476 # self.basicHeaderObj = BasicHeader()
475 477 #
476 478 # self.systemHeaderObj = SystemHeader()
477 479 #
478 480 # self.radarControllerHeaderObj = RadarControllerHeader()
479 481 #
480 482 # self.processingHeaderObj = ProcessingHeader()
481 483
482 484
483 485 def hasAllDataInBuffer(self):
484 486 return 1
485 487
486 488
487 489 def setBlockDimension(self):
488 490 """
489 491         Gets the dimensional shapes of the data sub-blocks that make up a block
490 492
491 493 Affected:
492 494 self.shape_spc_Buffer
493 495 self.shape_cspc_Buffer
494 496 self.shape_dc_Buffer
495 497
496 498 Return: None
497 499 """
498 500 self.shape_spc_Buffer = (self.dataOutObj.nChannels,
499 501 self.processingHeaderObj.nHeights,
500 502 self.processingHeaderObj.profilesPerBlock)
501 503
502 504 self.shape_cspc_Buffer = (self.dataOutObj.nPairs,
503 505 self.processingHeaderObj.nHeights,
504 506 self.processingHeaderObj.profilesPerBlock)
505 507
506 508 self.shape_dc_Buffer = (self.dataOutObj.nChannels,
507 509 self.processingHeaderObj.nHeights)
508 510
509 511
510 512 def writeBlock(self):
511 513 """
512 514         Writes the buffer to the designated file
513 515
514 516 Affected:
515 517 self.data_spc
516 518 self.data_cspc
517 519 self.data_dc
518 520 self.flagIsNewFile
519 521 self.flagIsNewBlock
520 522 self.nTotalBlocks
521 523 self.nWriteBlocks
522 524
523 525 Return: None
524 526 """
525 527
526 528 spc = numpy.transpose( self.data_spc, (0,2,1) )
527 529 if not( self.processingHeaderObj.shif_fft ):
528 530             spc = numpy.roll( spc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #shift a given number of positions to the right along axis 2
529 531 data = spc.reshape((-1))
530 532 data.tofile(self.fp)
531 533
532 534 if self.data_cspc != None:
533 535 data = numpy.zeros( self.shape_cspc_Buffer, self.dtype )
534 536 cspc = numpy.transpose( self.data_cspc, (0,2,1) )
535 537 if not( self.processingHeaderObj.shif_fft ):
536 538                 cspc = numpy.roll( cspc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #shift a given number of positions to the right along axis 2
537 539 data['real'] = cspc.real
538 540 data['imag'] = cspc.imag
539 541 data = data.reshape((-1))
540 542 data.tofile(self.fp)
541 543
542 544 if self.data_dc != None:
543 545 data = numpy.zeros( self.shape_dc_Buffer, self.dtype )
544 546 dc = self.data_dc
545 547 data['real'] = dc.real
546 548 data['imag'] = dc.imag
547 549 data = data.reshape((-1))
548 550 data.tofile(self.fp)
549 551
550 552 self.data_spc.fill(0)
551 553 self.data_dc.fill(0)
552 554 if self.data_cspc != None:
553 555 self.data_cspc.fill(0)
554 556
555 557 self.flagIsNewFile = 0
556 558 self.flagIsNewBlock = 1
557 559 self.nTotalBlocks += 1
558 560 self.nWriteBlocks += 1
559 561 self.blockIndex += 1
560 562
561 563
562 564 def putData(self):
563 565 """
564 566         Sets a data block and then writes it to a file
565 567
566 568 Affected:
567 569 self.data_spc
568 570 self.data_cspc
569 571 self.data_dc
570 572
571 573 Return:
572 574             0 : if there is no data or no more files can be written
573 575             1 : if one block of data was written to a file
574 576 """
575 577 self.flagIsNewBlock = 0
576 578
577 579 if self.dataOutObj.flagNoData:
578 580 return 0
579 581
580 582 if self.dataOutObj.flagTimeBlock:
581 583 self.data_spc.fill(0)
582 584 self.data_cspc.fill(0)
583 585 self.data_dc.fill(0)
584 586 self.setNextFile()
585 587
586 588 if self.flagIsNewFile == 0:
587 589 self.getBasicHeader()
588 590
589 591 self.data_spc = self.dataOutObj.data_spc
590 592 self.data_cspc = self.dataOutObj.data_cspc
591 593 self.data_dc = self.dataOutObj.data_dc
592 594
593 595 # #self.processingHeaderObj.dataBlocksPerFile)
594 596 if self.hasAllDataInBuffer():
595 597 # self.getDataHeader()
596 598 self.writeNextBlock()
597 599
598 600 if self.flagNoMoreFiles:
599 601 #print 'Process finished'
600 602 return 0
601 603
602 604 return 1
603 605
604 606
605 607 def __getProcessFlags(self):
606 608
607 609 processFlags = 0
608 610
609 611 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
610 612 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
611 613 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
612 614 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
613 615 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
614 616 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
615 617
616 618 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
617 619
618 620
619 621
620 622 datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
621 623 PROCFLAG.DATATYPE_SHORT,
622 624 PROCFLAG.DATATYPE_LONG,
623 625 PROCFLAG.DATATYPE_INT64,
624 626 PROCFLAG.DATATYPE_FLOAT,
625 627 PROCFLAG.DATATYPE_DOUBLE]
626 628
627 629
628 630 for index in range(len(dtypeList)):
629 631 if self.dataOutObj.dtype == dtypeList[index]:
630 632 dtypeValue = datatypeValueList[index]
631 633 break
632 634
633 635 processFlags += dtypeValue
634 636
635 637 if self.dataOutObj.flagDecodeData:
636 638 processFlags += PROCFLAG.DECODE_DATA
637 639
638 640 if self.dataOutObj.flagDeflipData:
639 641 processFlags += PROCFLAG.DEFLIP_DATA
640 642
641 643 if self.dataOutObj.code != None:
642 644 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
643 645
644 646 if self.dataOutObj.nIncohInt > 1:
645 647 processFlags += PROCFLAG.INCOHERENT_INTEGRATION
646 648
647 649 if self.dataOutObj.data_dc != None:
648 650 processFlags += PROCFLAG.SAVE_CHANNELS_DC
649 651
650 652 return processFlags
651 653
652 654
653 655 def __getBlockSize(self):
654 656 '''
655 657         This method determines the number of bytes for a data block of type Spectra
656 658 '''
657 659
658 660 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
659 661 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
660 662 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
661 663 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
662 664 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
663 665 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
664 666
665 667 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
666 668 datatypeValueList = [1,2,4,8,4,8]
667 669 for index in range(len(dtypeList)):
668 670 if self.dataOutObj.dtype == dtypeList[index]:
669 671 datatypeValue = datatypeValueList[index]
670 672 break
671 673
672 674
673 675 pts2write = self.dataOutObj.nHeights * self.dataOutObj.nFFTPoints
674 676
675 677 pts2write_SelfSpectra = int(self.nWrChannels * pts2write)
676 678 blocksize = (pts2write_SelfSpectra*datatypeValue)
677 679
678 680 if self.dataOutObj.data_cspc != None:
679 681 pts2write_CrossSpectra = int(self.nWrPairs * pts2write)
680 682 blocksize += (pts2write_CrossSpectra*datatypeValue*2)
681 683
682 684 if self.dataOutObj.data_dc != None:
683 685 pts2write_DCchannels = int(self.nWrChannels * self.dataOutObj.nHeights)
684 686 blocksize += (pts2write_DCchannels*datatypeValue*2)
685 687
686 688         blocksize = blocksize #* datatypeValue * 2 #FIX THIS
687 689
688 690 return blocksize
689 691
690 692
691 693 def getBasicHeader(self):
692 694 self.basicHeaderObj.size = self.basicHeaderSize #bytes
693 695 self.basicHeaderObj.version = self.versionFile
694 696 self.basicHeaderObj.dataBlock = self.nTotalBlocks
695 697
696 698 utc = numpy.floor(self.dataOutObj.dataUtcTime)
697 699 milisecond = (self.dataOutObj.dataUtcTime - utc)* 1000.0
698 700
699 701 self.basicHeaderObj.utc = utc
700 702 self.basicHeaderObj.miliSecond = milisecond
701 703 self.basicHeaderObj.timeZone = 0
702 704 self.basicHeaderObj.dstFlag = 0
703 705 self.basicHeaderObj.errorCount = 0
704 706
705 707 def getDataHeader(self):
706 708
707 709 """
708 710         Gets a copy of the First Header
709 711
710 712 Affected:
711 713 self.systemHeaderObj
712 714 self.radarControllerHeaderObj
713 715 self.dtype
714 716
715 717 Return:
716 718 None
717 719 """
718 720
719 721 self.systemHeaderObj = self.dataOutObj.systemHeaderObj.copy()
720 722 self.systemHeaderObj.nChannels = self.dataOutObj.nChannels
721 723 self.radarControllerHeaderObj = self.dataOutObj.radarControllerHeaderObj.copy()
722 724
723 725 self.getBasicHeader()
724 726
725 727 processingHeaderSize = 40 # bytes
726 728 self.processingHeaderObj.dtype = 0 # Voltage
727 729 self.processingHeaderObj.blockSize = self.__getBlockSize()
728 730 self.processingHeaderObj.profilesPerBlock = self.dataOutObj.nFFTPoints
729 731 self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
730 732         self.processingHeaderObj.nWindows = 1 #could be 1 or self.dataOutObj.processingHeaderObj.nWindows
731 733         self.processingHeaderObj.processFlags = self.__getProcessFlags()
732 734         self.processingHeaderObj.nCohInt = 1 # when the source data is of type Spectra
733 735 self.processingHeaderObj.nIncohInt = self.dataOutObj.nIncohInt
734 736 self.processingHeaderObj.totalSpectra = self.dataOutObj.nPairs + self.dataOutObj.nChannels
735 737
736 738 if self.processingHeaderObj.totalSpectra > 0:
737 739 channelList = []
738 740 for channel in range(self.dataOutObj.nChannels):
739 741 channelList.append(channel)
740 742 channelList.append(channel)
741 743
742 744 pairsList = []
743 745 for pair in self.dataOutObj.pairsList:
744 746 pairsList.append(pair[0])
745 747 pairsList.append(pair[1])
746 748 spectraComb = channelList + pairsList
747 749 spectraComb = numpy.array(spectraComb,dtype="u1")
748 750 self.processingHeaderObj.spectraComb = spectraComb
749 751 sizeOfSpcComb = len(spectraComb)
750 752 processingHeaderSize += sizeOfSpcComb
751 753
752 754 if self.dataOutObj.code != None:
753 755 self.processingHeaderObj.code = self.dataOutObj.code
754 756 self.processingHeaderObj.nCode = self.dataOutObj.nCode
755 757 self.processingHeaderObj.nBaud = self.dataOutObj.nBaud
756 758 nCodeSize = 4 # bytes
757 759 nBaudSize = 4 # bytes
758 760 codeSize = 4 # bytes
759 761 sizeOfCode = int(nCodeSize + nBaudSize + codeSize * self.dataOutObj.nCode * self.dataOutObj.nBaud)
760 762 processingHeaderSize += sizeOfCode
761 763
762 764 if self.processingHeaderObj.nWindows != 0:
763 765 self.processingHeaderObj.firstHeight = self.dataOutObj.heightList[0]
764 766 self.processingHeaderObj.deltaHeight = self.dataOutObj.heightList[1] - self.dataOutObj.heightList[0]
765 767 self.processingHeaderObj.nHeights = self.dataOutObj.nHeights
766 768 self.processingHeaderObj.samplesWin = self.dataOutObj.nHeights
767 769 sizeOfFirstHeight = 4
768 770 sizeOfdeltaHeight = 4
769 771 sizeOfnHeights = 4
770 772 sizeOfWindows = (sizeOfFirstHeight + sizeOfdeltaHeight + sizeOfnHeights)*self.processingHeaderObj.nWindows
771 773 processingHeaderSize += sizeOfWindows
772 774
773 775 self.processingHeaderObj.size = processingHeaderSize
774 776
775 777 No newline at end of file
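For reference, a rough sketch of the byte count that SpectraWriter.__getBlockSize computes above: one real component per self-spectra point, and two components (real and imaginary) per cross-spectra and DC point. The function and parameter names here are illustrative, not part of the changeset; sampleSize is the per-component size in bytes (1, 2, 4 or 8) picked from the dtype table.

    def spectraBlockSize(nChannels, nPairs, nHeights, nFFTPoints, sampleSize,
                         hasCrossSpectra=True, hasDCChannels=True):
        pts2write = nHeights * nFFTPoints

        # self-spectra: one real value per point
        blocksize = nChannels * pts2write * sampleSize

        if hasCrossSpectra:
            # cross-spectra keep real and imaginary parts, hence the factor 2
            blocksize += nPairs * pts2write * sampleSize * 2

        if hasDCChannels:
            # one complex DC value per channel and height
            blocksize += nChannels * nHeights * sampleSize * 2

        return blocksize

    # e.g. 4 channels, 2 pairs, 100 heights, 128 FFT points, float32 components:
    # spectraBlockSize(4, 2, 100, 128, 4) -> 412800 bytes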
@@ -1,584 +1,584
1 1 '''
2 2
3 3 $Author$
4 4 $Id$
5 5 '''
6 6
7 7 import os, sys
8 8 import numpy
9 9 import glob
10 10 import fnmatch
11 11 import time, datetime
12 12
13 13 path = os.path.split(os.getcwd())[0]
14 14 sys.path.append(path)
15 15
16 from JROHeader import *
16 from JROHeaderIO import *
17 17 from JRODataIO import JRODataReader
18 18 from JRODataIO import JRODataWriter
19 19
20 from Data.Voltage import Voltage
20 from Data.JROData import Voltage
21 21
22 22 class VoltageReader(JRODataReader):
23 23 """
24 24     This class reads voltage data from files in rawdata format (.r). Data is always
25 25     read in blocks. The data read (a 3-dimensional array:
26 26     profiles*heights*channels) is stored in the "buffer" variable.
27 27 
28 28                       profiles * heights * channels
29 29 
30 30     This class contains instances (objects) of the BasicHeader, SystemHeader,
31 31     RadarControllerHeader and Voltage classes. The first three are used to store the
32 32     data header information (metadata), and the fourth (Voltage) to obtain and store one
33 33     data profile from the "buffer" each time the "getData" method is executed.
34 34
35 35 Example:
36 36
37 37 dpath = "/home/myuser/data"
38 38
39 39 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
40 40
41 41 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
42 42
43 43 readerObj = VoltageReader()
44 44
45 45 readerObj.setup(dpath, startTime, endTime)
46 46
47 47 while(True):
48 48
49 49 #to get one profile
50 50 profile = readerObj.getData()
51 51
52 52 #print the profile
53 53 print profile
54 54
55 55 #If you want to see all datablock
56 56 print readerObj.datablock
57 57
58 58 if readerObj.flagNoMoreFiles:
59 59 break
60 60
61 61 """
62 62
63 63 ext = ".r"
64 64
65 65 optchar = "D"
66 66 dataOutObj = None
67 67
68 68
69 69 def __init__(self, dataOutObj=None):
70 70 """
71 71         Initializer of the VoltageReader class for reading voltage data.
72 72
73 73 Input:
74 74             dataOutObj : object of the Voltage class. This object will be used to
75 75                          store one data profile each time a request is made
76 76                          (getData). The profile will be obtained from the data buffer;
77 77                          if the buffer is empty, a new read of a data block
78 78                          will be performed.
79 79                          If this parameter is not passed, one will be created internally.
80 80
81 81         Affected variables:
82 82 self.dataOutObj
83 83
84 84 Return:
85 85 None
86 86 """
87 87
88 88 self.datablock = None
89 89
90 90 self.utc = 0
91 91
92 92 self.ext = ".r"
93 93
94 94 self.optchar = "D"
95 95
96 96 self.basicHeaderObj = BasicHeader()
97 97
98 98 self.systemHeaderObj = SystemHeader()
99 99
100 100 self.radarControllerHeaderObj = RadarControllerHeader()
101 101
102 102 self.processingHeaderObj = ProcessingHeader()
103 103
104 104 self.online = 0
105 105
106 106 self.fp = None
107 107
108 108 self.idFile = None
109 109
110 110 self.dtype = None
111 111
112 112 self.fileSizeByHeader = None
113 113
114 114 self.filenameList = []
115 115
116 116 self.filename = None
117 117
118 118 self.fileSize = None
119 119
120 120 self.firstHeaderSize = 0
121 121
122 122 self.basicHeaderSize = 24
123 123
124 124 self.pathList = []
125 125
126 126 self.filenameList = []
127 127
128 128 self.lastUTTime = 0
129 129
130 130 self.maxTimeStep = 30
131 131
132 132 self.flagNoMoreFiles = 0
133 133
134 134 self.set = 0
135 135
136 136 self.path = None
137 137
138 138 self.profileIndex = 9999
139 139
140 140 self.delay = 3 #seconds
141 141
142 142         self.nTries = 3 #number of retries
143 143
144 144 self.nFiles = 3 #number of files for searching
145 145
146 146 self.nReadBlocks = 0
147 147
148 148 self.flagIsNewFile = 1
149 149
150 150 self.ippSeconds = 0
151 151
152 152 self.flagTimeBlock = 0
153 153
154 154 self.flagIsNewBlock = 0
155 155
156 156 self.nTotalBlocks = 0
157 157
158 158 self.blocksize = 0
159 159
160 160 def createObjByDefault(self):
161 161
162 162 dataObj = Voltage()
163 163
164 164 return dataObj
165 165
166 166 def __hasNotDataInBuffer(self):
167 167 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
168 168 return 1
169 169 return 0
170 170
171 171
172 172 def getBlockDimension(self):
173 173 """
174 174         Gets the number of points to read for each data block
175 175
176 176 Affected:
177 177 self.blocksize
178 178
179 179 Return:
180 180 None
181 181 """
182 182 pts2read = self.processingHeaderObj.profilesPerBlock * self.processingHeaderObj.nHeights * self.systemHeaderObj.nChannels
183 183 self.blocksize = pts2read
184 184
185 185
186 186 def readBlock(self):
187 187 """
188 188         readBlock reads the data block from the current position of the file pointer
189 189         (self.fp) and updates all parameters related to the data block
190 190         (metadata + data). The data read is stored in the buffer and the buffer
191 191         counter is reset to 0
192 192
193 193 Inputs:
194 194 None
195 195
196 196 Return:
197 197 None
198 198
199 199 Affected:
200 200 self.profileIndex
201 201 self.datablock
202 202 self.flagIsNewFile
203 203 self.flagIsNewBlock
204 204 self.nTotalBlocks
205 205
206 206 Exceptions:
207 207             If a block that was read is not a valid block
208 208 """
209 209
210 210 junk = numpy.fromfile( self.fp, self.dtype, self.blocksize )
211 211
212 212 try:
213 213 junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
214 214 except:
215 215             print "The read block (%3d) does not have enough data" %self.nReadBlocks
216 216 return 0
217 217
218 218 junk = numpy.transpose(junk, (2,0,1))
219 219 self.datablock = junk['real'] + junk['imag']*1j
220 220
221 221 self.profileIndex = 0
222 222
223 223 self.flagIsNewFile = 0
224 224 self.flagIsNewBlock = 1
225 225
226 226 self.nTotalBlocks += 1
227 227 self.nReadBlocks += 1
228 228
229 229 return 1
230 230
231 231
232 232 def getData(self):
233 233 """
234 234         getData obtains one data unit from the read buffer and copies it to the "Voltage" class
235 235         with all its associated parameters (metadata). When there is no data in the read
236 236         buffer, a new read of the data blocks is required using "readNextBlock"
237 237 
238 238         It also increments the buffer counter by 1.
239 239
240 240 Return:
241 241             data : returns one profile of voltages (heights * channels) copied from the
242 242                    buffer. If there are no more files to read, returns None.
243 243
244 244         Affected variables:
245 245 self.dataOutObj
246 246 self.profileIndex
247 247
248 248 Affected:
249 249 self.dataOutObj
250 250 self.profileIndex
251 251 self.flagTimeBlock
252 252 self.flagIsNewBlock
253 253 """
254 254 if self.flagNoMoreFiles: return 0
255 255
256 256 self.flagTimeBlock = 0
257 257 self.flagIsNewBlock = 0
258 258
259 259 if self.__hasNotDataInBuffer():
260 260
261 261 if not( self.readNextBlock() ):
262 262 return 0
263 263
264 264 # self.updateDataHeader()
265 265
266 266 if self.flagNoMoreFiles == 1:
267 267 print 'Process finished'
268 268 return 0
269 269
270 270         #data is a 3-dimensional numpy array (profiles, heights and channels)
271 271
272 272 if self.datablock == None:
273 273 self.dataOutObj.flagNoData = True
274 274 return 0
275 275
276 276 self.dataOutObj.data = self.datablock[:,self.profileIndex,:]
277 277
278 278 self.dataOutObj.dtype = self.dtype
279 279
280 280 self.dataOutObj.nChannels = self.systemHeaderObj.nChannels
281 281
282 282 self.dataOutObj.nHeights = self.processingHeaderObj.nHeights
283 283
284 284 self.dataOutObj.nProfiles = self.processingHeaderObj.profilesPerBlock
285 285
286 286 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
287 287
288 288 self.dataOutObj.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
289 289
290 290 self.dataOutObj.channelList = range(self.systemHeaderObj.nChannels)
291 291
292 292 self.dataOutObj.channelIndexList = range(self.systemHeaderObj.nChannels)
293 293
294 294 self.dataOutObj.flagTimeBlock = self.flagTimeBlock
295 295
296 296 self.dataOutObj.dataUtcTime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000. + self.profileIndex * self.ippSeconds
297 297
298 298 self.dataOutObj.nCohInt = self.processingHeaderObj.nCohInt
299 299
300 300 self.dataOutObj.flagShiftFFT = False
301 301
302 302 if self.processingHeaderObj.code != None:
303 303 self.dataOutObj.nCode = self.processingHeaderObj.nCode
304 304
305 305 self.dataOutObj.nBaud = self.processingHeaderObj.nBaud
306 306
307 307 self.dataOutObj.code = self.processingHeaderObj.code
308 308
309 309 self.profileIndex += 1
310 310
311 311 self.dataOutObj.systemHeaderObj = self.systemHeaderObj.copy()
312 312
313 313 self.dataOutObj.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
314 314
315 315 self.dataOutObj.flagNoData = False
316 316
317 return 1
317 return self.dataOutObj.data
318 318
319 319
320 320 class VoltageWriter(JRODataWriter):
321 321 """
322 322 This class writes voltage data to processed files (.r). The data is always
323 323 written in blocks.
324 324 """
325 325
326 326 ext = ".r"
327 327
328 328 optchar = "D"
329 329
330 330 shapeBuffer = None
331 331
332 332
333 333 def __init__(self, dataOutObj=None):
334 334 """
335 335 VoltageWriter class initializer for writing voltage data.
336 336
337 337 Affected:
338 338 self.dataOutObj
339 339
340 340 Return: None
341 341 """
342 342 if dataOutObj == None:
343 343 dataOutObj = Voltage()
344 344
345 345 if not( isinstance(dataOutObj, Voltage) ):
346 346 raise ValueError, "in VoltageWriter, dataOutObj must be a Voltage class object"
347 347
348 348 self.dataOutObj = dataOutObj
349 349
350 350 self.nTotalBlocks = 0
351 351
352 352 self.profileIndex = 0
353 353
354 354 def hasAllDataInBuffer(self):
355 355 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
356 356 return 1
357 357 return 0
358 358
359 359
360 360 def setBlockDimension(self):
361 361 """
362 362 Gets the dimensional shapes of the data sub-blocks that make up a block
363 363
364 364 Affected:
365 365 self.shapeBuffer
366 366 self.datablock
367 367
368 368
369 369 Return: None
370 370 """
371 371 self.shapeBuffer = (self.processingHeaderObj.profilesPerBlock,
372 372 self.processingHeaderObj.nHeights,
373 373 self.systemHeaderObj.nChannels)
374 374
375 375 self.datablock = numpy.zeros((self.systemHeaderObj.nChannels,
376 376 self.processingHeaderObj.profilesPerBlock,
377 377 self.processingHeaderObj.nHeights),
378 378 dtype=numpy.dtype('complex'))
379 379
380 380
381 381 def writeBlock(self):
382 382 """
383 383 Writes the buffer to the designated file (see the standalone layout sketch after this file's diff)
384 384
385 385 Affected:
386 386 self.profileIndex
387 387 self.flagIsNewFile
388 388 self.flagIsNewBlock
389 389 self.nTotalBlocks
390 390 self.blockIndex
391 391
392 392 Return: None
393 393 """
394 394 data = numpy.zeros( self.shapeBuffer, self.dtype )
395 395
396 396 junk = numpy.transpose(self.datablock, (1,2,0))
397 397
398 398 data['real'] = junk.real
399 399 data['imag'] = junk.imag
400 400
401 401 data = data.reshape( (-1) )
402 402
403 403 data.tofile( self.fp )
404 404
405 405 self.datablock.fill(0)
406 406
407 407 self.profileIndex = 0
408 408 self.flagIsNewFile = 0
409 409 self.flagIsNewBlock = 1
410 410
411 411 self.blockIndex += 1
412 412 self.nTotalBlocks += 1
413 413
414 414 def putData(self):
415 415 """
416 416 Stores a block of data and then writes it to a file
417 417
418 418 Affected:
419 419 self.flagIsNewBlock
420 420 self.profileIndex
421 421
422 422 Return:
423 423 0 : if there is no data or there are no more files that can be written
424 424 1 : if the data of one block was written to a file
425 425 """
426 426 self.flagIsNewBlock = 0
427 427
428 428 if self.dataOutObj.flagNoData:
429 429 return 0
430 430
431 431 if self.dataOutObj.flagTimeBlock:
432 432
433 433 self.datablock.fill(0)
434 434 self.profileIndex = 0
435 435 self.setNextFile()
436 436
437 437 if self.profileIndex == 0:
438 438 self.getBasicHeader()
439 439
440 440 self.datablock[:,self.profileIndex,:] = self.dataOutObj.data
441 441
442 442 self.profileIndex += 1
443 443
444 444 if self.hasAllDataInBuffer():
445 445 #if self.flagIsNewFile:
446 446 self.writeNextBlock()
447 447 # self.getDataHeader()
448 448
449 449 if self.flagNoMoreFiles:
450 450 #print 'Process finished'
451 451 return 0
452 452
453 453 return 1
454 454
455 455 def __getProcessFlags(self):
456 456
457 457 processFlags = 0
458 458
459 459 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
460 460 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
461 461 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
462 462 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
463 463 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
464 464 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
465 465
466 466 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
467 467
468 468
469 469
470 470 datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
471 471 PROCFLAG.DATATYPE_SHORT,
472 472 PROCFLAG.DATATYPE_LONG,
473 473 PROCFLAG.DATATYPE_INT64,
474 474 PROCFLAG.DATATYPE_FLOAT,
475 475 PROCFLAG.DATATYPE_DOUBLE]
476 476
477 477
478 478 for index in range(len(dtypeList)):
479 479 if self.dataOutObj.dtype == dtypeList[index]:
480 480 dtypeValue = datatypeValueList[index]
481 481 break
482 482
483 483 processFlags += dtypeValue
484 484
485 485 if self.dataOutObj.flagDecodeData:
486 486 processFlags += PROCFLAG.DECODE_DATA
487 487
488 488 if self.dataOutObj.flagDeflipData:
489 489 processFlags += PROCFLAG.DEFLIP_DATA
490 490
491 491 if self.dataOutObj.code is not None:
492 492 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
493 493
494 494 if self.dataOutObj.nCohInt > 1:
495 495 processFlags += PROCFLAG.COHERENT_INTEGRATION
496 496
497 497 return processFlags
498 498
499 499
500 500 def __getBlockSize(self):
501 501 '''
502 502 Determines the number of bytes in a Voltage-type data block
503 503 '''
504 504
505 505 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
506 506 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
507 507 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
508 508 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
509 509 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
510 510 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
511 511
512 512 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
513 513 datatypeValueList = [1,2,4,8,4,8]
514 514 for index in range(len(dtypeList)):
515 515 if self.dataOutObj.dtype == dtypeList[index]:
516 516 datatypeValue = datatypeValueList[index]
517 517 break
518 518
519 519 blocksize = int(self.dataOutObj.nHeights * self.dataOutObj.nChannels * self.dataOutObj.nProfiles * datatypeValue * 2)
520 520
521 521 return blocksize
522 522
523 523
524 524 def getBasicHeader(self):
525 525 self.basicHeaderObj.size = self.basicHeaderSize #bytes
526 526 self.basicHeaderObj.version = self.versionFile
527 527 self.basicHeaderObj.dataBlock = self.nTotalBlocks
528 528
529 529 utc = numpy.floor(self.dataOutObj.dataUtcTime)
530 530 milisecond = (self.dataOutObj.dataUtcTime - utc)* 1000.0
531 531
532 532 self.basicHeaderObj.utc = utc
533 533 self.basicHeaderObj.miliSecond = milisecond
534 534 self.basicHeaderObj.timeZone = 0
535 535 self.basicHeaderObj.dstFlag = 0
536 536 self.basicHeaderObj.errorCount = 0
537 537
538 538 def getDataHeader(self):
539 539
540 540 """
541 541 Gets a copy of the First Header
542 542
543 543 Affected:
544 544 self.systemHeaderObj
545 545 self.radarControllerHeaderObj
546 546 self.dtype
547 547
548 548 Return:
549 549 None
550 550 """
551 551
552 552 self.systemHeaderObj = self.dataOutObj.systemHeaderObj.copy()
553 553 self.systemHeaderObj.nChannels = self.dataOutObj.nChannels
554 554 self.radarControllerHeaderObj = self.dataOutObj.radarControllerHeaderObj.copy()
555 555
556 556 self.getBasicHeader()
557 557
558 558 processingHeaderSize = 40 # bytes
559 559 self.processingHeaderObj.dtype = 0 # Voltage
560 560 self.processingHeaderObj.blockSize = self.__getBlockSize()
561 561 self.processingHeaderObj.profilesPerBlock = self.profilesPerBlock
562 562 self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
563 563 self.processingHeaderObj.nWindows = 1 #could be 1 or self.dataOutObj.processingHeaderObj.nWindows
564 564 self.processingHeaderObj.processFlags = self.__getProcessFlags()
565 565 self.processingHeaderObj.nCohInt = self.dataOutObj.nCohInt
566 566 self.processingHeaderObj.nIncohInt = 1 # when the source data is of type Voltage
567 567 self.processingHeaderObj.totalSpectra = 0 # when the source data is of type Voltage
568 568
569 569 if self.dataOutObj.code is not None:
570 570 self.processingHeaderObj.code = self.dataOutObj.code
571 571 self.processingHeaderObj.nCode = self.dataOutObj.nCode
572 572 self.processingHeaderObj.nBaud = self.dataOutObj.nBaud
573 573 codesize = int(8 + 4 * self.dataOutObj.nCode * self.dataOutObj.nBaud)
574 574 processingHeaderSize += codesize
575 575
576 576 if self.processingHeaderObj.nWindows != 0:
577 577 self.processingHeaderObj.firstHeight = self.dataOutObj.heightList[0]
578 578 self.processingHeaderObj.deltaHeight = self.dataOutObj.heightList[1] - self.dataOutObj.heightList[0]
579 579 self.processingHeaderObj.nHeights = self.dataOutObj.nHeights
580 580 self.processingHeaderObj.samplesWin = self.dataOutObj.nHeights
581 581 processingHeaderSize += 12
582 582
583 583 self.processingHeaderObj.size = processingHeaderSize
584 584 No newline at end of file
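Before the next file, a small standalone sketch of the on-disk block layout used by readBlock and writeBlock above: the writer transposes the (channels, profiles, heights) buffer into (profiles, heights, channels), splits it into paired real/imag integer fields and flattens it, and the reader reverses those steps. The dimensions and the int16 sample type below are illustrative assumptions, not values taken from any real header.

import numpy

# Illustrative dimensions; in VoltageIO they come from the processing/system headers.
nProfiles, nHeights, nChannels = 4, 8, 2
dtype = numpy.dtype([('real', '<i2'), ('imag', '<i2')])   # short samples, one of the dtypes listed in __getProcessFlags

# Writer side (writeBlock): (channels, profiles, heights) -> (profiles, heights, channels)
datablock = numpy.arange(nChannels * nProfiles * nHeights).reshape(nChannels, nProfiles, nHeights) * (1 + 1j)
out = numpy.zeros((nProfiles, nHeights, nChannels), dtype)
junk = numpy.transpose(datablock, (1, 2, 0))
out['real'] = junk.real
out['imag'] = junk.imag
raw = out.reshape(-1)                      # this flat record array is what tofile() writes

# Reader side (readBlock): undo the flatten/transpose and rebuild the complex block
back = raw.reshape(nProfiles, nHeights, nChannels)
back = numpy.transpose(back, (2, 0, 1))
recovered = back['real'] + back['imag'] * 1j
assert (recovered == datablock).all()

# Block size in bytes, same formula as __getBlockSize (2 bytes per component, 2 components)
blocksize = nHeights * nChannels * nProfiles * 2 * 2
assert blocksize == raw.nbytes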
@@ -1,388 +1,388
1 1 '''
2 2
3 3 $Author$
4 4 $Id$
5 5 '''
6 6
7 7 import os, sys
8 8 import numpy
9 9 import time
10 10
11 11 path = os.path.split(os.getcwd())[0]
12 12 sys.path.append(path)
13 13
14 from Data.Spectra import Spectra
14 from Data.JROData import Spectra
15 15 from IO.SpectraIO import SpectraWriter
16 16 #from Graphics.SpectraPlot import Spectrum
17 17 #from JRONoise import Noise
18 18
19 19 class SpectraProcessor:
20 20 '''
21 21 Converts Voltage data into Spectra (FFT, self- and cross-spectra) and handles integration and writing
22 22 '''
23 23
24 24 dataInObj = None
25 25
26 26 dataOutObj = None
27 27
28 28 noiseObj = None
29 29
30 30 integratorObjList = []
31 31
32 32 writerObjList = []
33 33
34 34 integratorObjIndex = None
35 35
36 36 writerObjIndex = None
37 37
38 38 profIndex = 0 # Used when the input object is a Voltage
39 39
40 40 # integratorObjList = []
41 41 #
42 42 # decoderObjList = []
43 43 #
44 44 # writerObjList = []
45 45 #
46 46 # plotterObjList = []
47 47 #
48 48 # integratorObjIndex = None
49 49 #
50 50 # decoderObjIndex = None
51 51 #
52 52 # writerObjIndex = None
53 53 #
54 54 # plotterObjIndex = None
55 55 #
56 56 # buffer = None
57 57 #
58 58 # profIndex = 0
59 59 #
60 60 # nFFTPoints = None
61 61 #
62 62 # nChannels = None
63 63 #
64 64 # nHeights = None
65 65 #
66 66 # nPairs = None
67 67 #
68 68 # pairList = None
69 69
70 70
71 71 def __init__(self):
72 72 '''
73 73 Constructor
74 74 '''
75 75
76 76 self.integratorObjIndex = None
77 77 self.writerObjIndex = None
78 78 self.integratorObjList = []
79 79 self.writerObjList = []
80 80 self.noiseObj = None
81 81 self.buffer = None
82 82 self.profIndex = 0
83 83
84 84 def setup(self, dataInObj=None, dataOutObj=None, nFFTPoints=None, pairList=None):
85 85
86 86 if dataInObj == None:
87 87 raise ValueError, "This SpectraProcessor.setup() function needs dataInObj input variable"
88 88
89 89 if dataInObj.type == "Voltage":
90 90 if nFFTPoints == None:
91 91 raise ValueError, "This SpectraProcessor.setup() function needs nFFTPoints input variable"
92 92 else:
93 93 nFFTPoints = dataInObj.nFFTPoints
94 94
95 95 self.dataInObj = dataInObj
96 96
97 97 if dataOutObj == None:
98 98 dataOutObj = Spectra()
99 99
100 100 self.dataOutObj = dataOutObj
101 101
102 102 # self.noiseObj = Noise() #the Noise() object is not included yet
103 103
104 104 ##########################################
105 105 # self.nFFTPoints = nFFTPoints
106 106 # self.nChannels = self.dataInObj.nChannels
107 107 # self.nHeights = self.dataInObj.nHeights
108 108 # self.pairList = pairList
109 109 # if pairList != None:
110 110 # self.nPairs = len(pairList)
111 111 # else:
112 112 # self.nPairs = 0
113 113 #
114 114 # self.dataOutObj.heightList = self.dataInObj.heightList
115 115 # self.dataOutObj.channelIndexList = self.dataInObj.channelIndexList
116 116 # self.dataOutObj.m_BasicHeader = self.dataInObj.m_BasicHeader.copy()
117 117 # self.dataOutObj.m_ProcessingHeader = self.dataInObj.m_ProcessingHeader.copy()
118 118 # self.dataOutObj.m_RadarControllerHeader = self.dataInObj.m_RadarControllerHeader.copy()
119 119 # self.dataOutObj.m_SystemHeader = self.dataInObj.m_SystemHeader.copy()
120 120 #
121 121 # self.dataOutObj.dataType = self.dataInObj.dataType
122 122 # self.dataOutObj.nPairs = self.nPairs
123 123 # self.dataOutObj.nChannels = self.nChannels
124 124 # self.dataOutObj.nProfiles = self.nFFTPoints
125 125 # self.dataOutObj.nHeights = self.nHeights
126 126 # self.dataOutObj.nFFTPoints = self.nFFTPoints
127 127 # #self.dataOutObj.data = None
128 128 #
129 129 # self.dataOutObj.m_SystemHeader.numChannels = self.nChannels
130 130 # self.dataOutObj.m_SystemHeader.nProfiles = self.nFFTPoints
131 131 #
132 132 # self.dataOutObj.m_ProcessingHeader.totalSpectra = self.nChannels + self.nPairs
133 133 # self.dataOutObj.m_ProcessingHeader.profilesPerBlock = self.nFFTPoints
134 134 # self.dataOutObj.m_ProcessingHeader.numHeights = self.nHeights
135 135 # self.dataOutObj.m_ProcessingHeader.shif_fft = True
136 136 #
137 137 # spectraComb = numpy.zeros( (self.nChannels+self.nPairs)*2,numpy.dtype('u1'))
138 138 # k = 0
139 139 # for i in range( 0,self.nChannels*2,2 ):
140 140 # spectraComb[i] = k
141 141 # spectraComb[i+1] = k
142 142 # k += 1
143 143 #
144 144 # k *= 2
145 145 #
146 146 # if self.pairList != None:
147 147 #
148 148 # for pair in self.pairList:
149 149 # spectraComb[k] = pair[0]
150 150 # spectraComb[k+1] = pair[1]
151 151 # k += 2
152 152 #
153 153 # self.dataOutObj.m_ProcessingHeader.spectraComb = spectraComb
154 154
155 155 return self.dataOutObj
156 156
157 157 def init(self):
158 158 #
159 159 # self.nHeights = self.dataInObj.nHeights
160 160 # self.dataOutObj.nHeights = self.nHeights
161 161 # self.dataOutObj.heightList = self.dataInObj.heightList
162 162 #
163 163
164 164 self.integratorObjIndex = 0
165 165 self.writerObjIndex = 0
166 166
167 167 if self.dataInObj.type == "Voltage":
168 168
169 169 if self.buffer is None:
170 170 self.buffer = numpy.zeros((self.nChannels,
171 171 self.nFFTPoints,
172 172 self.dataInObj.nHeights),
173 173 dtype='complex')
174 174
175 175 self.buffer[:,self.profIndex,:] = self.dataInObj.data
176 176 self.profIndex += 1
177 177
178 178 if self.profIndex == self.nFFTPoints:
179 179 self.__getFft()
180 180 self.dataOutObj.flagNoData = False
181 181
182 182 self.buffer = None
183 183 self.profIndex = 0
184 184 return
185 185
186 186 self.dataOutObj.flagNoData = True
187 187
188 188 return
189 189
190 190 #Other kinds of input data
191 191 if self.dataInObj.type == "Spectra":
192 192 self.dataOutObj.copy(self.dataInObj)
193 193 self.dataOutObj.flagNoData = False
194 194 return
195 195
196 196 raise ValueError, "The datatype is not valid"
197 197
198 198 def __getFft(self):
199 199 """
200 200 Converts Voltage values into Spectra (see the standalone sketch after this file's diff)
201 201
202 202 Affected:
203 203 self.dataOutObj.data_spc
204 204 self.dataOutObj.data_cspc
205 205 self.dataOutObj.data_dc
206 206 self.dataOutObj.heightList
207 207 self.dataOutObj.m_BasicHeader
208 208 self.dataOutObj.m_ProcessingHeader
209 209 self.dataOutObj.m_RadarControllerHeader
210 210 self.dataOutObj.m_SystemHeader
211 211 self.profIndex
212 212 self.buffer
213 213 self.dataOutObj.flagNoData
214 214 self.dataOutObj.dataType
215 215 self.dataOutObj.nPairs
216 216 self.dataOutObj.nChannels
217 217 self.dataOutObj.nProfiles
218 218 self.dataOutObj.m_SystemHeader.numChannels
219 219 self.dataOutObj.m_ProcessingHeader.totalSpectra
220 220 self.dataOutObj.m_ProcessingHeader.profilesPerBlock
221 221 self.dataOutObj.m_ProcessingHeader.numHeights
222 222 self.dataOutObj.m_ProcessingHeader.spectraComb
223 223 self.dataOutObj.m_ProcessingHeader.shif_fft
224 224 """
225 225
226 226 if self.dataInObj.flagNoData:
227 227 return 0
228 228
229 229 fft_volt = numpy.fft.fft(self.buffer,axis=1)
230 230 dc = fft_volt[:,0,:]
231 231
232 232 #compute the self-spectra
233 233 fft_volt = numpy.fft.fftshift(fft_volt,axes=(1,))
234 234 spc = fft_volt * numpy.conjugate(fft_volt)
235 235 spc = spc.real
236 236
237 237 blocksize = 0
238 238 blocksize += dc.size
239 239 blocksize += spc.size
240 240
241 241 cspc = None
242 242 pairIndex = 0
243 243 if self.pairList != None:
244 244 #compute the cross-spectra
245 245 cspc = numpy.zeros((self.nPairs, self.nFFTPoints, self.nHeights), dtype='complex')
246 246 for pair in self.pairList:
247 247 cspc[pairIndex,:,:] = numpy.abs(fft_volt[pair[0],:,:] * numpy.conjugate(fft_volt[pair[1],:,:]))
248 248 pairIndex += 1
249 249 blocksize += cspc.size
250 250
251 251 self.dataOutObj.data_spc = spc
252 252 self.dataOutObj.data_cspc = cspc
253 253 self.dataOutObj.data_dc = dc
254 254 self.dataOutObj.m_ProcessingHeader.blockSize = blocksize
255 255 self.dataOutObj.m_BasicHeader.utc = self.dataInObj.m_BasicHeader.utc
256 256
257 257 # self.getNoise()
258 258
259 259 def addWriter(self, wrpath, blocksPerFile):
260 260 objWriter = SpectraWriter(self.dataOutObj)
261 261 objWriter.setup(wrpath, blocksPerFile)
262 262 self.writerObjList.append(objWriter)
263 263
264 264 def addIntegrator(self,N,timeInterval):
265 265
266 266 objIncohInt = IncoherentIntegration(N,timeInterval)
267 267 self.integratorObjList.append(objIncohInt)
268 268
269 269 def writeData(self, wrpath, blocksPerFile):
270 270 if self.dataOutObj.flagNoData:
271 271 return 0
272 272
273 273 if len(self.writerObjList) <= self.writerObjIndex:
274 274 self.addWriter(wrpath, blocksPerFile)
275 275
276 276 self.writerObjList[self.writerObjIndex].putData()
277 277
278 278 self.writerObjIndex += 1
279 279
280 280 def integrator(self, N=None, timeInterval=None):
281 281
282 282 if self.dataOutObj.flagNoData:
283 283 return 0
284 284
285 285 if len(self.integratorObjList) <= self.integratorObjIndex:
286 286 self.addIntegrator(N,timeInterval)
287 287
288 288 myIncohIntObj = self.integratorObjList[self.integratorObjIndex]
289 289 myIncohIntObj.exe(data=self.dataOutObj.data_spc,timeOfData=self.dataOutObj.m_BasicHeader.utc)
290 290
291 291 if myIncohIntObj.isReady:
292 292 self.dataOutObj.data_spc = myIncohIntObj.data
293 293 self.dataOutObj.nAvg = myIncohIntObj.navg
294 294 self.dataOutObj.m_ProcessingHeader.incoherentInt = self.dataInObj.m_ProcessingHeader.incoherentInt*myIncohIntObj.navg
295 295 #print "myIncohIntObj.navg: ",myIncohIntObj.navg
296 296 self.dataOutObj.flagNoData = False
297 297
298 298 """Calcular el ruido"""
299 299 self.getNoise()
300 300 else:
301 301 self.dataOutObj.flagNoData = True
302 302
303 303 self.integratorObjIndex += 1
304 304
305 305
306 306
307 307
308 308 class IncoherentIntegration:
309 309
310 310 integ_counter = None
311 311 data = None
312 312 navg = None
313 313 buffer = None
314 314 nIncohInt = None
315 315
316 316 def __init__(self, N = None, timeInterval = None):
317 317 """
318 318 N - number of incoherent integrations, integer value
319 319 timeInterval - integration time interval [min], integer value
320 320 """
321 321
322 322 self.data = None
323 323 self.navg = None
324 324 self.buffer = None
325 325 self.timeOut = None
326 326 self.exitCondition = False
327 327 self.isReady = False
328 328 self.nIncohInt = N
329 329 self.integ_counter = 0
330 330 if timeInterval!=None:
331 331 self.timeIntervalInSeconds = timeInterval * 60. #if (type(timeInterval)!=integer) -> change this line
332 332
333 333 if ((timeInterval==None) and (N==None)):
334 334 print 'N = None ; timeInterval = None'
335 335 sys.exit(0)
336 336 elif timeInterval == None:
337 337 self.timeFlag = False
338 338 else:
339 339 self.timeFlag = True
340 340
341 341
342 342 def exe(self,data,timeOfData):
343 343 """
344 344 data
345 345
346 346 timeOfData [seconds]
347 347 """
348 348
349 349 if self.timeFlag:
350 350 if self.timeOut == None:
351 351 self.timeOut = timeOfData + self.timeIntervalInSeconds
352 352
353 353 if timeOfData < self.timeOut:
354 354 if self.buffer is None:
355 355 self.buffer = data
356 356 else:
357 357 self.buffer = self.buffer + data
358 358 self.integ_counter += 1
359 359 else:
360 360 self.exitCondition = True
361 361
362 362 else:
363 363 if self.integ_counter < self.nIncohInt:
364 364 if self.buffer is None:
365 365 self.buffer = data
366 366 else:
367 367 self.buffer = self.buffer + data
368 368
369 369 self.integ_counter += 1
370 370
371 371 if self.integ_counter == self.nIncohInt:
372 372 self.exitCondition = True
373 373
374 374 if self.exitCondition:
375 375 self.data = self.buffer
376 376 self.navg = self.integ_counter
377 377 self.isReady = True
378 378 self.buffer = None
379 379 self.timeOut = None
380 380 self.integ_counter = 0
381 381 self.exitCondition = False
382 382
383 383 if self.timeFlag:
384 384 self.buffer = data
385 385 self.timeOut = timeOfData + self.timeIntervalInSeconds
386 386 else:
387 387 self.isReady = False
388 388 No newline at end of file
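A standalone sketch of the spectra computation performed by __getFft above, using a random complex buffer in place of accumulated voltage profiles. Note that this sketch keeps the cross-spectra complex (phase preserved), whereas the method above stores their magnitude; the buffer dimensions and the channel pair below are hypothetical values chosen only for illustration.

import numpy

# Hypothetical buffer of accumulated profiles: (channels, FFT points, heights)
nChannels, nFFTPoints, nHeights = 2, 16, 8
volt = (numpy.random.randn(nChannels, nFFTPoints, nHeights) +
        1j * numpy.random.randn(nChannels, nFFTPoints, nHeights))

fft_volt = numpy.fft.fft(volt, axis=1)
dc = fft_volt[:, 0, :]                                   # zero-frequency (DC) component

fft_volt = numpy.fft.fftshift(fft_volt, axes=(1,))       # center the zero frequency
spc = (fft_volt * numpy.conjugate(fft_volt)).real        # self-spectra, one per channel

pairList = [(0, 1)]                                      # hypothetical channel pair
cspc = numpy.zeros((len(pairList), nFFTPoints, nHeights), dtype='complex')
for pairIndex, pair in enumerate(pairList):
    cspc[pairIndex] = fft_volt[pair[0]] * numpy.conjugate(fft_volt[pair[1]])

blocksize = dc.size + spc.size + cspc.size               # same bookkeeping as __getFft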
@@ -1,219 +1,220
1 1 '''
2 2
3 3 $Author$
4 4 $Id$
5 5 '''
6 6
7 7 import os
8 8 import sys
9 9 import numpy
10 10 import datetime
11 11
12 12 path = os.path.split(os.getcwd())[0]
13 13 sys.path.append(path)
14 14
15 from Data.Voltage import Voltage
15 from Data.JROData import Voltage
16 16 from IO.VoltageIO import VoltageWriter
17 17 from Graphics2.schainPlotTypes import ScopeFigure
18 18
19 19 class VoltageProcessor:
20
20 21 dataInObj = None
21 22 dataOutObj = None
22 23 integratorObjIndex = None
23 24 writerObjIndex = None
24 25 integratorObjList = None
25 26 writerObjList = None
26 27
27 28 def __init__(self):
28 29 self.integratorObjIndex = None
29 30 self.writerObjIndex = None
30 31 self.plotObjIndex = None
31 32 self.integratorObjList = []
32 33 self.writerObjList = []
33 34 self.plotObjList = []
34 35
35 36 def setup(self,dataInObj=None,dataOutObj=None):
36 37 self.dataInObj = dataInObj
37 38
38 39 if dataOutObj == None:
39 40 dataOutObj = Voltage()
40 41
41 42 self.dataOutObj = dataOutObj
42 43
43 44 return self.dataOutObj
44 45
45 46 def init(self):
46 47 self.integratorObjIndex = 0
47 48 self.writerObjIndex = 0
48 49 self.plotObjIndex = 0
49 50
50 51 if not(self.dataInObj.flagNoData):
51 52 self.dataOutObj.copy(self.dataInObj)
52 53 # There is no need to copy the dataInObj attributes on every init();
53 54 # the copy should only be made for each new block of data
54 55
55 56 def addScope(self, idfigure, nframes, wintitle, driver):
56 57 if idfigure==None:
57 58 idfigure = self.plotObjIndex
58 59
59 60 scopeObj = ScopeFigure(idfigure, nframes, wintitle, driver)
60 61 self.plotObjList.append(scopeObj)
61 62
62 63 def plotScope(self,
63 64 idfigure=None,
64 65 minvalue=None,
65 66 maxvalue=None,
66 67 xmin=None,
67 68 xmax=None,
68 69 wintitle='',
69 70 driver='plplot',
70 71 save=False,
71 72 gpath=None,
72 73 titleList=None,
73 74 xlabelList=None,
74 75 ylabelList=None,
75 76 type="power"):
76 77
77 78 if self.dataOutObj.flagNoData:
78 79 return 0
79 80
80 81 nframes = len(self.dataOutObj.channelList)
81 82
82 83 if len(self.plotObjList) <= self.plotObjIndex:
83 84 self.addScope(idfigure, nframes, wintitle, driver)
84 85
85 86
86 87 if type=="power":
87 88 data1D = self.dataOutObj.data * numpy.conjugate(self.dataOutObj.data)
88 89 data1D = data1D.real
89 90
90 91 if type =="iq":
91 92 data1D = self.dataOutObj.data
92 93
93 94 thisDatetime = datetime.datetime.fromtimestamp(self.dataOutObj.dataUtcTime)
94 95
95 96 dateTime = "%s"%(thisDatetime.strftime("%d-%b-%Y %H:%M:%S"))
96 97 date = "%s"%(thisDatetime.strftime("%d-%b-%Y"))
97 98
98 99 figureTitle = "Scope Plot Radar Data: " + date
99 100
100 101 plotObj = self.plotObjList[self.plotObjIndex]
101 102
102 103 plotObj.plot1DArray(data1D,
103 104 self.dataOutObj.heightList,
104 105 self.dataOutObj.channelList,
105 106 xmin,
106 107 xmax,
107 108 minvalue,
108 109 maxvalue,
109 110 figureTitle,
110 111 save,
111 112 gpath)
112 113
113 114 self.plotObjIndex += 1
114 115
115 116
116 117 def addIntegrator(self,N,timeInterval):
117 118 objCohInt = CoherentIntegrator(N,timeInterval)
118 119 self.integratorObjList.append(objCohInt)
119 120
120 121 def addWriter(self, wrpath, blocksPerFile, profilesPerBlock):
121 122 writerObj = VoltageWriter(self.dataOutObj)
122 123 writerObj.setup(wrpath,blocksPerFile,profilesPerBlock)
123 124 self.writerObjList.append(writerObj)
124 125
125 126 def writeData(self, wrpath, blocksPerFile, profilesPerBlock):
126 127
127 128 if self.dataOutObj.flagNoData:
128 129 return 0
129 130
130 131 if len(self.writerObjList) <= self.writerObjIndex:
131 132 self.addWriter(wrpath, blocksPerFile, profilesPerBlock)
132 133
133 134 self.writerObjList[self.writerObjIndex].putData()
134 135
135 136 self.writerObjIndex += 1
136 137
137 138 def integrator(self, N=None, timeInterval=None):
138 139 if self.dataOutObj.flagNoData:
139 140 return 0
140 141 if len(self.integratorObjList) <= self.integratorObjIndex:
141 142 self.addIntegrator(N,timeInterval)
142 143
143 144 myCohIntObj = self.integratorObjList[self.integratorObjIndex]
144 145 myCohIntObj.exe(data=self.dataOutObj.data,timeOfData=None)
145 146
146 147
147 148
148 149 class CoherentIntegrator:
149 150
150 151 integ_counter = None
151 152 data = None
152 153 navg = None
153 154 buffer = None
154 155 nCohInt = None
155 156
156 157 def __init__(self, N=None,timeInterval=None):
157 158
158 159 self.data = None
159 160 self.navg = None
160 161 self.buffer = None
161 162 self.timeOut = None
162 163 self.exitCondition = False
163 164 self.isReady = False
164 165 self.nCohInt = N
165 166 self.integ_counter = 0
166 167 if timeInterval!=None:
167 168 self.timeIntervalInSeconds = timeInterval * 60. #if (type(timeInterval)!=integer) -> change this line
168 169
169 170 if ((timeInterval==None) and (N==None)):
170 171 raise ValueError, "N = None ; timeInterval = None"
171 172
172 173 if timeInterval == None:
173 174 self.timeFlag = False
174 175 else:
175 176 self.timeFlag = True
176 177
177 178 def exe(self, data, timeOfData):
178 179
179 180 if self.timeFlag:
180 181 if self.timeOut == None:
181 182 self.timeOut = timeOfData + self.timeIntervalInSeconds
182 183
183 184 if timeOfData < self.timeOut:
184 185 if self.buffer is None:
185 186 self.buffer = data
186 187 else:
187 188 self.buffer = self.buffer + data
188 189 self.integ_counter += 1
189 190 else:
190 191 self.exitCondition = True
191 192
192 193 else:
193 194 if self.integ_counter < self.nCohInt:
194 195 if self.buffer is None:
195 196 self.buffer = data
196 197 else:
197 198 self.buffer = self.buffer + data
198 199
199 200 self.integ_counter += 1
200 201
201 202 if self.integ_counter == self.nCohInt:
202 203 self.exitCondition = True
203 204
204 205 if self.exitCondition:
205 206 self.data = self.buffer
206 207 self.navg = self.integ_counter
207 208 self.isReady = True
208 209 self.buffer = None
209 210 self.timeOut = None
210 211 self.integ_counter = 0
211 212 self.exitCondition = False
212 213
213 214 if self.timeFlag:
214 215 self.buffer = data
215 216 self.timeOut = timeOfData + self.timeIntervalInSeconds
216 217 else:
217 218 self.isReady = False
218 219
219 220
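For orientation, a stripped-down sketch of the count-based branch shared by CoherentIntegrator above and IncoherentIntegration in SpectraProcessor: inputs are summed until N of them have been accumulated, then the result is exposed and the state reset (the timeInterval branch only swaps the counter test for a timeout test). SimpleIntegrator is a hypothetical name used for this illustration only; it is not part of the modules in this commit.

import numpy

class SimpleIntegrator:
    """Count-based integration: sum N inputs, then expose the result and reset."""

    def __init__(self, N):
        self.nInt = N
        self.buffer = None
        self.integ_counter = 0
        self.isReady = False
        self.data = None
        self.navg = None

    def exe(self, data):
        # accumulate the new input
        if self.buffer is None:
            self.buffer = data.copy()
        else:
            self.buffer = self.buffer + data
        self.integ_counter += 1

        # after N inputs, publish the sum and reset the internal state
        self.isReady = False
        if self.integ_counter == self.nInt:
            self.data = self.buffer
            self.navg = self.integ_counter
            self.isReady = True
            self.buffer = None
            self.integ_counter = 0

# Usage: feed profiles one at a time; every N calls isReady turns True
integrator = SimpleIntegrator(N=4)
for k in range(8):
    profile = numpy.ones((2, 8), dtype='complex') * k    # fake (channels, heights) profile
    integrator.exe(profile)
    if integrator.isReady:
        print integrator.navg, integrator.data[0, 0]     # prints 4 (6+0j), then 4 (22+0j)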
1 NO CONTENT: file was removed
1 NO CONTENT: file was removed