In this version the rawdata and pdata READ functions operate correctly.
Daniel Valdez -
r122:1ab50a21fdd6
@@ -0,0 +1,1
1
@@ -0,0 +1,68
1
2 import os, sys
3 import time, datetime
4
5 path = os.path.split(os.getcwd())[0]
6 sys.path.append(path)
7
8 from Data.Voltage import Voltage
9 from Data.Spectra import Spectra
10 from IO.VoltageIO import *
11 from IO.SpectraIO import *
12 from Processing.VoltageProcessor import *
13
14
15
16 class TestSChain:
17
18 def __init__(self):
19 self.setValues()
20 self.createObjects()
21 self.testSChain()
22
23 def setValues(self):
24 self.path = "/Users/jro/Documents/RadarData/MST_ISR/MST"
25 # self.path = "/home/roj-idl71/Data/RAWDATA/IMAGING"
26 self.path = "/Users/danielangelsuarezmunoz/Data/EW_Drifts"
27 self.path = "/Users/danielangelsuarezmunoz/Data/IMAGING"
28
29 self.wrpath = "/Users/jro/Documents/RadarData/wr_data"
30
31 self.startDate = datetime.date(2012,3,1)
32 self.endDate = datetime.date(2012,3,30)
33
34 self.startTime = datetime.time(0,0,0)
35 self.endTime = datetime.time(14,1,1)
36
37 def createObjects(self):
38
39 self.readerObj = SpectraReader()
40
41 self.voltObj1 = self.readerObj.setup(
42 path = self.path,
43 startDate = self.startDate,
44 endDate = self.endDate,
45 startTime = self.startTime,
46 endTime = self.endTime,
47 expLabel = '',
48 online = 0)
49
50
51
52 def testSChain(self):
53
54 ini = time.time()
55
56 while(True):
57 self.readerObj.getData()
58
59 if self.readerObj.flagNoMoreFiles:
60 break
61
62 if self.readerObj.flagIsNewBlock:
63 print 'Block No %04d, Time: %s' %(self.readerObj.nTotalBlocks,
64 datetime.datetime.fromtimestamp(self.readerObj.basicHeaderObj.utc),)
65
66
67 if __name__ == '__main__':
68 TestSChain() No newline at end of file
@@ -1,57 +1,57
1 1 import os, sys
2 2 import copy
3 3 import numpy
4 4
5 5 path = os.path.split(os.getcwd())[0]
6 6 sys.path.append(path)
7 7
8 8 from IO.JROHeader import SystemHeader, RadarControllerHeader
9 9
10 class JROData():
10 class JROData:
11 11
12 12 # m_BasicHeader = BasicHeader()
13 13 # m_ProcessingHeader = ProcessingHeader()
14 14
15 15 systemHeaderObj = SystemHeader()
16 16
17 17 radarControllerHeaderObj = RadarControllerHeader()
18 18
19 data = None
19 # data = None
20 20
21 21 type = None
22 22
23 23 dtype = None
24 24
25 25 nChannels = None
26 26
27 27 nHeights = None
28 28
29 29 nProfiles = None
30 30
31 31 heightList = None
32 32
33 33 channelList = None
34 34
35 35 channelIndexList = None
36 36
37 37 flagNoData = False
38 38
39 39 flagTimeBlock = False
40 40
41 41 dataUtcTime = None
42 42
43 43 def __init__(self):
44 44
45 45 raise ValueError, "This class has not been implemented"
46 46
47 47 def copy(self, inputObj=None):
48 48
49 49 if inputObj == None:
50 50 return copy.deepcopy(self)
51 51
52 52 for key in inputObj.__dict__.keys():
53 53 self.__dict__[key] = inputObj.__dict__[key]
54 54
55 55 def deepcopy(self):
56 56
57 57 return copy.deepcopy(self) No newline at end of file
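For reference, a brief sketch (not part of the diff) of how the copy helper above behaves. JROData itself raises in its constructor, so a hypothetical minimal stand-in class is used here; the logic mirrors JROData.copy: with no argument it returns a deep copy, otherwise it mirrors the attributes of the given object onto self.

    import copy

    class Bag(object):
        # hypothetical stand-in for a concrete JROData subclass
        def copy(self, inputObj=None):
            if inputObj == None:
                return copy.deepcopy(self)          # no argument: deep copy of self
            for key in inputObj.__dict__.keys():
                self.__dict__[key] = inputObj.__dict__[key]   # mirror attributes

    a, b = Bag(), Bag()
    a.nChannels = 4
    b.copy(a)
    print(b.nChannels)    # -> 4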
1 NO CONTENT: modified file
@@ -1,48 +1,50
1 1 import os, sys
2 2 import numpy
3 3
4 4 path = os.path.split(os.getcwd())[0]
5 5 sys.path.append(path)
6 6
7 7 from JROData import JROData
8 8 from IO.JROHeader import SystemHeader, RadarControllerHeader
9 9
10 10 class Voltage(JROData):
11 11
12 12 nCohInt = None
13 13
14 data = None
15
14 16 def __init__(self):
15 17 '''
16 18 Constructor
17 19 '''
18 20
19 21 self.m_RadarControllerHeader = RadarControllerHeader()
20 22
21 23 self.m_SystemHeader = SystemHeader()
22 24
23 25 self.type = "Voltage"
24 26
25 27 #data is a 2-dimensional numpy array (channels, heights)
26 28 self.data = None
27 29
28 30 self.dtype = None
29 31
30 32 self.nChannels = 0
31 33
32 34 self.nHeights = 0
33 35
34 36 self.nProfiles = None
35 37
36 38 self.heightList = None
37 39
38 40 self.channelList = None
39 41
40 42 self.channelIndexList = None
41 43
42 44 self.flagNoData = True
43 45
44 46 self.flagTimeBlock = False
45 47
46 48 self.dataUtcTime = None
47 49
48 50 self.nCohInt = None
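As a side note, a minimal sketch (outside the diff, with made-up sizes) of the (channels, heights) layout that the Voltage.data attribute is documented to hold:

    import numpy

    nChannels, nHeights = 4, 100          # assumed example sizes
    volt_data = numpy.zeros((nChannels, nHeights), dtype=numpy.complex64)
    print(volt_data.shape)                # -> (4, 100): one complex sample per channel and height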
@@ -1,727 +1,738
1 1 import os, sys
2 2 import glob
3 3 import time
4 4 import numpy
5 5 import fnmatch
6 6 import time, datetime
7 7
8 8 path = os.path.split(os.getcwd())[0]
9 9 sys.path.append(path)
10 10
11 11 from JROHeader import *
12 12 from Data.JROData import JROData
13 13
14 14 def isNumber(str):
15 15 """
16 16 Checks whether the characters that make up a string can be converted to a number.
17 17
18 18 Exceptions:
19 19 If a given string cannot be converted to a number
20 20 Input:
21 21 str, the string to be analyzed to determine whether or not it can be converted to a number
22 22
23 23 Return:
24 24 True : if the string is numeric
25 25 False : if it is not a numeric string
26 26 """
27 27 try:
28 28 float( str )
29 29 return True
30 30 except:
31 31 return False
32 32
33 33 def isThisFileinRange(filename, startUTSeconds, endUTSeconds):
34 34 """
35 35 This function determines whether a data file falls within the specified date range.
36 36
37 37 Inputs:
38 38 filename : full name of the data file in Jicamarca format (.r)
39 39
40 40 startUTSeconds : start date of the selected range, given in
41 41 seconds counted from 01/01/1970.
42 42 endUTSeconds : end date of the selected range, given in
43 43 seconds counted from 01/01/1970.
44 44
45 45 Return:
46 46 Boolean : Returns True if the data file contains data within the specified
47 47 date range, otherwise returns False.
48 48
49 49 Exceptions:
50 50 If the file does not exist or cannot be opened
51 51 If the header cannot be read.
52 52
53 53 """
54 54 basicHeaderObj = BasicHeader()
55 55
56 56 try:
57 57 fp = open(filename,'rb')
58 58 except:
59 59 raise IOError, "The file %s can't be opened" %(filename)
60 60
61 61 sts = basicHeaderObj.read(fp)
62 62 fp.close()
63 63
64 64 if not(sts):
65 65 print "Skipping the file %s because it has not a valid header" %(filename)
66 66 return 0
67 67
68 68 if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
69 69 return 0
70 70
71 71 return 1
72 72
73 73
74 74
75 75
76 76 class JRODataIO:
77 77
78 78 c = 3E8
79 79
80 80 basicHeaderObj = BasicHeader()
81 81
82 82 systemHeaderObj = SystemHeader()
83 83
84 84 radarControllerHeaderObj = RadarControllerHeader()
85 85
86 86 processingHeaderObj = ProcessingHeader()
87 87
88 88 online = 0
89 89
90 90 dtype = None
91 91
92 92 pathList = []
93 93
94 94 filenameList = []
95 95
96 96 filename = None
97 97
98 98 ext = None
99 99
100 100 flagNoMoreFiles = 0
101 101
102 102 flagIsNewFile = 1
103 103
104 104 flagTimeBlock = 0
105 105
106 106 flagIsNewBlock = 0
107 107
108 108 fp = None
109 109
110 110 firstHeaderSize = 0
111 111
112 112 basicHeaderSize = 24
113 113
114 114 versionFile = 1103
115 115
116 116 fileSize = None
117 117
118 118 ippSeconds = None
119 119
120 120 fileSizeByHeader = None
121 121
122 122 fileIndex = None
123 123
124 124 profileIndex = None
125 125
126 126 blockIndex = None
127 127
128 128 nTotalBlocks = None
129 129
130 130 maxTimeStep = 30
131 131
132 132 lastUTTime = None
133 133
134 datablock = None
135
136 dataOutObj = None
137
138 blocksize = None
139
134 140 def __init__(self):
135 141 pass
136 142
137 143 class JRODataReader(JRODataIO):
144
145 nReadBlocks = 0
146
138 147 def __init__(self):
148
139 149 pass
140 150
141 151 def createObjByDefault(self):
142 152 """
143 153
144 154 """
145 155 raise ValueError, "This method has not been implemented"
146 156
147 157 def getBlockDimension(self):
148 158
149 159 raise ValueError, "No implemented"
150 160
151 161 def __searchFilesOffLine(self,
152 162 path,
153 163 startDate,
154 164 endDate,
155 165 startTime=datetime.time(0,0,0),
156 166 endTime=datetime.time(23,59,59),
157 167 set=None,
158 168 expLabel="",
159 169 ext=".r"):
160 170 dirList = []
161 171 for thisPath in os.listdir(path):
162 172 if os.path.isdir(os.path.join(path,thisPath)):
163 173 dirList.append(thisPath)
164 174
165 175 if not(dirList):
166 176 return None, None
167 177
168 178 pathList = []
169 179 dateList = []
170 180
171 181 thisDate = startDate
172 182
173 183 while(thisDate <= endDate):
174 184 year = thisDate.timetuple().tm_year
175 185 doy = thisDate.timetuple().tm_yday
176 186
177 187 match = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy))
178 188 if len(match) == 0:
179 189 thisDate += datetime.timedelta(1)
180 190 continue
181 191
182 192 pathList.append(os.path.join(path,match[0],expLabel))
183 193 dateList.append(thisDate)
184 194 thisDate += datetime.timedelta(1)
185 195
186 196 filenameList = []
187 197 for index in range(len(pathList)):
188 198
189 199 thisPath = pathList[index]
190 200 fileList = glob.glob1(thisPath, "*%s" %ext)
191 201 fileList.sort()
192 202
193 203 #Search for data within the indicated time range
194 204 thisDate = dateList[index]
195 205 startDT = datetime.datetime.combine(thisDate, startTime)
196 206 endDT = datetime.datetime.combine(thisDate, endTime)
197 207
198 208 startUtSeconds = time.mktime(startDT.timetuple())
199 209 endUtSeconds = time.mktime(endDT.timetuple())
200 210
201 211 for file in fileList:
202 212
203 213 filename = os.path.join(thisPath,file)
204 214
205 215 if isThisFileinRange(filename, startUtSeconds, endUtSeconds):
206 216 filenameList.append(filename)
207 217
208 218 if not(filenameList):
209 219 return None, None
210 220
211 221 self.filenameList = filenameList
212 222
213 223 return pathList, filenameList
214 224
215 225 def setup(self,dataOutObj=None,
216 path=None,startDate=None,
226 path=None,
227 startDate=None,
217 228 endDate=None,
218 229 startTime=datetime.time(0,0,0),
219 230 endTime=datetime.time(23,59,59),
220 231 set=0,
221 232 expLabel = "",
222 233 ext = None,
223 234 online = 0):
224 235
225 236 if path == None:
226 237 raise ValueError, "The path is not valid"
227 238
228 239 if ext == None:
229 240 ext = self.ext
230 241
231 242 if dataOutObj == None:
232 243 dataOutObj = self.createObjByDefault()
233 244
234 245 self.dataOutObj = dataOutObj
235 246
236 247 if online:
237 248 pass
238 249
239 250 else:
240 251 print "Searching file in offline mode"
241 252 pathList, filenameList = self.__searchFilesOffLine(path, startDate, endDate, startTime, endTime, set, expLabel, ext)
242 253 if not(pathList):
243 254 print "No files in range: %s - %s"%(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
244 255 return None
245 256 self.fileIndex = -1
246 257 self.pathList = pathList
247 258 self.filenameList = filenameList
248 259
249 260 self.online = online
250 261 ext = ext.lower()
251 262 self.ext = ext
252 263
253 264 if not(self.setNextFile()):
254 265 if (startDate!=None) and (endDate!=None):
255 266 print "No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
256 267 elif startDate != None:
257 268 print "No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
258 269 else:
259 270 print "No files"
260 271
261 272 return None
262 273
263 274 # self.updateDataHeader()
264 275
265 276 return self.dataOutObj
266 277
267 278 def __setNextFileOffline(self):
268 279 idFile = self.fileIndex
269 280
270 281 while (True):
271 282 idFile += 1
272 283 if not(idFile < len(self.filenameList)):
273 284 self.flagNoMoreFiles = 1
274 285 print "No more Files"
275 286 return 0
276 287
277 288 filename = self.filenameList[idFile]
278 289
279 290 if not(self.__verifyFile(filename)):
280 291 continue
281 292
282 293 fileSize = os.path.getsize(filename)
283 294 fp = open(filename,'rb')
284 295 break
285 296
286 297 self.flagIsNewFile = 1
287 298 self.fileIndex = idFile
288 299 self.filename = filename
289 300 self.fileSize = fileSize
290 301 self.fp = fp
291 302
292 303 print "Setting the file: %s"%self.filename
293 304
294 305 return 1
295 306
296 307
297 308
298 309 def setNextFile(self):
299 310 if self.fp != None:
300 311 self.fp.close()
301 312
302 313 if self.online:
303 314 newFile = self.__setNextFileOnline()
304 315 else:
305 316 newFile = self.__setNextFileOffline()
306 317
307 318 if not(newFile):
308 319 return 0
309 320
310 321 self.__readFirstHeader()
311 322 self.nReadBlocks = 0
312 323 return 1
313 324
314 325 def __setNewBlock(self):
315 326 if self.fp == None:
316 327 return 0
317 328
318 329 if self.flagIsNewFile:
319 330 return 1
320 331
321 332 self.lastUTTime = self.basicHeaderObj.utc
322 333 currentSize = self.fileSize - self.fp.tell()
323 334 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
324 335
325 336 if (currentSize >= neededSize):
326 337 self.__rdBasicHeader()
327 338 return 1
328 339
329 340 if not(self.setNextFile()):
330 341 return 0
331 342
332 343 deltaTime = self.basicHeaderObj.utc - self.lastUTTime #
333 344
334 345 self.flagTimeBlock = 0
335 346
336 347 if deltaTime > self.maxTimeStep:
337 348 self.flagTimeBlock = 1
338 349
339 350 return 1
340 351
341 352
342 353 def readNextBlock(self):
343 354 if not(self.__setNewBlock()):
344 355 return 0
345 356
346 357 if not(self.readBlock()):
347 358 return 0
348 359
349 360 return 1
350 361
351 362 def __rdProcessingHeader(self, fp=None):
352 363 if fp == None:
353 364 fp = self.fp
354 365
355 366 self.processingHeaderObj.read(fp)
356 367
357 368 def __rdRadarControllerHeader(self, fp=None):
358 369 if fp == None:
359 370 fp = self.fp
360 371
361 372 self.radarControllerHeaderObj.read(fp)
362 373
363 374 def __rdSystemHeader(self, fp=None):
364 375 if fp == None:
365 376 fp = self.fp
366 377
367 378 self.systemHeaderObj.read(fp)
368 379
369 380 def __rdBasicHeader(self, fp=None):
370 381 if fp == None:
371 382 fp = self.fp
372 383
373 384 self.basicHeaderObj.read(fp)
374 385
375 386
376 387 def __readFirstHeader(self):
377 388 self.__rdBasicHeader()
378 389 self.__rdSystemHeader()
379 390 self.__rdRadarControllerHeader()
380 391 self.__rdProcessingHeader()
381 392
382 393 self.firstHeaderSize = self.basicHeaderObj.size
383 394
384 395 datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
385 396 if datatype == 0:
386 397 datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
387 398 elif datatype == 1:
388 399 datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')])
389 400 elif datatype == 2:
390 401 datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')])
391 402 elif datatype == 3:
392 403 datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')])
393 404 elif datatype == 4:
394 405 datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')])
395 406 elif datatype == 5:
396 407 datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')])
397 408 else:
398 409 raise ValueError, 'Data type was not defined'
399 410
400 411 self.dtype = datatype_str
401 412 self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
402 413 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1)
403 414 # self.dataOutObj.channelList = numpy.arange(self.systemHeaderObj.numChannels)
404 415 # self.dataOutObj.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
405 416 self.getBlockDimension()
406 417
407 418
408 419 def __verifyFile(self, filename, msgFlag=True):
409 420 msg = None
410 421 try:
411 422 fp = open(filename, 'rb')
412 423 currentPosition = fp.tell()
413 424 except:
414 425 if msgFlag:
415 426 print "The file %s can't be opened" % (filename)
416 427 return False
417 428
418 429 neededSize = self.processingHeaderObj.blockSize + self.firstHeaderSize
419 430
420 431 if neededSize == 0:
421 432 basicHeaderObj = BasicHeader()
422 433 systemHeaderObj = SystemHeader()
423 434 radarControllerHeaderObj = RadarControllerHeader()
424 435 processingHeaderObj = ProcessingHeader()
425 436
426 437 try:
427 438 if not( basicHeaderObj.read(fp) ): raise ValueError
428 439 if not( systemHeaderObj.read(fp) ): raise ValueError
429 440 if not( radarControllerHeaderObj.read(fp) ): raise ValueError
430 441 if not( processingHeaderObj.read(fp) ): raise ValueError
431 442 data_type = int(numpy.log2((processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
432 443
433 444 neededSize = processingHeaderObj.blockSize + basicHeaderObj.size
434 445
435 446 except:
436 447 if msgFlag:
437 448 print "\tThe file %s is empty or it hasn't enough data" % filename
438 449
439 450 fp.close()
440 451 return False
441 452 else:
442 453 msg = "\tSkipping the file %s due to it hasn't enough data" %filename
443 454
444 455 fp.close()
445 456 fileSize = os.path.getsize(filename)
446 457 currentSize = fileSize - currentPosition
447 458 if currentSize < neededSize:
448 459 if msgFlag and (msg != None):
449 460 print msg #print"\tSkipping the file %s due to it hasn't enough data" %filename
450 461 return False
451 462
452 463 return True
453 464
465 def getData():
466 pass
467
468 def hasNotDataInBuffer():
469 pass
470
471 def readBlock():
472 pass
473
454 474 class JRODataWriter(JRODataIO):
455 475
456 476 """
457 477 This class allows writing data to processed files (.r or .pdata). Data is always
458 478 written in blocks.
459 479 """
460 480
461 481 blockIndex = 0
462 482
463 483 setFile = None
464 484
465 485
466 486 def __init__(self, dataOutObj=None):
467 487 raise ValueError, "Not implemented"
468 488
469 489
470 490 def hasAllDataInBuffer(self):
471 491 raise ValueError, "Not implemented"
472 492
473 493
474 494 def setBlockDimension(self):
475 495 raise ValueError, "Not implemented"
476 496
477 497
478 498 def writeBlock(self):
479 499 raise ValueError, "No implemented"
480 500
481 501
482 502 def putData(self):
483 503 raise ValueError, "No implemented"
484 504
485 505
486 506 def __writeFirstHeader(self):
487 507 """
488 508 Writes the first header of the file, i.e. the Basic header and the Long header (SystemHeader, RadarControllerHeader, ProcessingHeader)
489 509
490 510 Affected:
491 511 __dataType
492 512
493 513 Return:
494 514 None
495 515 """
496 516
497 517 # COMPUTE PARAMETERS
498 518
499 519 sizeLongHeader = 0#XXXX
500 520 self.basicHeaderObj.size = 24 + sizeLongHeader
501 521
502 522 self.__writeBasicHeader()
503 523 self.__wrSystemHeader()
504 524 self.__wrRadarControllerHeader()
505 525 self.__wrProcessingHeader()
506 526 self.dtype = self.dataOutObj.dtype
507 527
508 528
509 529 def __writeBasicHeader(self, fp=None):
510 530 """
511 531 Writes only the Basic header to the created file
512 532
513 533 Return:
514 534 None
515 535 """
516 536 if fp == None:
517 537 fp = self.fp
518 538
519 539 self.dataOutObj.basicHeaderObj.write(fp)
520 540
521 541
522 542 def __wrSystemHeader(self, fp=None):
523 543 """
524 544 Writes only the System header to the created file
525 545
526 546 Return:
527 547 None
528 548 """
529 549 if fp == None:
530 550 fp = self.fp
531 551
532 552 self.dataOutObj.systemHeaderObj.write(fp)
533 553
534 554
535 555 def __wrRadarControllerHeader(self, fp=None):
536 556 """
537 557 Writes only the RadarController header to the created file
538 558
539 559 Return:
540 560 None
541 561 """
542 562 if fp == None:
543 563 fp = self.fp
544 564
545 565 self.dataOutObj.radarControllerHeaderObj.write(fp)
546 566
547 567
548 568 def __wrProcessingHeader(self, fp=None):
549 569 """
550 570 Writes only the Processing header to the created file
551 571
552 572 Return:
553 573 None
554 574 """
555 575 if fp == None:
556 576 fp = self.fp
557 577
558 578 self.dataOutObj.processingHeaderObj.write(fp)
559 579
560 580
561 581 def setNextFile(self):
562 582 """
563 583 Determines the next file to be written
564 584
565 585 Affected:
566 586 self.filename
567 587 self.subfolder
568 588 self.fp
569 589 self.setFile
570 590 self.flagIsNewFile
571 591
572 592 Return:
573 593 0 : if the file cannot be written
574 594 1 : if the file is ready to be written
575 595 """
576 596 ext = self.ext
577 597 path = self.path
578 598
579 599 if self.fp != None:
580 600 self.fp.close()
581 601
582 602 timeTuple = time.localtime( self.dataOutObj.dataUtcTime)
583 603 subfolder = 'D%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
584 604
585 605 doypath = os.path.join( path, subfolder )
586 606 if not( os.path.exists(doypath) ):
587 607 os.mkdir(doypath)
588 608 self.setFile = -1 #initialize the set counter
589 609 else:
590 610 filesList = os.listdir( doypath )
591 611 if len( filesList ) > 0:
592 612 filesList = sorted( filesList, key=str.lower )
593 613 filen = filesList[-1]
594 614 # the filename must have the following format
595 615 # 0 1234 567 89A BCDE (hex)
596 616 # x YYYY DDD SSS .ext
597 617 if isNumber( filen[8:11] ):
598 618 self.setFile = int( filen[8:11] ) #initialize the set counter to the set number of the last file
599 619 else:
600 620 self.setFile = -1
601 621 else:
602 622 self.setFile = -1 #initialize the set counter
603 623
604 624 setFile = self.setFile
605 625 setFile += 1
606 626
607 627 file = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
608 628 timeTuple.tm_year,
609 629 timeTuple.tm_yday,
610 630 setFile,
611 631 ext )
612 632
613 633 filename = os.path.join( path, subfolder, file )
614 634
615 635 fp = open( filename,'wb' )
616 636
617 637 self.blockIndex = 0
618 638
619 639 #saving attributes
620 640 self.filename = filename
621 641 self.subfolder = subfolder
622 642 self.fp = fp
623 643 self.setFile = setFile
624 644 self.flagIsNewFile = 1
625 645
626 646 print 'Writing the file: %s'%self.filename
627 647
628 648 self.__writeFirstHeader()
629 649
630 650 return 1
631 651
632 652
633 653 def __setNewBlock(self):
634 654 """
635 655 If it is a new file, writes the First Header; otherwise writes only the Basic Header
636 656
637 657 Return:
638 658 0 : if nothing could be written
639 659 1 : if the Basic or the First Header was written
640 660 """
641 661 if self.fp == None:
642 662 self.setNextFile()
643 663
644 664 if self.flagIsNewFile:
645 665 return 1
646 666
647 667 if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
648 668 self.__writeBasicHeader()
649 669 return 1
650 670
651 671 if not( self.setNextFile() ):
652 672 return 0
653 673
654 674 return 1
655 675
656 676
657 677 def writeNextBlock(self):
658 678 """
659 679 Selects the next block of data and writes it to a file
660 680
661 681 Return:
662 682 0 : if the data block could not be written
663 683 1 : if the data block was written
664 684 """
665 685 if not( self.__setNewBlock() ):
666 686 return 0
667 687
668 688 self.writeBlock()
669 689
670 690 return 1
671 691
672 692
673 693 def getDataHeader(self):
674 """
675 Gets a copy of the First Header
676
677 Affected:
678 self.basicHeaderObj
679 self.systemHeaderObj
680 self.radarControllerHeaderObj
681 self.processingHeaderObj
682 self.dtype
683
684 Return:
685 None
694 """Obtiene una copia del First Header Affected: self.basicHeaderObj self.
695 systemHeaderObj self.radarControllerHeaderObj self.processingHeaderObj self.
696 dtype Return: None
686 697 """
687 698
688 699 raise ValueError, "No implemented"
689 700
690 701 def setup(self, path, set=0, ext=None):
691 702 """
692 703 Sets the format in which the data will be saved and writes the First Header
693 704
694 705 Inputs:
695 706 path : destination path where the created files will be written
696 707 format : format in which a file will be saved
697 708 set : the set number of the file
698 709
699 710 Return:
700 711 0 : if the setup was not successful
701 712 1 : if the setup was successful
702 713 """
703 714
704 715 if ext == None:
705 716 ext = self.ext
706 717
707 718 ext = ext.lower()
708 719
709 720 self.path = path
710 721 self.setFile = set - 1
711 722 self.ext = ext
712 723 #self.format = format
713 724 self.getDataHeader()
714 725
715 726 self.setBlockDimension()
716 727
717 728 if not( self.setNextFile() ):
718 729 print "There isn't a next file"
719 730 return 0
720 731
721 732 return 1
722 733
723 734
724 735
725 736
726 737
727 738
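A minimal sketch (outside the diff) of the datatype decoding performed in __readFirstHeader above. The PROCFLAG constant values below are assumptions for illustration only; the real ones come from JROHeader. The position of the set datatype bit, relative to DATATYPE_CHAR, selects one of six complex sample formats.

    import numpy

    DATATYPE_CHAR = 0x00000040      # assumed bit for 8-bit integer samples
    DATATYPE_MASK = 0x00000FC0      # assumed mask covering the six datatype bits

    def decode_dtype(processFlags):
        # same log2 trick as __readFirstHeader: bit position -> format index
        index = int(numpy.log2(processFlags & DATATYPE_MASK) - numpy.log2(DATATYPE_CHAR))
        width = ['<i1', '<i2', '<i4', '<i8', '<f4', '<f8'][index]
        return numpy.dtype([('real', width), ('imag', width)])

    print(decode_dtype(DATATYPE_CHAR << 1))    # -> dtype([('real','<i2'), ('imag','<i2')])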
1 NO CONTENT: modified file
@@ -1,539 +1,574
1 1 '''
2 2 File: SpectraIO.py
3 3 Created on 20/02/2012
4 4
5 5 @author $Author: dsuarez $
6 6 @version $Id: SpectraIO.py 110 2012-07-19 15:18:18Z dsuarez $
7 7 '''
8 8
9 9 import os, sys
10 10 import numpy
11 11 import glob
12 12 import fnmatch
13 13 import time, datetime
14 14
15 15 path = os.path.split(os.getcwd())[0]
16 16 sys.path.append(path)
17 17
18 from Model.JROHeader import *
19 from Model.Spectra import Spectra
18 from IO.JROHeader import *
19 from Data.Spectra import Spectra
20 20
21 21 from JRODataIO import JRODataReader
22 22 from JRODataIO import JRODataWriter
23 23 from JRODataIO import isNumber
24 24
25 25
26 26 class SpectraReader(JRODataReader):
27 27 """
28 28 This class allows reading spectra data from processed files (.pdata). Data is always
29 29 read in blocks. The data read (a 3-dimensional array) is stored in three
30 30 buffers for the Self Spectra, the Cross Spectra and the DC Channel.
31 31
32 32 equalChannelPairs * heights * profiles (Self Spectra)
33 33 differentChannelPairs * heights * profiles (Cross Spectra)
34 34 channels * heights (DC Channels)
35 35
36 36 This class contains instances (objects) of the BasicHeader, SystemHeader,
37 37 RadarControllerHeader and Spectra classes. The first three are used to store data
38 38 header information (metadata), and the fourth (Spectra) to obtain and store a block
39 39 of data from the "buffer" each time the "getData" method is executed.
40 40
41 41 Example:
42 42 dpath = "/home/myuser/data"
43 43
44 44 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
45 45
46 46 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
47 47
48 48 readerObj = SpectraReader()
49 49
50 50 readerObj.setup(dpath, startTime, endTime)
51 51
52 52 while(True):
53 53
54 54 readerObj.getData()
55 55
56 56 print readerObj.dataOutObj.data
57 57
58 58 if readerObj.flagNoMoreFiles:
59 59 break
60 60
61 61 """
62 dataOutObj = None
63
64 datablock = None
65 62
66 63 pts2read_SelfSpectra = 0
67 64
68 65 pts2read_CrossSpectra = 0
69 66
70 67 pts2read_DCchannels = 0
71 68
72 69 ext = ".pdata"
73 70
74 71 optchar = "P"
75 72
76 flag_cspc = False
73 dataOutObj = None
74
75 nRdChannels = None
76
77 nRdPairs = None
78
79 rdPairList = []
80
77 81
78 82 def __init__(self, dataOutObj=None):
79 83 """
80 84 Initializer of the SpectraReader class for reading spectra data.
81 85
82 86 Inputs:
83 87 dataOutObj : Object of the Spectra class. This object will be used to
84 88 store a data profile each time a request is made
85 89 (getData). The profile will be obtained from the data buffer;
86 90 if the buffer is empty, a new read of a data block
87 91 will be performed.
88 92 If this parameter is not passed, one will be created internally.
89 93
90 94 Affected:
91 95 self.dataOutObj
92 96
93 97 Return : None
94 98 """
95 99
96 100 self.pts2read_SelfSpectra = 0
97 101
98 102 self.pts2read_CrossSpectra = 0
99 103
100 self.pts2read_DCs = 0
104 self.pts2read_DCchannels = 0
101 105
102 106 self.datablock = None
103 107
104 108 self.utc = None
105 109
106 110 self.ext = ".pdata"
107 111
108 112 self.optchar = "P"
109 113
110 114 self.basicHeaderObj = BasicHeader()
111 115
112 116 self.systemHeaderObj = SystemHeader()
113 117
114 118 self.radarControllerHeaderObj = RadarControllerHeader()
115 119
116 120 self.processingHeaderObj = ProcessingHeader()
117 121
118 122 self.online = 0
119 123
120 124 self.fp = None
121 125
122 126 self.idFile = None
123 127
124 128 self.dtype = None
125 129
126 130 self.fileSizeByHeader = None
127 131
128 132 self.filenameList = []
129 133
130 134 self.filename = None
131 135
132 136 self.fileSize = None
133 137
134 138 self.firstHeaderSize = 0
135 139
136 140 self.basicHeaderSize = 24
137 141
138 142 self.pathList = []
139 143
140 144 self.lastUTTime = 0
141 145
142 146 self.maxTimeStep = 30
143 147
144 148 self.flagNoMoreFiles = 0
145 149
146 150 self.set = 0
147 151
148 152 self.path = None
149 153
150 154 self.delay = 3 #seconds
151 155
152 156 self.nTries = 3 #quantity tries
153 157
154 158 self.nFiles = 3 #number of files for searching
155 159
156 160 self.nReadBlocks = 0
157 161
158 162 self.flagIsNewFile = 1
159 163
160 164 self.ippSeconds = 0
161 165
162 166 self.flagTimeBlock = 0
163 167
164 168 self.flagIsNewBlock = 0
165 169
166 170 self.nTotalBlocks = 0
167 171
168 172 self.blocksize = 0
169 173
170 174
171 175 def createObjByDefault(self):
172 176
173 177 dataObj = Spectra()
174 178
175 179 return dataObj
176 180
177 181 def __hasNotDataInBuffer(self):
178 182 return 1
179 183
180 184
181 185 def getBlockDimension(self):
182 186 """
183 187 Gets the number of points to read per data block
184 188
185 189 Affected:
186 self.nChannels
187 self.nPairs
190 self.nRdChannels
191 self.nRdPairs
188 192 self.pts2read_SelfSpectra
189 193 self.pts2read_CrossSpectra
190 194 self.pts2read_DCchannels
191 195 self.blocksize
192 196 self.dataOutObj.nChannels
193 197 self.dataOutObj.nPairs
194 198
195 199 Return:
196 200 None
197 201 """
198 self.nChannels = 0
199 self.nPairs = 0
200 self.pairList = []
202 self.nRdChannels = 0
203 self.nRdPairs = 0
204 self.rdPairList = []
201 205
202 206 for i in range( 0, self.processingHeaderObj.totalSpectra*2, 2 ):
203 207 if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i+1]:
204 self.nChannels = self.nChannels + 1 #pair of equal channels
208 self.nRdChannels = self.nRdChannels + 1 #pair of equal channels
205 209 else:
206 self.nPairs = self.nPairs + 1 #pair of different channels
207 self.pairList.append( (self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1]) )
210 self.nRdPairs = self.nRdPairs + 1 #pair of different channels
211 self.rdPairList.append( (self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1]) )
208 212
209 pts2read = self.processingHeaderObj.numHeights * self.processingHeaderObj.profilesPerBlock
213 pts2read = self.processingHeaderObj.nHeights * self.processingHeaderObj.profilesPerBlock
210 214
211 self.pts2read_SelfSpectra = int(self.nChannels * pts2read)
215 self.pts2read_SelfSpectra = int(self.nRdChannels * pts2read)
212 216 self.blocksize = self.pts2read_SelfSpectra
213 217
214 218 if self.processingHeaderObj.flag_cspc:
215 self.pts2read_CrossSpectra = int(self.nPairs * pts2read)
219 self.pts2read_CrossSpectra = int(self.nRdPairs * pts2read)
216 220 self.blocksize += self.pts2read_CrossSpectra
217 221
218 222 if self.processingHeaderObj.flag_dc:
219 self.pts2read_DCchannels = int(self.systemHeaderObj.numChannels * self.processingHeaderObj.numHeights)
223 self.pts2read_DCchannels = int(self.systemHeaderObj.nChannels * self.processingHeaderObj.nHeights)
220 224 self.blocksize += self.pts2read_DCchannels
221 225
222 226 # self.blocksize = self.pts2read_SelfSpectra + self.pts2read_CrossSpectra + self.pts2read_DCchannels
223 227
224 228
225 229 def readBlock(self):
226 230 """
227 231 Reads the block of data from the current position of the file pointer
228 232 (self.fp) and updates all parameters related to the data block
229 233 (metadata + data). The data read is stored in the buffer and the buffer
230 234 counter is reset to 0
231 235
232 236 Return: None
233 237
234 238 Affected variables:
235 self.datablockIndex
239
236 240 self.flagIsNewFile
237 241 self.flagIsNewBlock
238 242 self.nTotalBlocks
239 243 self.data_spc
240 244 self.data_cspc
241 245 self.data_dc
242 246
243 247 Exceptions:
244 248 If a block read is not a valid block
245 249 """
246 250 blockOk_flag = False
247 251 fpointer = self.fp.tell()
248 252
249 spc = numpy.fromfile( self.fp, self.dataType[0], self.pts2read_SelfSpectra )
250 spc = spc.reshape( (self.nChannels, self.processingHeaderObj.numHeights, self.processingHeaderObj.profilesPerBlock) ) #transforma a un arreglo 3D
253 spc = numpy.fromfile( self.fp, self.dtype[0], self.pts2read_SelfSpectra )
254 spc = spc.reshape( (self.nRdChannels, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #transforma a un arreglo 3D
251 255
252 if self.flag_cspc:
253 cspc = numpy.fromfile( self.fp, self.dataType, self.pts2read_CrossSpectra )
254 cspc = cspc.reshape( (self.nPairs, self.processingHeaderObj.numHeights, self.processingHeaderObj.profilesPerBlock) ) #transforma a un arreglo 3D
256 if self.processingHeaderObj.flag_cspc:
257 cspc = numpy.fromfile( self.fp, self.dtype, self.pts2read_CrossSpectra )
258 cspc = cspc.reshape( (self.nRdPairs, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #transforma a un arreglo 3D
255 259
256 260 if self.processingHeaderObj.flag_dc:
257 dc = numpy.fromfile( self.fp, self.dataType, self.pts2read_DCchannels ) #int(self.processingHeaderObj.numHeights*self.systemHeaderObj.numChannels) )
258 dc = dc.reshape( (self.systemHeaderObj.numChannels, self.processingHeaderObj.numHeights) ) #transforma a un arreglo 2D
261 dc = numpy.fromfile( self.fp, self.dtype, self.pts2read_DCchannels ) #int(self.processingHeaderObj.nHeights*self.systemHeaderObj.nChannels) )
262 dc = dc.reshape( (self.systemHeaderObj.nChannels, self.processingHeaderObj.nHeights) ) #transforma a un arreglo 2D
259 263
260 264
261 265 if not(self.processingHeaderObj.shif_fft):
262 266 spc = numpy.roll( spc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #desplaza a la derecha en el eje 2 determinadas posiciones
263 267
264 if self.flag_cspc:
268 if self.processingHeaderObj.flag_cspc:
265 269 cspc = numpy.roll( cspc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #desplaza a la derecha en el eje 2 determinadas posiciones
266 270
267 271
268 272 spc = numpy.transpose( spc, (0,2,1) )
269 273 self.data_spc = spc
270 274
271 if self.flag_cspc:
275 if self.processingHeaderObj.flag_cspc:
272 276 cspc = numpy.transpose( cspc, (0,2,1) )
273 277 self.data_cspc = cspc['real'] + cspc['imag']*1j
274 278 else:
275 279 self.data_cspc = None
276 280
277 281 if self.processingHeaderObj.flag_dc:
278 282 self.data_dc = dc['real'] + dc['imag']*1j
279 283 else:
280 284 self.data_dc = None
281 285
282 self.datablockIndex = 0
283 286 self.flagIsNewFile = 0
284 287 self.flagIsNewBlock = 1
285 288
286 289 self.nTotalBlocks += 1
287 290 self.nReadBlocks += 1
288 291
289 292 return 1
290 293
291 294
292 295 def getData(self):
293 296 """
294 297 Copies the read buffer to the "Spectra" class,
295 298 with all its associated parameters (metadata). When there is no data in the read
296 299 buffer, a new read of the data blocks is required using "readNextBlock"
297 300
298 301 Return:
299 302 0 : if there are no more files available
300 303 1 : if the buffer was copied successfully
301 304
302 305 Affected:
303 306 self.dataOutObj
304 self.datablockIndex
307
305 308 self.flagTimeBlock
306 309 self.flagIsNewBlock
307 310 """
308 311
309 312 if self.flagNoMoreFiles: return 0
310 313
311 314 self.flagTimeBlock = 0
312 315 self.flagIsNewBlock = 0
313 316
314 317 if self.__hasNotDataInBuffer():
315 318
316 319 if not( self.readNextBlock() ):
317 320 return 0
318 321
319 self.updateDataHeader()
322 # self.updateDataHeader()
320 323
321 324 if self.flagNoMoreFiles == 1:
322 325 print 'Process finished'
323 326 return 0
324 327
325 328 #data is a 3-dimensional numpy array (profiles, heights and channels)
326 329
327 330 if self.data_dc == None:
328 331 self.dataOutObj.flagNoData = True
329 332 return 0
330 333
331 self.dataOutObj.flagNoData = False
332 self.dataOutObj.flagTimeBlock = self.flagTimeBlock
333 334
334 335 self.dataOutObj.data_spc = self.data_spc
336
335 337 self.dataOutObj.data_cspc = self.data_cspc
338
336 339 self.dataOutObj.data_dc = self.data_dc
337 340
341 self.dataOutObj.flagTimeBlock = self.flagTimeBlock
342
343 self.dataOutObj.flagNoData = False
344
345 self.dataOutObj.dtype = self.dtype
346
347 self.dataOutObj.nChannels = self.nRdChannels
348
349 self.dataOutObj.nPairs = self.nRdPairs
350
351 self.dataOutObj.pairsList = self.rdPairList
352
353 self.dataOutObj.nHeights = self.processingHeaderObj.nHeights
354
355 self.dataOutObj.nProfiles = self.processingHeaderObj.profilesPerBlock
356
357 self.dataOutObj.nFFTPoints = self.processingHeaderObj.profilesPerBlock
358
359 self.dataOutObj.nIncohInt = self.processingHeaderObj.nIncohInt
360
361
362 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
363
364 self.dataOutObj.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
365
366 self.dataOutObj.channelList = range(self.systemHeaderObj.nChannels)
367
368 self.dataOutObj.channelIndexList = range(self.systemHeaderObj.nChannels)
369
370 self.dataOutObj.dataUtcTime = self.basicHeaderObj.utc #+ self.profileIndex * self.ippSeconds
371
372 # self.profileIndex += 1
373
374 self.dataOutObj.systemHeaderObj = self.systemHeaderObj.copy()
375
376 self.dataOutObj.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
377
338 378 return 1
339 379
340 380
341 381 class SpectraWriter(JRODataWriter):
342 382
343 383 """
344 384 This class allows writing spectra data to processed files (.pdata). Data is always
345 385 written in blocks.
346 386 """
347 387
348 dataOutObj = None
349 388
350 389 shape_spc_Buffer = None
351 390 shape_cspc_Buffer = None
352 391 shape_dc_Buffer = None
353
354 data_spc = None
355 data_cspc = None
356 data_dc = None
357
392 dataOutObj = None
358 393
359 394 def __init__(self, dataOutObj=None):
360 395 """
361 396 Initializer of the SpectraWriter class for writing spectra data.
362 397
363 398 Affected:
364 399 self.dataOutObj
365 400 self.basicHeaderObj
366 401 self.systemHeaderObj
367 402 self.radarControllerHeaderObj
368 403 self.processingHeaderObj
369 404
370 405 Return: None
371 406 """
372 407 if dataOutObj == None:
373 408 dataOutObj = Spectra()
374 409
375 410 if not( isinstance(dataOutObj, Spectra) ):
376 411 raise ValueError, "in SpectraWriter, dataOutObj must be a Spectra class object"
377 412
378 413 self.dataOutObj = dataOutObj
379 414
380 415 self.ext = ".pdata"
381 416
382 417 self.optchar = "P"
383 418
384 419 self.shape_spc_Buffer = None
385 420 self.shape_cspc_Buffer = None
386 421 self.shape_dc_Buffer = None
387 422
388 423 self.data_spc = None
389 424 self.data_cspc = None
390 425 self.data_dc = None
391 426
392 427 ####################################
393 428
394 429 self.fp = None
395 430
396 431 self.nWriteBlocks = 0
397 432
398 433 self.flagIsNewFile = 1
399 434
400 435 self.nTotalBlocks = 0
401 436
402 437 self.flagIsNewBlock = 0
403 438
404 439 self.flagNoMoreFiles = 0
405 440
406 441 self.setFile = None
407 442
408 self.dataType = None
443 self.dtype = None
409 444
410 445 self.path = None
411 446
412 447 self.noMoreFiles = 0
413 448
414 449 self.filename = None
415 450
416 451 self.basicHeaderObj = BasicHeader()
417 452
418 453 self.systemHeaderObj = SystemHeader()
419 454
420 455 self.radarControllerHeaderObj = RadarControllerHeader()
421 456
422 457 self.processingHeaderObj = ProcessingHeader()
423 458
424 459
425 460 def hasAllDataInBuffer(self):
426 461 return 1
427 462
428 463
429 464 def setBlockDimension(self):
430 465 """
431 466 Gets the dimensional shapes of the data sub-blocks that make up a block
432 467
433 468 Affected:
434 469 self.shape_spc_Buffer
435 470 self.shape_cspc_Buffer
436 471 self.shape_dc_Buffer
437 472
438 473 Return: None
439 474 """
440 475 self.shape_spc_Buffer = (self.dataOutObj.nChannels,
441 self.processingHeaderObj.numHeights,
476 self.processingHeaderObj.nHeights,
442 477 self.processingHeaderObj.profilesPerBlock)
443 478
444 479 self.shape_cspc_Buffer = (self.dataOutObj.nPairs,
445 self.processingHeaderObj.numHeights,
480 self.processingHeaderObj.nHeights,
446 481 self.processingHeaderObj.profilesPerBlock)
447 482
448 self.shape_dc_Buffer = (self.systemHeaderObj.numChannels,
449 self.processingHeaderObj.numHeights)
483 self.shape_dc_Buffer = (self.systemHeaderObj.nChannels,
484 self.processingHeaderObj.nHeights)
450 485
451 486
452 487 def writeBlock(self):
453 488 """
454 489 Writes the buffer to the designated file
455 490
456 491 Affected:
457 492 self.data_spc
458 493 self.data_cspc
459 494 self.data_dc
460 495 self.flagIsNewFile
461 496 self.flagIsNewBlock
462 497 self.nTotalBlocks
463 498 self.nWriteBlocks
464 499
465 500 Return: None
466 501 """
467 502
468 503 spc = numpy.transpose( self.data_spc, (0,2,1) )
469 504 if not( self.processingHeaderObj.shif_fft ):
470 505 spc = numpy.roll( spc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #desplaza a la derecha en el eje 2 determinadas posiciones
471 506 data = spc.reshape((-1))
472 507 data.tofile(self.fp)
473 508
474 509 if self.data_cspc != None:
475 data = numpy.zeros( self.shape_cspc_Buffer, self.dataType )
510 data = numpy.zeros( self.shape_cspc_Buffer, self.dtype )
476 511 cspc = numpy.transpose( self.data_cspc, (0,2,1) )
477 512 if not( self.processingHeaderObj.shif_fft ):
478 513 cspc = numpy.roll( cspc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #desplaza a la derecha en el eje 2 determinadas posiciones
479 514 data['real'] = cspc.real
480 515 data['imag'] = cspc.imag
481 516 data = data.reshape((-1))
482 517 data.tofile(self.fp)
483 518
484 data = numpy.zeros( self.shape_dc_Buffer, self.dataType )
519 data = numpy.zeros( self.shape_dc_Buffer, self.dtype )
485 520 dc = self.data_dc
486 521 data['real'] = dc.real
487 522 data['imag'] = dc.imag
488 523 data = data.reshape((-1))
489 524 data.tofile(self.fp)
490 525
491 526 self.data_spc.fill(0)
492 527 self.data_dc.fill(0)
493 528 if self.data_cspc != None:
494 529 self.data_cspc.fill(0)
495 530
496 531 self.flagIsNewFile = 0
497 532 self.flagIsNewBlock = 1
498 533 self.nTotalBlocks += 1
499 534 self.nWriteBlocks += 1
500 535
501 536
502 537 def putData(self):
503 538 """
504 539 Sets a block of data and then writes it to a file
505 540
506 541 Affected:
507 542 self.data_spc
508 543 self.data_cspc
509 544 self.data_dc
510 545
511 546 Return:
512 547 0 : if there is no data or no more files can be written
513 548 1 : if a block of data was written to a file
514 549 """
515 550 self.flagIsNewBlock = 0
516 551
517 552 if self.dataOutObj.flagNoData:
518 553 return 0
519 554
520 555 if self.dataOutObj.flagTimeBlock:
521 556 self.data_spc.fill(0)
522 557 self.data_cspc.fill(0)
523 558 self.data_dc.fill(0)
524 559 self.setNextFile()
525 560
526 561 self.data_spc = self.dataOutObj.data_spc
527 562 self.data_cspc = self.dataOutObj.data_cspc
528 563 self.data_dc = self.dataOutObj.data_dc
529 564
530 565 # #self.processingHeaderObj.dataBlocksPerFile)
531 566 if self.hasAllDataInBuffer():
532 567 self.getDataHeader()
533 568 self.writeNextBlock()
534 569
535 570 if self.flagNoMoreFiles:
536 571 #print 'Process finished'
537 572 return 0
538 573
539 574 return 1 No newline at end of file
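A minimal sketch (outside the diff), using made-up header values, of the block-size arithmetic done by getBlockDimension above: the self spectra, the optional cross spectra and the optional DC channels are read back to back as one block of blocksize items.

    # Assumed example header values, not taken from a real .pdata file
    nRdChannels, nRdPairs = 4, 2              # equal / different channel pairs from spectraComb
    nHeights, profilesPerBlock = 100, 128
    nChannels = 4                              # systemHeaderObj.nChannels
    flag_cspc, flag_dc = True, True

    pts2read = nHeights * profilesPerBlock
    blocksize = nRdChannels * pts2read                     # self spectra
    if flag_cspc:
        blocksize += nRdPairs * pts2read                   # cross spectra
    if flag_dc:
        blocksize += nChannels * nHeights                  # DC channels
    print(blocksize)    # number of dtype items numpy.fromfile consumes per block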
@@ -1,490 +1,481
1 1 '''
2 2 Created on 23/01/2012
3 3
4 4 @author $Author: dsuarez $
5 5 @version $Id: VoltageIO.py 110 2012-07-19 15:18:18Z dsuarez $
6 6 '''
7 7
8 8 import os, sys
9 9 import numpy
10 10 import glob
11 11 import fnmatch
12 12 import time, datetime
13 13
14 14 path = os.path.split(os.getcwd())[0]
15 15 sys.path.append(path)
16 16
17 17 from JROHeader import *
18 18 from JRODataIO import JRODataReader
19 19 from JRODataIO import JRODataWriter
20 20
21 21 from Data.Voltage import Voltage
22 22
23 23 class VoltageReader(JRODataReader):
24 24 """
25 25 This class allows reading voltage data from files in rawdata format (.r). Data is always
26 26 read in blocks. The data read (a 3-dimensional array:
27 27 profiles*heights*channels) is stored in the "buffer" variable.
28 28
29 29 profiles * heights * channels
30 30
31 31 This class contains instances (objects) of the BasicHeader, SystemHeader,
32 32 RadarControllerHeader and Voltage classes. The first three are used to store data
33 33 header information (metadata), and the fourth (Voltage) to obtain and store a profile
34 34 of data from the "buffer" each time the "getData" method is executed.
35 35
36 36 Example:
37 37
38 38 dpath = "/home/myuser/data"
39 39
40 40 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
41 41
42 42 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
43 43
44 44 readerObj = VoltageReader()
45 45
46 46 readerObj.setup(dpath, startTime, endTime)
47 47
48 48 while(True):
49 49
50 50 #to get one profile
51 51 profile = readerObj.getData()
52 52
53 53 #print the profile
54 54 print profile
55 55
56 56 #If you want to see all datablock
57 57 print readerObj.datablock
58 58
59 59 if readerObj.flagNoMoreFiles:
60 60 break
61 61
62 62 """
63 dataOutObj = None
64
65 datablock = None
66 63
67 64 ext = ".r"
68 65
69 66 optchar = "D"
67 dataOutObj = None
70 68
71 69
72 70 def __init__(self, dataOutObj=None):
73 71 """
74 72 Initializer of the VoltageReader class for reading voltage data.
75 73
76 74 Input:
77 75 dataOutObj : Object of the Voltage class. This object will be used to
78 76 store a data profile each time a request is made
79 77 (getData). The profile will be obtained from the data buffer;
80 78 if the buffer is empty, a new read of a data block
81 79 will be performed.
82 80 If this parameter is not passed, one will be created internally.
83 81
84 82 Affected variables:
85 83 self.dataOutObj
86 84
87 85 Return:
88 86 None
89 87 """
90 88
91 89 self.datablock = None
92 90
93 91 self.utc = 0
94 92
95 93 self.ext = ".r"
96 94
97 95 self.optchar = "D"
98 96
99 97 self.basicHeaderObj = BasicHeader()
100 98
101 99 self.systemHeaderObj = SystemHeader()
102 100
103 101 self.radarControllerHeaderObj = RadarControllerHeader()
104 102
105 103 self.processingHeaderObj = ProcessingHeader()
106 104
107 105 self.online = 0
108 106
109 107 self.fp = None
110 108
111 109 self.idFile = None
112 110
113 111 self.dtype = None
114 112
115 113 self.fileSizeByHeader = None
116 114
117 115 self.filenameList = []
118 116
119 117 self.filename = None
120 118
121 119 self.fileSize = None
122 120
123 121 self.firstHeaderSize = 0
124 122
125 123 self.basicHeaderSize = 24
126 124
127 125 self.pathList = []
128 126
129 127 self.filenameList = []
130 128
131 129 self.lastUTTime = 0
132 130
133 131 self.maxTimeStep = 30
134 132
135 133 self.flagNoMoreFiles = 0
136 134
137 135 self.set = 0
138 136
139 137 self.path = None
140 138
141 139 self.profileIndex = 9999
142 140
143 141 self.delay = 3 #seconds
144 142
145 143 self.nTries = 3 #quantity tries
146 144
147 145 self.nFiles = 3 #number of files for searching
148 146
149 147 self.nReadBlocks = 0
150 148
151 149 self.flagIsNewFile = 1
152 150
153 151 self.ippSeconds = 0
154 152
155 153 self.flagTimeBlock = 0
156 154
157 155 self.flagIsNewBlock = 0
158 156
159 157 self.nTotalBlocks = 0
160 158
161 159 self.blocksize = 0
162 160
163 161 def createObjByDefault(self):
164 162
165 163 dataObj = Voltage()
166 164
167 165 return dataObj
168 166
169 167 def __hasNotDataInBuffer(self):
170 168 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
171 169 return 1
172 170 return 0
173 171
174 172
175 173 def getBlockDimension(self):
176 174 """
177 175 Gets the number of points to read per data block
178 176
179 177 Affected:
180 178 self.blocksize
181 179
182 180 Return:
183 181 None
184 182 """
185 183 pts2read = self.processingHeaderObj.profilesPerBlock * self.processingHeaderObj.nHeights * self.systemHeaderObj.nChannels
186 184 self.blocksize = pts2read
187 185
188 186
189 187 def readBlock(self):
190 188 """
191 189 readBlock reads the block of data from the current position of the file pointer
192 190 (self.fp) and updates all parameters related to the data block
193 191 (metadata + data). The data read is stored in the buffer and the buffer
194 192 counter is reset to 0
195 193
196 194 Inputs:
197 195 None
198 196
199 197 Return:
200 198 None
201 199
202 200 Affected:
203 201 self.profileIndex
204 202 self.datablock
205 203 self.flagIsNewFile
206 204 self.flagIsNewBlock
207 205 self.nTotalBlocks
208 206
209 207 Exceptions:
210 208 If a block read is not a valid block
211 209 """
212 210
213 211 junk = numpy.fromfile( self.fp, self.dtype, self.blocksize )
214 212
215 213 try:
216 214 junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
217 215 except:
218 216 print "The read block (%3d) has not enough data" %self.nReadBlocks
219 217 return 0
220 218
221 219 junk = numpy.transpose(junk, (2,0,1))
222 220 self.datablock = junk['real'] + junk['imag']*1j
223 221
224 222 self.profileIndex = 0
225 223
226 224 self.flagIsNewFile = 0
227 225 self.flagIsNewBlock = 1
228 226
229 227 self.nTotalBlocks += 1
230 228 self.nReadBlocks += 1
231 229
232 230 return 1
233 231
234 232
235 233 def getData(self):
236 234 """
237 235 getData gets one unit of data from the read buffer and copies it to the "Voltage" class
238 236 with all its associated parameters (metadata). When there is no data in the read
239 237 buffer, a new read of the data blocks is required using "readNextBlock"
240 238
241 239 It also increments the buffer counter by 1.
242 240
243 241 Return:
244 242 data : returns a voltage profile (heights * channels) copied from the
245 243 buffer. If there are no more files to read, it returns None.
246 244
247 245 Affected variables:
248 246 self.dataOutObj
249 247 self.profileIndex
250 248
251 249 Affected:
252 250 self.dataOutObj
253 251 self.profileIndex
254 252 self.flagTimeBlock
255 253 self.flagIsNewBlock
256 254 """
257 255 if self.flagNoMoreFiles: return 0
258 256
259 257 self.flagTimeBlock = 0
260 258 self.flagIsNewBlock = 0
261 259
262 260 if self.__hasNotDataInBuffer():
263 261
264 262 if not( self.readNextBlock() ):
265 263 return 0
266 264
267 265 # self.updateDataHeader()
268 266
269 267 if self.flagNoMoreFiles == 1:
270 268 print 'Process finished'
271 269 return 0
272 270
273 271 #data is a 3-dimensional numpy array (profiles, heights and channels)
274 272
275 273 if self.datablock == None:
276 274 self.dataOutObj.flagNoData = True
277 275 return 0
278 276
279 277 self.dataOutObj.data = self.datablock[:,self.profileIndex,:]
280 278
281 279 self.dataOutObj.dtype = self.dtype
282 280
283 281 self.dataOutObj.nChannels = self.systemHeaderObj.nChannels
284 282
285 283 self.dataOutObj.nHeights = self.processingHeaderObj.nHeights
286 284
287 285 self.dataOutObj.nProfiles = self.processingHeaderObj.profilesPerBlock
288 286
289 287 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
290 288
291 289 self.dataOutObj.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
292 290
293 291 self.dataOutObj.channelList = range(self.systemHeaderObj.nChannels)
294 292
295 293 self.dataOutObj.channelIndexList = range(self.systemHeaderObj.nChannels)
296 294
297 self.dataOutObj.flagNoData = True
298
299 295 self.dataOutObj.flagTimeBlock = self.flagTimeBlock
300 296
301 297 self.dataOutObj.dataUtcTime = self.basicHeaderObj.utc + self.profileIndex * self.ippSeconds
302 298
303 299 self.dataOutObj.nCohInt = self.processingHeaderObj.nCohInt
304 300
305 301 self.profileIndex += 1
306 302
307 303 self.dataOutObj.systemHeaderObj = self.systemHeaderObj.copy()
308 304
309 305 self.dataOutObj.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
310 306
307 self.dataOutObj.flagNoData = False
308
311 309 return 1
312 310
313 311
314 312 class VoltageWriter(JRODataWriter):
315 313 """
316 314 Esta clase permite escribir datos de voltajes a archivos procesados (.r). La escritura
317 315 de los datos siempre se realiza por bloques.
318 316 """
319 __configHeaderFile = 'wrSetHeadet.txt'
320
321 dataOutObj = None
322 317
323 318 ext = ".r"
324 319
325 320 optchar = "D"
326 321
327 datablock = None
328
329 profileIndex = 0
330
331 322 shapeBuffer = None
332 323
333 324
334 325 def __init__(self, dataOutObj=None):
335 326 """
336 327 Initializer of the VoltageWriter class for writing voltage data.
337 328
338 329 Affected:
339 330 self.dataOutObj
340 331
341 332 Return: None
342 333 """
343 334 if dataOutObj == None:
344 335 dataOutObj = Voltage()
345 336
346 337 if not( isinstance(dataOutObj, Voltage) ):
347 338 raise ValueError, "in VoltageWriter, dataOutObj must be a Voltage class object"
348 339
349 340 self.dataOutObj = dataOutObj
350 341
351 342
352 343 def hasAllDataInBuffer(self):
353 344 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
354 345 return 1
355 346 return 0
356 347
357 348
358 349 def setBlockDimension(self):
359 350 """
360 351 Gets the dimensional shapes of the data sub-blocks that make up a block
361 352
362 353 Affected:
363 354 self.shape_spc_Buffer
364 355 self.shape_cspc_Buffer
365 356 self.shape_dc_Buffer
366 357
367 358 Return: None
368 359 """
369 360 self.shapeBuffer = (self.processingHeaderObj.profilesPerBlock,
370 361 self.processingHeaderObj.nHeights,
371 362 self.systemHeaderObj.nChannels )
372 363
373 364 self.datablock = numpy.zeros((self.systemHeaderObj.nChannels,
374 365 self.processingHeaderObj.profilesPerBlock,
375 366 self.processingHeaderObj.nHeights),
376 367 dtype=numpy.dtype('complex'))
377 368
378 369
379 370 def writeBlock(self):
380 371 """
381 372 Writes the buffer to the designated file
382 373
383 374 Affected:
384 375 self.profileIndex
385 376 self.flagIsNewFile
386 377 self.flagIsNewBlock
387 378 self.nTotalBlocks
388 379 self.blockIndex
389 380
390 381 Return: None
391 382 """
392 383 data = numpy.zeros( self.shapeBuffer, self.dtype )
393 384
394 385 junk = numpy.transpose(self.datablock, (1,2,0))
395 386
396 387 data['real'] = junk.real
397 388 data['imag'] = junk.imag
398 389
399 390 data = data.reshape( (-1) )
400 391
401 392 data.tofile( self.fp )
402 393
403 394 self.datablock.fill(0)
404 395
405 396 self.profileIndex = 0
406 397 self.flagIsNewFile = 0
407 398 self.flagIsNewBlock = 1
408 399
409 400 self.blockIndex += 1
410 401 self.nTotalBlocks += 1
411 402
412 403 def putData(self):
413 404 """
414 405 Setea un bloque de datos y luego los escribe en un file
415 406
416 407 Affected:
417 408 self.flagIsNewBlock
418 409 self.profileIndex
419 410
420 411 Return:
421 412 0 : if there is no data or no more files can be written
422 413 1 : if a block of data was written to a file
423 414 """
424 415 self.flagIsNewBlock = 0
425 416
426 417 if self.dataOutObj.flagNoData:
427 418 return 0
428 419
429 420 if self.dataOutObj.flagTimeBlock:
430 421
431 422 self.datablock.fill(0)
432 423 self.profileIndex = 0
433 424 self.setNextFile()
434 425
435 426 self.datablock[:,self.profileIndex,:] = self.dataOutObj.data
436 427
437 428 self.profileIndex += 1
438 429
439 430 if self.hasAllDataInBuffer():
440 431 #if self.flagIsNewFile:
441 432 self.getDataHeader()
442 433 self.writeNextBlock()
443 434
444 435 if self.flagNoMoreFiles:
445 436 #print 'Process finished'
446 437 return 0
447 438
448 439 return 1
449 440
450 441 def getDataHeader(self):
451 442
452 443 """
453 444 Gets a copy of the First Header
454 445
455 446 Affected:
456 447 self.systemHeaderObj
457 448 self.radarControllerHeaderObj
458 449 self.dtype
459 450
460 451 Return:
461 452 None
462 453 """
463 454
464 455 # COMPUTE PARAMETERS
465 456
466 457 self.systemHeaderObj = self.dataOutObj.systemHeaderObj.copy()
467 458 self.radarControllerHeaderObj = self.dataOutObj.radarControllerHeaderObj.copy()
468 459
469 460 self.basicHeaderObj.size = self.basicHeaderSize
470 461 self.basicHeaderObj.version = self.versionFile
471 462 self.basicHeaderObj.dataBlock = self.nTotalBlocks
472 463 self.basicHeaderObj.utc = self.dataOutObj.dataUtcTime
473 464 self.basicHeaderObj.miliSecond = 0
474 465 self.basicHeaderObj.timeZone = 0
475 466 self.basicHeaderObj.dstFlag = 0
476 467 self.basicHeaderObj.errorCount = 0
477 468
478 469 self.processingHeaderObj.size = 0
479 470 self.processingHeaderObj.dtype = self.dataOutObj.dtype
480 471 self.processingHeaderObj.blockSize = 0
481 472 self.processingHeaderObj.profilesPerBlock = 0
482 473 self.processingHeaderObj.dataBlocksPerFile = 0
483 474 self.processingHeaderObj.numWindows = 0
484 475 self.processingHeaderObj.processFlags = 0
485 476 self.processingHeaderObj.coherentInt = 0
486 477 self.processingHeaderObj.incoherentInt = 0
487 478 self.processingHeaderObj.totalSpectra = 0
488 479
489 480 self.dtype = self.dataOutObj.dtype
490 481 No newline at end of file
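Finally, a minimal sketch (outside the diff, with made-up block geometry) of the reshape/transpose steps that VoltageReader.readBlock and getData perform above: the raw block is reshaped to (profiles, heights, channels), transposed to (channels, profiles, heights), combined into complex voltages, and getData then hands out one (channels, heights) profile at a time.

    import numpy

    profilesPerBlock, nHeights, nChannels = 128, 100, 4    # assumed example sizes
    dtype = numpy.dtype([('real', '<i2'), ('imag', '<i2')])

    raw = numpy.zeros(profilesPerBlock * nHeights * nChannels, dtype=dtype)  # stands in for numpy.fromfile
    junk = raw.reshape((profilesPerBlock, nHeights, nChannels))
    junk = numpy.transpose(junk, (2, 0, 1))                # -> (channels, profiles, heights)
    datablock = junk['real'] + junk['imag'] * 1j           # complex voltages
    profile = datablock[:, 0, :]                           # one profile, as getData returns it
    print(profile.shape)                                   # -> (4, 100)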
1 NO CONTENT: modified file