##// END OF EJS Templates
JRODATA: timeInterval is a property now...
Miguel Valdez -
r527:6ccd54aeeb93
parent child
Show More
@@ -1,686 +1,687
1 1 '''
2 2 @author: Daniel Suarez
3 3 '''
4 4
5 5 import os
6 6 import sys
7 7 import glob
8 8 import fnmatch
9 9 import datetime
10 10 import time
11 11 import re
12 12 import h5py
13 13 import numpy
14 14
15 15 from model.proc.jroproc_base import ProcessingUnit, Operation
16 16 from model.data.jroamisr import AMISR
17 17
class RadacHeader():
    """Header metadata read from the 'Raw11/Data/RadacHeader' group of an
    AMISR HDF5 file.

    *fp* is an open h5py.File (anything dict-like with .get() works).
    Derived dimensions: nrecords (blocks), npulses (profiles per record),
    nsamples (gates), nbeams.
    """
    def __init__(self, fp):
        header = 'Raw11/Data/RadacHeader'
        self.beamCodeByPulse = fp.get(header+'/BeamCode')
        self.beamCode = fp.get('Raw11/Data/Beamcodes')
        self.code = fp.get(header+'/Code')
        self.frameCount = fp.get(header+'/FrameCount')
        self.modeGroup = fp.get(header+'/ModeGroup')
        self.nsamplesPulse = fp.get(header+'/NSamplesPulse')
        self.pulseCount = fp.get(header+'/PulseCount')
        self.radacTime = fp.get(header+'/RadacTime')
        self.timeCount = fp.get(header+'/TimeCount')
        self.timeStatus = fp.get(header+'/TimeStatus')

        # dimensions derived from the stored tables
        self.nrecords = self.pulseCount.shape[0] #nblocks
        self.npulses = self.pulseCount.shape[1] #nprofile
        self.nsamples = self.nsamplesPulse[0,0] #ngates
        self.nbeams = self.beamCode.shape[1]

    def getIndexRangeToPulse(self, idrecord=0):
        """Split the pulse axis of record *idrecord* into two index arrays:
        (index_data, index_buffer) where index_data covers pulses up to and
        including the last occurrence of the highest pulse count, and
        index_buffer covers the trailing pulses carried into the next frame.
        """
        # NOTE(review): the reference pulse count is always taken from
        # record 0, not idrecord -- preserved from the original, confirm.
        npulse = max(self.pulseCount[0,:]+1)-1
        # BUGFIX/modernization: the deprecated h5py '.value' accessor
        # (removed in h5py >= 3.0) is replaced by plain indexing, which is
        # equivalent for h5py datasets and also works on numpy arrays.
        looking_index = numpy.where(self.pulseCount[idrecord,:]==npulse)[0]
        getLastIndex = looking_index[-1]
        index_data = numpy.arange(0, getLastIndex+1, 1)
        index_buffer = numpy.arange(getLastIndex+1, self.npulses, 1)
        return index_data, index_buffer
52 52
class AMISRReader(ProcessingUnit):
    """Reads AMISR HDF5 (.h5) voltage files and serves them profile by
    profile through self.dataOut (an AMISR data object).

    Typical use is via run(**kwargs): the first call configures the reader
    (see setup()) and every call emits one profile via getData().
    """

    # search parameters (filled in by __setParameters)
    path = None
    startDate = None
    endDate = None
    startTime = None
    endTime = None
    walk = None
    isConfig = False

    def __init__(self):
        # file-set identifiers
        self.set = None
        self.subset = None
        self.extension_file = '.h5'
        self.dtc_str = 'dtc'
        self.dtc_id = 0
        self.status = True
        self.isConfig = False
        self.dirnameList = []
        self.filenameList = []
        self.fileIndex = None
        self.flagNoMoreFiles = False
        self.flagIsNewFile = 0
        self.filename = ''
        self.amisrFilePointer = None    # open h5py.File handle
        self.radacHeaderObj = None      # RadacHeader of the current file
        self.dataOut = self.__createObjByDefault()
        self.datablock = None
        self.rest_datablock = None
        self.range = None
        self.idrecord_count = 0
        self.profileIndex = 0
        # pulse-index ranges splitting each record into data/buffer parts
        self.index_amisr_sample = None
        self.index_amisr_buffer = None
        self.beamCodeByFrame = None
        self.radacTimeByFrame = None
        # original attributes exactly as stored in the data file
        self.beamCodesFromFile = None
        self.radacTimeFromFile = None
        self.rangeFromFile = None
        self.dataByFrame = None
        self.dataset = None

        self.beamCodeDict = {}
        self.beamRangeDict = {}

        # experiment cfg (.exp) file parameters
        self.npulsesint_fromfile = None
        self.recordsperfile_fromfile = None
        self.nbeamcodes_fromfile = None
        self.ngates_fromfile = None
        self.ippSeconds_fromfile = None
        self.frequency_h5file = None


        self.__firstFile = True
        self.buffer_radactime = None

        # schain-side index arrays and carry-over buffers between records
        self.index4_schain_datablock = None
        self.index4_buffer = None
        self.schain_datablock = None
        self.buffer = None
        self.linear_pulseCount = None
        self.npulseByFrame = None
        self.profileIndex_offset = None
        self.timezone = 'ut'            # 'ut' or 'lt'

        self.__waitForNewFile = 20      # seconds between online polls
        self.__filename_online = None
122 122
123 123 def __createObjByDefault(self):
124 124
125 125 dataObj = AMISR()
126 126
127 127 return dataObj
128 128
129 129 def __setParameters(self,path='', startDate='',endDate='',startTime='', endTime='', walk=''):
130 130 self.path = path
131 131 self.startDate = startDate
132 132 self.endDate = endDate
133 133 self.startTime = startTime
134 134 self.endTime = endTime
135 135 self.walk = walk
136 136
    def __checkPath(self):
        """Set self.status to 1 when self.path exists on disk, else 0
        (printing a console warning)."""
        if os.path.exists(self.path):
            self.status = 1
        else:
            self.status = 0
            print 'Path:%s does not exists'%self.path

        return
145 145
146 146 def __selDates(self, amisr_dirname_format):
147 147 try:
148 148 year = int(amisr_dirname_format[0:4])
149 149 month = int(amisr_dirname_format[4:6])
150 150 dom = int(amisr_dirname_format[6:8])
151 151 thisDate = datetime.date(year,month,dom)
152 152
153 153 if (thisDate>=self.startDate and thisDate <= self.endDate):
154 154 return amisr_dirname_format
155 155 except:
156 156 return None
157 157
    def __findDataForDates(self,online=False):
        """Fill self.dirnameList with the folders under self.path whose
        names contain a numeric date-like pattern; offline mode additionally
        keeps only those inside [startDate, endDate] (see __selDates).
        Sets self.status to 1 on success, 0 (returning None) otherwise."""
        if not(self.status):
            return None

        # folder names are expected to embed digits like YYYYMMDD.NN
        pat = '\d+.\d+'
        dirnameList = [re.search(pat,x) for x in os.listdir(self.path)]
        dirnameList = filter(lambda x:x!=None,dirnameList)
        dirnameList = [x.string for x in dirnameList]
        if not(online):
            dirnameList = [self.__selDates(x) for x in dirnameList]
            dirnameList = filter(lambda x:x!=None,dirnameList)
        if len(dirnameList)>0:
            self.status = 1
            self.dirnameList = dirnameList
            self.dirnameList.sort()
        else:
            self.status = 0
            return None
179 179
    def __getTimeFromData(self):
        """Keep in self.filenameList only the files whose ending timestamp
        (read from 'Time/RadacTimeString') falls inside the requested
        [startDate+startTime, endDate+endTime) window.

        When self.timezone == 'lt' the file timestamps are shifted back
        300 minutes (UTC-5) before comparing.
        """
        startDateTime_Reader = datetime.datetime.combine(self.startDate,self.startTime)
        endDateTime_Reader = datetime.datetime.combine(self.endDate,self.endTime)

        print 'Filtering Files from %s to %s'%(startDateTime_Reader, endDateTime_Reader)
        print '........................................'
        filter_filenameList = []
        self.filenameList.sort()
        # NOTE(review): range(len-1) excludes the last file from the scan --
        # confirm whether that is intentional (e.g. file still being written).
        for i in range(len(self.filenameList)-1):
            filename = self.filenameList[i]
            fp = h5py.File(filename,'r')
            time_str = fp.get('Time/RadacTimeString')

            # first timestamp stored in the file
            startDateTimeStr_File = time_str[0][0].split('.')[0]
            junk = time.strptime(startDateTimeStr_File, '%Y-%m-%d %H:%M:%S')
            startDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec)

            # last timestamp stored in the file
            endDateTimeStr_File = time_str[-1][-1].split('.')[0]
            junk = time.strptime(endDateTimeStr_File, '%Y-%m-%d %H:%M:%S')
            endDateTime_File = datetime.datetime(junk.tm_year,junk.tm_mon,junk.tm_mday,junk.tm_hour, junk.tm_min, junk.tm_sec)

            fp.close()

            if self.timezone == 'lt':
                startDateTime_File = startDateTime_File - datetime.timedelta(minutes = 300)
                endDateTime_File = endDateTime_File - datetime.timedelta(minutes = 300)

            # keep files that END inside the requested window
            if (endDateTime_File>=startDateTime_Reader and endDateTime_File<endDateTime_Reader):
                #self.filenameList.remove(filename)
                filter_filenameList.append(filename)

        filter_filenameList.sort()
        self.filenameList = filter_filenameList
        return 1
214 214
215 215 def __filterByGlob1(self, dirName):
216 216 filter_files = glob.glob1(dirName, '*.*%s'%self.extension_file)
217 217 filterDict = {}
218 218 filterDict.setdefault(dirName)
219 219 filterDict[dirName] = filter_files
220 220 return filterDict
221 221
    def __getFilenameList(self, fileListInKeys, dirList):
        """Flatten the one-key dicts produced by __filterByGlob1 into
        self.filenameList (full paths).  *dirList* is accepted but unused.
        Note: value.keys()[0] relies on Python 2 list-returning keys()."""
        for value in fileListInKeys:
            dirName = value.keys()[0]
            for file in value[dirName]:
                filename = os.path.join(dirName, file)
                self.filenameList.append(filename)
228 228
229 229
    def __selectDataForTimes(self, online=False):
        """Build self.filenameList from self.dirnameList.

        Offline: optionally (self.all falsy) narrows the list by time window
        via __getTimeFromData, then sorts it and sets self.status.
        Online: keeps only the second-to-last file (the newest one may still
        be being written).  Finally prunes self.dirnameList down to the
        directories that still own at least one selected file.
        """
        if not(self.status):
            return None

        dirList = [os.path.join(self.path,x) for x in self.dirnameList]

        fileListInKeys = [self.__filterByGlob1(x) for x in dirList]

        self.__getFilenameList(fileListInKeys, dirList)
        if not(online):
            # time filtering (skipped when 'all' was requested)
            if not(self.all):
                self.__getTimeFromData()

            if len(self.filenameList)>0:
                self.status = 1
                self.filenameList.sort()
            else:
                self.status = 0
                return None

        else:
            #get the last file - 1
            self.filenameList = [self.filenameList[-2]]

        # keep only directories that contributed at least one file
        new_dirnameList = []
        for dirname in self.dirnameList:
            junk = numpy.array([dirname in x for x in self.filenameList])
            junk_sum = junk.sum()
            if junk_sum > 0:
                new_dirnameList.append(dirname)
        self.dirnameList = new_dirnameList
        return 1
264 264
265 265 def __searchFilesOnline(self,
266 266 path,
267 267 walk=True):
268 268
269 269 startDate = datetime.datetime.utcnow().date()
270 270 endDate = datetime.datetime.utcnow().date()
271 271
272 272 self.__setParameters(path=path, startDate=startDate, endDate=endDate, walk=walk)
273 273
274 274 self.__checkPath()
275 275
276 276 self.__findDataForDates(online=True)
277 277
278 278 self.dirnameList = [self.dirnameList[-1]]
279 279
280 280 self.__selectDataForTimes(online=True)
281 281
282 282 return
283 283
284 284
    def __searchFilesOffline(self,
                            path,
                            startDate,
                            endDate,
                            startTime=datetime.time(0,0,0),
                            endTime=datetime.time(23,59,59),
                            walk=True):
        """Offline discovery: store the search parameters, validate the
        path, collect date-matching folders and time-matching files, then
        echo the final file list to the console."""
        self.__setParameters(path, startDate, endDate, startTime, endTime, walk)

        self.__checkPath()

        self.__findDataForDates()

        self.__selectDataForTimes()

        for i in range(len(self.filenameList)):
            print "%s" %(self.filenameList[i])

        return
305 305
    def __setNextFileOffline(self):
        """Open the next file from self.filenameList.

        Returns 1 and updates filename/fileIndex/amisrFilePointer on
        success; returns 0 and raises flagNoMoreFiles when the list is
        exhausted.  (The while loop runs at most once: it either returns
        or breaks.)
        """
        idFile = self.fileIndex

        while (True):
            idFile += 1
            if not(idFile < len(self.filenameList)):
                self.flagNoMoreFiles = 1
                print "No more Files"
                return 0

            filename = self.filenameList[idFile]

            # may raise IOError when the file is unreadable
            amisrFilePointer = h5py.File(filename,'r')

            break

        self.flagIsNewFile = 1
        self.fileIndex = idFile
        self.filename = filename

        self.amisrFilePointer = amisrFilePointer

        print "Setting the file: %s"%self.filename

        return 1
331 331
332 332
333 333 def __setNextFileOnline(self):
334 334 filename = self.filenameList[0]
335 335 if self.__filename_online != None:
336 336 self.__selectDataForTimes(online=True)
337 337 filename = self.filenameList[0]
338 338 while self.__filename_online == filename:
339 339 print 'waiting %d seconds to get a new file...'%(self.__waitForNewFile)
340 340 time.sleep(self.__waitForNewFile)
341 341 self.__selectDataForTimes(online=True)
342 342 filename = self.filenameList[0]
343 343
344 344 self.__filename_online = filename
345 345
346 346 self.amisrFilePointer = h5py.File(filename,'r')
347 347 self.flagIsNewFile = 1
348 348 self.filename = filename
349 349 print "Setting the file: %s"%self.filename
350 350 return 1
351 351
352 352
    def __readHeader(self):
        """Parse the RADAC header of the current file and derive the pulse
        index layout used to re-frame records."""
        self.radacHeaderObj = RadacHeader(self.amisrFilePointer)

        #update values from experiment cfg file
        # NOTE(review): dimensions are overwritten with the .exp-file values
        # only when nrecords already equals recordsperfile (making the first
        # assignment a no-op) -- confirm the condition is intended.
        if self.radacHeaderObj.nrecords == self.recordsperfile_fromfile:
            self.radacHeaderObj.nrecords = self.recordsperfile_fromfile
            self.radacHeaderObj.nbeams = self.nbeamcodes_fromfile
            self.radacHeaderObj.npulses = self.npulsesint_fromfile
            self.radacHeaderObj.nsamples = self.ngates_fromfile

        #looking index list for data
        # pulses from the first recorded pulse count up to npulses belong
        # to the current frame ...
        start_index = self.radacHeaderObj.pulseCount[0,:][0]
        end_index = self.radacHeaderObj.npulses
        range4data = range(start_index, end_index)
        self.index4_schain_datablock = numpy.array(range4data)

        # ... while the leading pulses are carried over from the previous
        # record (buffer part)
        buffer_start_index = 0
        buffer_end_index = self.radacHeaderObj.pulseCount[0,:][0]
        range4buffer = range(buffer_start_index, buffer_end_index)
        self.index4_buffer = numpy.array(range4buffer)

        self.linear_pulseCount = numpy.array(range4data + range4buffer)
        self.npulseByFrame = max(self.radacHeaderObj.pulseCount[0,:]+1)

        #get tuning frequency
        frequency_h5file_dataset = self.amisrFilePointer.get('Rx'+'/TuningFrequency')
        self.frequency_h5file = frequency_h5file_dataset[0,0]

        self.flagIsNewFile = 1
382 382
    def __getBeamCode(self):
        """Build beamCodeDict (beam index -> [code, x, y, z] taken from
        Setup/BeamcodeMap) and beamRangeDict (beam index -> re-ordered pulse
        indices carrying that beam, based on record 0)."""
        self.beamCodeDict = {}
        self.beamRangeDict = {}

        beamCodeMap = self.amisrFilePointer.get('Setup/BeamcodeMap')

        for i in range(len(self.radacHeaderObj.beamCode[0,:])):
            self.beamCodeDict.setdefault(i)
            self.beamRangeDict.setdefault(i)
            beamcodeValue = self.radacHeaderObj.beamCode[0,i]
            beamcodeIndex = numpy.where(beamCodeMap[:,0] == beamcodeValue)[0][0]
            x = beamCodeMap[beamcodeIndex][1]
            y = beamCodeMap[beamcodeIndex][2]
            z = beamCodeMap[beamcodeIndex][3]
            self.beamCodeDict[i] = [beamcodeValue, x, y, z]

        just4record0 = self.radacHeaderObj.beamCodeByPulse[0,:]

        # map each beam to the (re-ordered) pulse positions that carry it
        # (relies on Python 2 list-returning dict.values())
        for i in range(len(self.beamCodeDict.values())):
            xx = numpy.where(just4record0==self.beamCodeDict.values()[i][0])
            indexPulseByBeam = self.linear_pulseCount[xx[0]]
            self.beamRangeDict[i] = indexPulseByBeam
405 405
    def __getExpParameters(self):
        """Read experiment parameters from the Setup/*.exp file next to the
        data (npulsesint, recordsperfile, nbeamcodes, ngates) and the IPP in
        seconds from the referenced tufile.  Sets self.status (0 when no
        .exp file is found)."""
        if not(self.status):
            return None

        experimentCfgPath = os.path.join(self.path, self.dirnameList[0], 'Setup')

        expFinder = glob.glob1(experimentCfgPath,'*.exp')
        if len(expFinder)== 0:
            self.status = 0
            return None

        experimentFilename = os.path.join(experimentCfgPath,expFinder[0])

        f = open(experimentFilename)
        lines = f.readlines()
        f.close()

        # pick the 'name=value' lines of interest and strip the non-digits
        parmsList = ['npulsesint*','recordsperfile*','nbeamcodes*','ngates*']
        filterList = [fnmatch.filter(lines, x) for x in parmsList]


        values = [re.sub(r'\D',"",x[0]) for x in filterList]

        self.npulsesint_fromfile = int(values[0])
        self.recordsperfile_fromfile = int(values[1])
        self.nbeamcodes_fromfile = int(values[2])
        self.ngates_fromfile = int(values[3])

        # locate the timing-unit file referenced by 'tufile=' and strip the
        # line ending (both \n and \r variants appear in these configs)
        tufileFinder = fnmatch.filter(lines, 'tufile=*')
        tufile = tufileFinder[0].split('=')[1].split('\n')[0]
        tufile = tufile.split('\r')[0]
        tufilename = os.path.join(experimentCfgPath,tufile)

        f = open(tufilename)
        lines = f.readlines()
        f.close()
        # NOTE(review): field [1][2] is assumed to be the IPP in
        # microseconds (hence /1E6) -- confirm the tufile format.
        self.ippSeconds_fromfile = float(lines[1].split()[2])/1E6


        self.status = 1
446 446
    def __setIdsAndArrays(self):
        """One-time (first file) initialization of the frame buffers, the
        range axis and the data/buffer pulse-index split."""
        self.dataByFrame = self.__setDataByFrame()
        self.beamCodeByFrame = self.amisrFilePointer.get('Raw11/Data/RadacHeader/BeamCode').value[0, :]
        self.readRanges()
        self.index_amisr_sample, self.index_amisr_buffer = self.radacHeaderObj.getIndexRangeToPulse(0)
        self.radacTimeByFrame = numpy.zeros(self.radacHeaderObj.npulses)
        # the carry-over time buffer is only needed when records are split
        if len(self.index_amisr_buffer) > 0:
            self.buffer_radactime = numpy.zeros_like(self.radacTimeByFrame)
455 455
456 456
457 457 def __setNextFile(self,online=False):
458 458
459 459 if not(online):
460 460 newFile = self.__setNextFileOffline()
461 461 else:
462 462 newFile = self.__setNextFileOnline()
463 463
464 464 if not(newFile):
465 465 return 0
466 466
467 467 self.__readHeader()
468 468
469 469 if self.__firstFile:
470 470 self.__setIdsAndArrays()
471 471 self.__firstFile = False
472 472
473 473 self.__getBeamCode()
474 474 self.readDataBlock()
475 475
476 476
    def setup(self,path=None,
            startDate=None,
            endDate=None,
            startTime=datetime.time(0,0,0),
            endTime=datetime.time(23,59,59),
            walk=True,
            timezone='ut',
            all=0,
            online=False):
        """Configure the reader: discover files (offline by date/time
        window, or online polling), read the experiment parameters and open
        the first file.

        timezone : 'ut' or 'lt' (local time shifts timestamps by UTC-5)
        all      : truthy skips the per-file time filtering
        Exits the process (sys.exit) when no file is found.
        """
        self.timezone = timezone
        self.all = all
        self.online = online
        if not(online):
            # offline file search
            self.__searchFilesOffline(path, startDate, endDate, startTime, endTime, walk)
        else:
            self.__searchFilesOnline(path, walk)

        if not(self.filenameList):
            print "There is no files into the folder: %s"%(path)

            sys.exit(-1)

        self.__getExpParameters()

        self.fileIndex = -1

        self.__setNextFile(online)

        # first_beamcode = self.radacHeaderObj.beamCodeByPulse[0,0]
        # index = numpy.where(self.radacHeaderObj.beamCodeByPulse[0,:]!=first_beamcode)[0][0]
        # start serving profiles at the first recorded pulse count
        self.profileIndex_offset = self.radacHeaderObj.pulseCount[0,:][0]
        self.profileIndex = self.profileIndex_offset
511 511
512 512 def readRanges(self):
513 513 dataset = self.amisrFilePointer.get('Raw11/Data/Samples/Range')
514 514
515 515 self.rangeFromFile = numpy.reshape(dataset.value,(-1))
516 516 return self.rangeFromFile
517 517
518 518
519 519 def readRadacTime(self,idrecord, range1, range2):
520 520 self.radacTimeFromFile = self.radacHeaderObj.radacTime.value
521 521
522 522 radacTimeByFrame = numpy.zeros((self.radacHeaderObj.npulses))
523 523 #radacTimeByFrame = dataset[idrecord - 1,range1]
524 524 #radacTimeByFrame = dataset[idrecord,range2]
525 525
526 526 return radacTimeByFrame
527 527
528 528 def readBeamCode(self, idrecord, range1, range2):
529 529 dataset = self.amisrFilePointer.get('Raw11/Data/RadacHeader/BeamCode')
530 530 beamcodeByFrame = numpy.zeros((self.radacHeaderObj.npulses))
531 531 self.beamCodesFromFile = dataset.value
532 532
533 533 #beamcodeByFrame[range1] = dataset[idrecord - 1, range1]
534 534 #beamcodeByFrame[range2] = dataset[idrecord, range2]
535 535 beamcodeByFrame[range1] = dataset[idrecord, range1]
536 536 beamcodeByFrame[range2] = dataset[idrecord, range2]
537 537
538 538 return beamcodeByFrame
539 539
540 540
541 541 def __setDataByFrame(self):
542 542 ndata = 2 # porque es complejo
543 543 dataByFrame = numpy.zeros((self.radacHeaderObj.npulses, self.radacHeaderObj.nsamples, ndata))
544 544 return dataByFrame
545 545
546 546 def __readDataSet(self):
547 547 dataset = self.amisrFilePointer.get('Raw11/Data/Samples/Data')
548 548 return dataset
549 549
550 550 def __setDataBlock(self,):
551 551 real = self.dataByFrame[:,:,0] #asumo que 0 es real
552 552 imag = self.dataByFrame[:,:,1] #asumo que 1 es imaginario
553 553 datablock = real + imag*1j #armo el complejo
554 554 return datablock
555 555
    def readSamples_version1(self,idrecord):
        """Assemble the complex data block for record *idrecord*, re-framing
        pulses so each returned block starts at pulse count 0.

        The leading pulses of a record (index_amisr_buffer) belong to the
        previous frame, so they are saved in self.buffer /
        self.buffer_radactime and spliced into the NEXT call's block.
        Record 0 has no predecessor: its buffer part stays zero-filled.
        """
        # the dataset handle is (re)acquired once per file
        if self.flagIsNewFile:
            #reading dataset
            self.dataset = self.__readDataSet()
            self.flagIsNewFile = 0

        if idrecord == 0:
            self.dataByFrame[self.index4_schain_datablock, : ,:] = self.dataset[0, self.index_amisr_sample,:,:]
            self.radacTimeByFrame[self.index4_schain_datablock] = self.radacHeaderObj.radacTime[0, self.index_amisr_sample]
            datablock = self.__setDataBlock()
            if len(self.index_amisr_buffer) > 0:
                # stash the tail pulses for the next frame
                self.buffer = self.dataset[0, self.index_amisr_buffer,:,:]
                self.buffer_radactime = self.radacHeaderObj.radacTime[0, self.index_amisr_buffer]

            return datablock
        # splice the pulses carried over from the previous record ...
        if len(self.index_amisr_buffer) > 0:
            self.dataByFrame[self.index4_buffer,:,:] = self.buffer.copy()
            self.radacTimeByFrame[self.index4_buffer] = self.buffer_radactime.copy()
        # ... and the in-frame pulses of the current record
        self.dataByFrame[self.index4_schain_datablock,:,:] = self.dataset[idrecord, self.index_amisr_sample,:,:]
        self.radacTimeByFrame[self.index4_schain_datablock] = self.radacHeaderObj.radacTime[idrecord, self.index_amisr_sample]
        datablock = self.__setDataBlock()
        if len(self.index_amisr_buffer) > 0:
            # stash the tail pulses for the next frame
            self.buffer = self.dataset[idrecord, self.index_amisr_buffer, :, :]
            self.buffer_radactime = self.radacHeaderObj.radacTime[idrecord, self.index_amisr_buffer]

        return datablock
583 583
584 584
    def readSamples(self,idrecord):
        """Alternative record reader (currently unused by readDataBlock):
        returns the raw complex block of *idrecord* without the buffer
        re-framing performed by readSamples_version1."""
        if self.flagIsNewFile:
            self.dataByFrame = self.__setDataByFrame()
            self.beamCodeByFrame = self.amisrFilePointer.get('Raw11/Data/RadacHeader/BeamCode').value[idrecord, :]

            #reading ranges
            self.readRanges()
            #reading dataset
            self.dataset = self.__readDataSet()

            self.flagIsNewFile = 0
        self.radacTimeByFrame = self.radacHeaderObj.radacTime.value[idrecord, :]
        self.dataByFrame = self.dataset[idrecord, :, :, :]
        datablock = self.__setDataBlock()
        return datablock
600 600
601 601
602 602 def readDataBlock(self):
603 603
604 604 self.datablock = self.readSamples_version1(self.idrecord_count)
605 605 #self.datablock = self.readSamples(self.idrecord_count)
606 606 #print 'record:', self.idrecord_count
607 607
608 608 self.idrecord_count += 1
609 609 self.profileIndex = 0
610 610
611 611 if self.idrecord_count >= self.radacHeaderObj.nrecords:
612 612 self.idrecord_count = 0
613 613 self.flagIsNewFile = 1
614 614
615 615 def readNextBlock(self):
616 616
617 617 self.readDataBlock()
618 618
619 619 if self.flagIsNewFile:
620 620 self.__setNextFile(self.online)
621 621 pass
622 622
623 623 def __hasNotDataInBuffer(self):
624 624 #self.radacHeaderObj.npulses debe ser otra variable para considerar el numero de pulsos a tomar en el primer y ultimo record
625 625 if self.profileIndex >= self.radacHeaderObj.npulses:
626 626 return 1
627 627 return 0
628 628
    def printUTC(self):
        """Debug helper: print the current profile's UTC timestamp."""
        print self.dataOut.utctime
        print ''
632 632
    def setObjProperties(self):
        """Copy the per-experiment metadata (heights, dimensions, IPP,
        frequency, beam dictionaries, timezone) into self.dataOut.  Called
        once right after setup()."""

        self.dataOut.heightList = self.rangeFromFile/1000.0 #km
        self.dataOut.nProfiles = self.radacHeaderObj.npulses
        self.dataOut.nRecords = self.radacHeaderObj.nrecords
        self.dataOut.nBeams = self.radacHeaderObj.nbeams
        self.dataOut.ippSeconds = self.ippSeconds_fromfile
        # timeInterval is now a derived property of dataOut
        # (ippSeconds * nCohInt), so it is no longer assigned here
        self.dataOut.frequency = self.frequency_h5file
        self.dataOut.npulseByFrame = self.npulseByFrame
        self.dataOut.nBaud = None
        self.dataOut.nCode = None
        self.dataOut.code = None

        self.dataOut.beamCodeDict = self.beamCodeDict
        self.dataOut.beamRangeDict = self.beamRangeDict

        if self.timezone == 'lt':
            self.dataOut.timeZone = time.timezone / 60. #get the timezone in minutes
        else:
            self.dataOut.timeZone = 0 #by default time is UTC
653 654
654 655 def getData(self):
655 656
656 657 if self.flagNoMoreFiles:
657 658 self.dataOut.flagNoData = True
658 659 print 'Process finished'
659 660 return 0
660 661
661 662 if self.__hasNotDataInBuffer():
662 663 self.readNextBlock()
663 664
664 665
665 666 if self.datablock == None: # setear esta condicion cuando no hayan datos por leers
666 667 self.dataOut.flagNoData = True
667 668 return 0
668 669
669 670 self.dataOut.data = numpy.reshape(self.datablock[self.profileIndex,:],(1,-1))
670 671
671 672 self.dataOut.utctime = self.radacTimeByFrame[self.profileIndex]
672 673 self.dataOut.profileIndex = self.profileIndex
673 674 self.dataOut.flagNoData = False
674 675
675 676 self.profileIndex += 1
676 677
677 678 return self.dataOut.data
678 679
679 680
680 681 def run(self, **kwargs):
681 682 if not(self.isConfig):
682 683 self.setup(**kwargs)
683 684 self.setObjProperties()
684 685 self.isConfig = True
685 686
686 687 self.getData()
@@ -1,1337 +1,1338
1 1 '''
2 2
3 3 '''
4 4 import os
5 5 import sys
6 6 import glob
7 7 import time
8 8 import numpy
9 9 import fnmatch
10 10 import time, datetime
11 11 #import h5py
12 12 import traceback
13 13
14 14 #try:
15 15 # import pyfits
16 16 #except:
17 17 # print "pyfits module has not been imported, it should be installed to save files in fits format"
18 18
19 19 #from jrodata import *
20 20 #from jroheaderIO import *
21 21 #from jroprocessing import *
22 22
23 23 #import re
24 24 #from xml.etree.ElementTree import Element, SubElement, ElementTree
25 25
26 26
27 27 LOCALTIME = True #-18000
28 28
29 29 from model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
30 30
def isNumber(str):
    """
    Return True when *str* can be converted to a float, False otherwise.

    Input:
        str : value to test (typically a string slice of a filename)

    Return:
        True  : the value parses as a number
        False : it does not
    """
    try:
        float( str )
        return True
    except (ValueError, TypeError):
        # narrowed from a bare except: float() only raises these for
        # non-numeric input; anything else should surface as a real bug
        return False
49 49
def isThisFileinRange(filename, startUTSeconds, endUTSeconds):
    """
    Return 1 when the Jicamarca data file *filename* starts inside the UTC
    range [startUTSeconds, endUTSeconds) (seconds since 1970-01-01),
    otherwise 0.  Only the basic header is read.

    Inputs:
        filename       : full path of a Jicamarca-format data file (.r)
        startUTSeconds : range start, seconds since the epoch
        endUTSeconds   : range end, seconds since the epoch

    Raises IOError when the file cannot be opened; prints a warning and
    returns 0 when the header is not valid.
    """
    basicHeaderObj = BasicHeader(LOCALTIME)

    try:
        fp = open(filename,'rb')
    except IOError:
        traceback.print_exc()
        raise IOError, "The file %s can't be opened" %(filename)

    sts = basicHeaderObj.read(fp)
    fp.close()

    if not(sts):
        print "Skipping the file %s because it has not a valid header" %(filename)
        return 0

    if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
        return 0

    return 1
90 90
def isFileinThisTime(filename, startTime, endTime):
    """
    Return the file's starting datetime when its time-of-day falls inside
    [startTime, endTime) (datetime.time values), else None.

    Inputs:
        filename  : full path of a Jicamarca-format data file (.r)
        startTime : window start (datetime.time)
        endTime   : window end (datetime.time)

    Raises IOError when the file cannot be opened; prints a warning and
    returns None when the header is not valid.
    """


    try:
        fp = open(filename,'rb')
    except IOError:
        traceback.print_exc()
        raise IOError, "The file %s can't be opened" %(filename)

    basicHeaderObj = BasicHeader(LOCALTIME)
    sts = basicHeaderObj.read(fp)
    fp.close()

    # NOTE(review): datatime is read BEFORE checking sts; with an invalid
    # header this value may be meaningless -- confirm read() leaves it
    # usable on failure.
    thisDatetime = basicHeaderObj.datatime
    thisTime = thisDatetime.time()

    if not(sts):
        print "Skipping the file %s because it has not a valid header" %(filename)
        return None

    if not ((startTime <= thisTime) and (endTime > thisTime)):
        return None

    return thisDatetime
134 134
135 135 def getFileFromSet(path, ext, set):
136 136 validFilelist = []
137 137 fileList = os.listdir(path)
138 138
139 139 # 0 1234 567 89A BCDE
140 140 # H YYYY DDD SSS .ext
141 141
142 142 for thisFile in fileList:
143 143 try:
144 144 year = int(thisFile[1:5])
145 145 doy = int(thisFile[5:8])
146 146 except:
147 147 continue
148 148
149 149 if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
150 150 continue
151 151
152 152 validFilelist.append(thisFile)
153 153
154 154 myfile = fnmatch.filter(validFilelist,'*%4.4d%3.3d%3.3d*'%(year,doy,set))
155 155
156 156 if len(myfile)!= 0:
157 157 return myfile[0]
158 158 else:
159 159 filename = '*%4.4d%3.3d%3.3d%s'%(year,doy,set,ext.lower())
160 160 print 'the filename %s does not exist'%filename
161 161 print '...going to the last file: '
162 162
163 163 if validFilelist:
164 164 validFilelist = sorted( validFilelist, key=str.lower )
165 165 return validFilelist[-1]
166 166
167 167 return None
168 168
def getlastFileFromPath(path, ext):
    """
    Scan *path* and return the (case-insensitively) last filename matching
    the 'PYYYYDDDSSS.ext' layout, or None when no file qualifies.

    Input:
        path : directory containing the candidate files
        ext  : expected file extension (case-insensitive)

    Return:
        The last qualifying filename (no path), or None.
    """
    # name layout reference:
    # 0 1234 567 89A BCDE
    # H YYYY DDD SSS .ext
    candidates = []
    for entry in os.listdir(path):

        ys = entry[1:5]
        if not isNumber(ys):
            continue

        ds = entry[5:8]
        if not isNumber(ds):
            continue

        # the original converted these eagerly; kept so float-like fields
        # still raise ValueError exactly as before
        int(ys)
        int(ds)

        if os.path.splitext(entry)[-1].lower() != ext.lower():
            continue

        candidates.append(entry)

    if candidates:
        return sorted(candidates, key=str.lower)[-1]

    return None
210 210
def checkForRealPath(path, foldercounter, year, doy, set, ext):
    """
    Resolve the on-disk spelling of a Jicamarca data file.  Linux is
    case-sensitive, so every upper/lower-case combination of the directory
    prefix (none/'d'/'D') and the file prefix ('d'/'D' for .r voltage,
    'p'/'P' for .pdata spectra) is probed, e.g.:

        .../y2009307367.ext
        .../Y2009307367.ext
        .../x2009307/y2009307367.ext
        .../X2009307/Y2009307367.ext   ...

    Return:
        (fullpath, filename) of the first existing combination;
        (None, last-tried-filename) when nothing exists on disk;
        (None, None) for an unsupported extension.
    """
    if ext.lower() == ".r":          #voltage
        filePrefixes = ['d', 'D']
    elif ext.lower() == ".pdata":    #spectra
        filePrefixes = ['p', 'P']
    else:
        return None, None

    filename = None
    for dirPrefix in [None, 'd', 'D']:
        if dirPrefix is None:
            thispath = path
        elif foldercounter == 0:
            # directory named xYYYYDDD (x = d or D)
            thispath = os.path.join(path, "%s%04d%03d" % (dirPrefix, year, doy))
        else:
            thispath = os.path.join(path, "%s%04d%03d_%02d" % (dirPrefix, year, doy, foldercounter))

        for filePrefix in filePrefixes:
            # candidate file xYYYYDDDSSS.ext
            filename = "%s%04d%03d%03d%s" % (filePrefix, year, doy, set, ext)
            fullfilename = os.path.join(thispath, filename)
            if os.path.exists(fullfilename):
                return fullfilename, filename

    return None, filename
269 269
def isDoyFolder(folder):
    """
    Check whether *folder* is named like a day-of-year data folder:
    one prefix character, a 4-digit year and a 3-digit doy
    ("xYYYYDDD...", e.g. "d2009307" or "D2009307_01").

    Returns 1 when both the year field (chars 1-4) and the doy field
    (chars 5-7) parse as integers, 0 otherwise.
    """
    # Catch only ValueError: a short or non-numeric field is the expected
    # failure mode (the original bare `except:` would also hide real bugs
    # such as KeyboardInterrupt or programming errors).
    try:
        int(folder[1:5])
        int(folder[5:8])
    except ValueError:
        return 0

    return 1
282 282
class JRODataIO:
    """
    Base class holding the state shared by the JRO raw-data readers and
    writers: parsed header objects, the open file handle and the
    read/write bookkeeping counters. Subclasses must override
    __init__() and run().
    """

    c = 3E8                         # speed of light [m/s]

    isConfig = False                # becomes True once setup() has run

    # Header objects filled in while reading / before writing.
    basicHeaderObj = None

    systemHeaderObj = None

    radarControllerHeaderObj = None

    processingHeaderObj = None

    online = 0                      # 1 -> polling for new files

    dtype = None                    # numpy dtype of the raw samples

    pathList = []

    filenameList = []

    filename = None                 # currently open file

    ext = None                      # '.r' or '.pdata'

    flagIsNewFile = 1

    flagTimeBlock = 0               # set when a time gap > maxTimeStep is seen

    flagIsNewBlock = 0

    fp = None                       # open file object

    firstHeaderSize = 0

    basicHeaderSize = 24            # bytes

    versionFile = 1103

    fileSize = None

#    ippSeconds = None

    fileSizeByHeader = None         # expected size computed from the headers

    fileIndex = None

    profileIndex = None

    blockIndex = None

    nTotalBlocks = None

    maxTimeStep = 30                # seconds; larger gaps raise flagTimeBlock

    lastUTTime = None

    datablock = None

    dataOut = None

    blocksize = None

    getByBlock = False              # deliver whole blocks instead of profiles

    def __init__(self):
        # Abstract: concrete readers/writers must provide a constructor.
        raise ValueError("Not implemented")

    def run(self):
        # Abstract: entry point implemented by subclasses.
        raise ValueError("Not implemented")
356 356
class JRODataReader(JRODataIO):
    """
    Base class for JRO raw-data file readers.

    Provides file searching (offline by date/time range, or online by
    polling for newly written files), sequential block reading and
    first-header parsing. Subclasses implement createObjByDefault(),
    getBlockDimension(), getFirstHeader(), getData(),
    hasNotDataInBuffer() and readBlock().
    """

    nReadBlocks = 0

    delay = 10 #number of seconds to wait for a new file

    nTries = 3 #number of retries

    nFiles = 3 #number of files to search ahead (online mode)

    path = None

    foldercounter = 0

    flagNoMoreFiles = 0

    datetimeList = []

    __isFirstTimeOnline = 1

    __printInfo = True

    profileIndex = None

    def __init__(self):

        """
        Abstract constructor: concrete readers must create their own
        header and dataOut objects.
        """

        raise ValueError, "This method has not been implemented"


    def createObjByDefault(self):
        """
        Abstract: build a default dataOut object for this reader.
        """
        raise ValueError, "This method has not been implemented"

    def getBlockDimension(self):
        # Abstract: compute the shape/size of one data block from the headers.
        raise ValueError, "No implemented"

    def __searchFilesOffLine(self,
                            path,
                            startDate,
                            endDate,
                            startTime=datetime.time(0,0,0),
                            endTime=datetime.time(23,59,59),
                            set=None,
                            expLabel='',
                            ext='.r',
                            walk=True):
        """
        Collect every data file inside *path* (a comma-separated list of
        base directories) matching the given date range, then filter by
        the time-of-day range. Fills self.filenameList/self.datetimeList
        and returns (pathList, filenameList), or (None, None) when
        nothing matches.
        """

        pathList = []

        if not walk:
            #pathList.append(path)
            multi_path = path.split(',')
            for single_path in multi_path:
                pathList.append(single_path)

        else:
            #dirList = []
            multi_path = path.split(',')
            for single_path in multi_path:
                dirList = []
                # keep only doy-named subdirectories (xYYYYDDD...)
                for thisPath in os.listdir(single_path):
                    if not os.path.isdir(os.path.join(single_path,thisPath)):
                        continue
                    if not isDoyFolder(thisPath):
                        continue

                    dirList.append(thisPath)

                if not(dirList):
                    return None, None

                thisDate = startDate

                # one pass per calendar day in [startDate, endDate]
                while(thisDate <= endDate):
                    year = thisDate.timetuple().tm_year
                    doy = thisDate.timetuple().tm_yday

                    # any prefix char + YYYYDDD + optional suffix
                    matchlist = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy) + '*')
                    if len(matchlist) == 0:
                        thisDate += datetime.timedelta(1)
                        continue
                    for match in matchlist:
                        pathList.append(os.path.join(single_path,match,expLabel))

                    thisDate += datetime.timedelta(1)

        if pathList == []:
            print "Any folder was found for the date range: %s-%s" %(startDate, endDate)
            return None, None

        print "%d folder(s) was(were) found for the date range: %s - %s" %(len(pathList), startDate, endDate)

        filenameList = []
        datetimeList = []
        pathDict = {}
        filenameList_to_sort = []

        # Index each folder by its first file so folders can be visited
        # in chronological (filename) order below.
        for i in range(len(pathList)):

            thisPath = pathList[i]

            fileList = glob.glob1(thisPath, "*%s" %ext)
            fileList.sort()
            pathDict.setdefault(fileList[0])
            pathDict[fileList[0]] = i
            filenameList_to_sort.append(fileList[0])

        filenameList_to_sort.sort()

        for file in filenameList_to_sort:
            thisPath = pathList[pathDict[file]]

            fileList = glob.glob1(thisPath, "*%s" %ext)
            fileList.sort()

            for file in fileList:

                filename = os.path.join(thisPath,file)
                # isFileinThisTime returns the file datetime when it falls
                # inside [startTime, endTime], else a falsy value
                thisDatetime = isFileinThisTime(filename, startTime, endTime)

                if not(thisDatetime):
                    continue

                filenameList.append(filename)
                datetimeList.append(thisDatetime)

        if not(filenameList):
            print "Any file was found for the time range %s - %s" %(startTime, endTime)
            return None, None

        print "%d file(s) was(were) found for the time range: %s - %s" %(len(filenameList), startTime, endTime)
        print

        for i in range(len(filenameList)):
            print "%s -> [%s]" %(filenameList[i], datetimeList[i].ctime())

        self.filenameList = filenameList
        self.datetimeList = datetimeList

        return pathList, filenameList

    def __searchFilesOnLine(self, path, expLabel = "", ext = None, walk=True, set=None):

        """
        Find the last file of the last doy folder and return it together
        with its decoded name fields.

        Input:
            path     : directory containing the data folders

            expLabel : sub-experiment name (subfolder)

            ext      : file extension

            walk     : when False, do not descend into doy subfolders

        Return:
            fullpath      : directory where the file was found
            foldercounter : numeric suffix of the doy folder (0 if none)
            filename      : last file of the folder
            year          : year decoded from the filename
            doy           : day-of-year decoded from the filename
            set           : set number decoded from the filename

        (six Nones when no valid folder/file is found)
        """
        dirList = []

        if not walk:
            fullpath = path
            foldercounter = 0
        else:
            #keep only the doy-named directories
            for thisPath in os.listdir(path):
                if not os.path.isdir(os.path.join(path,thisPath)):
                    continue
                if not isDoyFolder(thisPath):
                    continue

                dirList.append(thisPath)

            if not(dirList):
                return None, None, None, None, None, None

            dirList = sorted( dirList, key=str.lower )

            # latest folder; "xYYYYDDD_NN" carries a folder counter suffix
            doypath = dirList[-1]
            foldercounter = int(doypath.split('_')[1]) if len(doypath.split('_'))>1 else 0
            fullpath = os.path.join(path, doypath, expLabel)


        print "%s folder was found: " %(fullpath )

        if set == None:
            filename = getlastFileFromPath(fullpath, ext)
        else:
            filename = getFileFromSet(fullpath, ext, set)

        if not(filename):
            return None, None, None, None, None, None

        print "%s file was found" %(filename)

        if not(self.__verifyFile(os.path.join(fullpath, filename))):
            return None, None, None, None, None, None

        # filename layout: xYYYYDDDSSS.ext
        year = int( filename[1:5] )
        doy  = int( filename[5:8] )
        set = int( filename[8:11] )

        return fullpath, foldercounter, filename, year, doy, set

    def __setNextFileOffline(self):
        """
        Open the next readable file from self.filenameList, skipping
        files that fail __verifyFile(). Returns 1 on success, 0 when the
        list is exhausted (and sets flagNoMoreFiles).
        """

        idFile = self.fileIndex

        while (True):
            idFile += 1
            if not(idFile < len(self.filenameList)):
                self.flagNoMoreFiles = 1
                print "No more Files"
                return 0

            filename = self.filenameList[idFile]

            if not(self.__verifyFile(filename)):
                continue

            fileSize = os.path.getsize(filename)
            fp = open(filename,'rb')
            break

        self.flagIsNewFile = 1
        self.fileIndex = idFile
        self.filename = filename
        self.fileSize = fileSize
        self.fp = fp

        print "Setting the file: %s"%self.filename

        return 1

    def __setNextFileOnline(self):
        """
        Find the next file with enough data to be read inside the current
        folder; when no valid file is found, wait self.delay seconds and
        retry over the next candidate files.

        Affected:
            self.flagIsNewFile
            self.filename
            self.fileSize
            self.fp
            self.set
            self.flagNoMoreFiles

        Return:
            0 : no valid next file could be found
            1 : the file was opened successfully and is ready to be read

        Exceptions:
            If a given file cannot be opened
        """
        nFiles = 0
        fileOk_flag = False
        firstTime_flag = True

        self.set += 1

        # sets roll over at 999; move on to the next folder counter
        if self.set > 999:
            self.set = 0
            self.foldercounter += 1

        #look for the first available file
        fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
        if fullfilename:
            if self.__verifyFile(fullfilename, False):
                fileOk_flag = True

        #if no file was found then wait and search again
        if not(fileOk_flag):
            for nFiles in range(self.nFiles+1): #probe the next self.nFiles+1 candidate files

                if firstTime_flag: #on the first pass, retry up to self.nTries times
                    tries = self.nTries
                else:
                    tries = 1 #afterwards, try each candidate only once

                for nTries in range( tries ):
                    if firstTime_flag:
                        print "\tWaiting %0.2f sec for the file \"%s\" , try %03d ..." % ( self.delay, filename, nTries+1 )
                        time.sleep( self.delay )
                    else:
                        print "\tSearching next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext)

                    fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
                    if fullfilename:
                        if self.__verifyFile(fullfilename):
                            fileOk_flag = True
                            break

                if fileOk_flag:
                    break

                firstTime_flag = False

                print "\tSkipping the file \"%s\" due to this file doesn't exist" % filename
                self.set += 1

                if nFiles == (self.nFiles-1): #if the file was not found, move on to the next day's folder
                    self.set = 0
                    self.doy += 1
                    self.foldercounter = 0

        if fileOk_flag:
            self.fileSize = os.path.getsize( fullfilename )
            self.filename = fullfilename
            self.flagIsNewFile = 1
            if self.fp != None: self.fp.close()
            self.fp = open(fullfilename, 'rb')
            self.flagNoMoreFiles = 0
            print 'Setting the file: %s' % fullfilename
        else:
            self.fileSize = 0
            self.filename = None
            self.flagIsNewFile = 0
            self.fp = None
            self.flagNoMoreFiles = 1
            print 'No more Files'

        return fileOk_flag

    def setNextFile(self):
        """
        Close the current file, open the next one (online or offline
        mode) and parse its first header. Returns 1 on success, 0 when
        there are no more files.
        """
        if self.fp != None:
            self.fp.close()

        if self.online:
            newFile = self.__setNextFileOnline()
        else:
            newFile = self.__setNextFileOffline()

        if not(newFile):
            return 0

        self.__readFirstHeader()
        self.nReadBlocks = 0
        return 1

    def __waitNewBlock(self):
        """
        Return 1 if a new data block was found, 0 otherwise.

        In offline reading mode it always returns 0.
        """
        if not self.online:
            return 0

        if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
            return 0

        currentPointer = self.fp.tell()

        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        for nTries in range( self.nTries ):

            # reopen to refresh the file size seen by the OS
            self.fp.close()
            self.fp = open( self.filename, 'rb' )
            self.fp.seek( currentPointer )

            self.fileSize = os.path.getsize( self.filename )
            currentSize = self.fileSize - currentPointer

            if ( currentSize >= neededSize ):
                self.basicHeaderObj.read(self.fp)
                return 1

            if self.fileSize == self.fileSizeByHeader:
#                self.flagEoF = True
                return 0

            print "\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
            time.sleep( self.delay )


        return 0

    def waitDataBlock(self,pointer_location):
        """
        Wait (up to self.nTries times, self.delay seconds each) until the
        file has grown enough to contain one full data block starting at
        *pointer_location*. Returns 1 when the data is available, else 0.
        """

        currentPointer = pointer_location

        neededSize = self.processingHeaderObj.blockSize #+ self.basicHeaderSize

        for nTries in range( self.nTries ):
            self.fp.close()
            self.fp = open( self.filename, 'rb' )
            self.fp.seek( currentPointer )

            self.fileSize = os.path.getsize( self.filename )
            currentSize = self.fileSize - currentPointer

            if ( currentSize >= neededSize ):
                return 1

            print "\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
            time.sleep( self.delay )

        return 0

    def __jumpToLastBlock(self):
        """
        On the first online read, skip ahead to the last complete block
        already written in the file so reading starts at live data.
        """

        if not(self.__isFirstTimeOnline):
            return

        csize = self.fileSize - self.fp.tell()
        blocksize = self.processingHeaderObj.blockSize

        #skip the first data block
        if csize > self.processingHeaderObj.blockSize:
            self.fp.seek(self.fp.tell() + blocksize)
        else:
            return

        csize = self.fileSize - self.fp.tell()
        neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
        while True:

            if self.fp.tell()<self.fileSize:
                self.fp.seek(self.fp.tell() + neededsize)
            else:
                self.fp.seek(self.fp.tell() - neededsize)
                break

#        csize = self.fileSize - self.fp.tell()
#        neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
#        factor = int(csize/neededsize)
#        if factor > 0:
#            self.fp.seek(self.fp.tell() + factor*neededsize)

        self.flagIsNewFile = 0
        self.__isFirstTimeOnline = 0

    def __setNewBlock(self):
        """
        Position the file pointer on the next data block, reading its
        basic header; falls back to waiting (online) or to the next file.
        Sets flagTimeBlock when the inter-file time gap exceeds
        maxTimeStep. Returns 1 when a block is ready, 0 otherwise.
        """

        if self.fp == None:
            return 0

        if self.online:
            self.__jumpToLastBlock()

        if self.flagIsNewFile:
            return 1

        self.lastUTTime = self.basicHeaderObj.utc
        currentSize = self.fileSize - self.fp.tell()
        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        if (currentSize >= neededSize):
            self.basicHeaderObj.read(self.fp)
            return 1

        if self.__waitNewBlock():
            return 1

        if not(self.setNextFile()):
            return 0

        deltaTime = self.basicHeaderObj.utc - self.lastUTTime #

        self.flagTimeBlock = 0

        if deltaTime > self.maxTimeStep:
            self.flagTimeBlock = 1

        return 1

    def readNextBlock(self):
        """
        Advance to the next block and read it; returns 1 on success.
        """
        if not(self.__setNewBlock()):
            return 0

        if not(self.readBlock()):
            return 0

        return 1

    def __readFirstHeader(self):
        """
        Read the four headers at the start of the file, derive the sample
        dtype from the processing flags and the expected file size from
        the headers, then let the subclass compute the block dimension.
        """

        self.basicHeaderObj.read(self.fp)
        self.systemHeaderObj.read(self.fp)
        self.radarControllerHeaderObj.read(self.fp)
        self.processingHeaderObj.read(self.fp)

        self.firstHeaderSize = self.basicHeaderObj.size

        # datatype index: 0..5 -> i1/i2/i4/i8/f4/f8 complex pairs
        datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
        if datatype == 0:
            datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
        elif datatype == 1:
            datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')])
        elif datatype == 2:
            datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')])
        elif datatype == 3:
            datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')])
        elif datatype == 4:
            datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')])
        elif datatype == 5:
            datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')])
        else:
            raise ValueError, 'Data type was not defined'

        self.dtype = datatype_str
        #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
        self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1)
#        self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
#        self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
        self.getBlockDimension()

    def __verifyFile(self, filename, msgFlag=True):
        """
        Check that *filename* can be opened and already holds at least
        one full data block (headers are parsed to learn the block size
        when it is not yet known). Returns True/False.
        """
        msg = None
        try:
            fp = open(filename, 'rb')
            currentPosition = fp.tell()
        except IOError:
            traceback.print_exc()
            if msgFlag:
                print "The file %s can't be opened" % (filename)
            return False

        neededSize = self.processingHeaderObj.blockSize + self.firstHeaderSize

        if neededSize == 0:
            # block size unknown yet: read this file's own headers
            basicHeaderObj = BasicHeader(LOCALTIME)
            systemHeaderObj = SystemHeader()
            radarControllerHeaderObj = RadarControllerHeader()
            processingHeaderObj = ProcessingHeader()

            try:
                if not( basicHeaderObj.read(fp) ): raise IOError
                if not( systemHeaderObj.read(fp) ): raise IOError
                if not( radarControllerHeaderObj.read(fp) ): raise IOError
                if not( processingHeaderObj.read(fp) ): raise IOError
#                data_type = int(numpy.log2((processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))

                neededSize = processingHeaderObj.blockSize + basicHeaderObj.size

            except IOError:
                traceback.print_exc()
                if msgFlag:
                    print "\tThe file %s is empty or it hasn't enough data" % filename

                fp.close()
                return False
        else:
            msg = "\tSkipping the file %s due to it hasn't enough data" %filename

        fp.close()
        fileSize = os.path.getsize(filename)
        currentSize = fileSize - currentPosition
        if currentSize < neededSize:
            if msgFlag and (msg != None):
                print msg #print"\tSkipping the file %s due to it hasn't enough data" %filename
            return False

        return True

    def setup(self,
                path=None,
                startDate=None,
                endDate=None,
                startTime=datetime.time(0,0,0),
                endTime=datetime.time(23,59,59),
                set=None,
                expLabel = "",
                ext = None,
                online = False,
                delay = 60,
                walk = True,
                getblock = False):
        """
        Configure the reader: search the data files (online polling or
        offline by date/time range) and open the first one. Exits the
        process (sys.exit) when nothing is found offline.
        """

        if path == None:
            raise ValueError, "The path is not valid"

        if ext == None:
            ext = self.ext

        if online:
            print "Searching files in online mode..."

            for nTries in range( self.nTries ):
                fullpath, foldercounter, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext, walk=walk, set=set)

                if fullpath:
                    break

                print '\tWaiting %0.2f sec for an valid file in %s: try %02d ...' % (self.delay, path, nTries+1)
                time.sleep( self.delay )

            if not(fullpath):
                print "There 'isn't valied files in %s" % path
                return None

            self.year = year
            self.doy = doy
            # start one set before so the first online read picks this file
            self.set = set - 1
            self.path = path
            self.foldercounter = foldercounter
            last_set = None

        else:
            print "Searching files in offline mode ..."
            pathList, filenameList = self.__searchFilesOffLine(path, startDate=startDate, endDate=endDate,
                                                               startTime=startTime, endTime=endTime,
                                                               set=set, expLabel=expLabel, ext=ext,
                                                               walk=walk)

            if not(pathList):
                print "No *%s files into the folder %s \nfor the range: %s - %s"%(ext, path,
                                                datetime.datetime.combine(startDate,startTime).ctime(),
                                                datetime.datetime.combine(endDate,endTime).ctime())

                sys.exit(-1)


            self.fileIndex = -1
            self.pathList = pathList
            self.filenameList = filenameList
            file_name = os.path.basename(filenameList[-1])
            basename, ext = os.path.splitext(file_name)
            last_set = int(basename[-3:])

        self.online = online
        self.delay = delay
        ext = ext.lower()
        self.ext = ext
        self.getByBlock = getblock

        if not(self.setNextFile()):
            if (startDate!=None) and (endDate!=None):
                print "No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
            elif startDate != None:
                print "No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
            else:
                print "No files"

            sys.exit(-1)

#        self.updateDataHeader()
        if last_set != None:
            self.dataOut.last_block = last_set * self.processingHeaderObj.dataBlocksPerFile + self.basicHeaderObj.dataBlock
        return

    def getBasicHeader(self):
        """
        Copy the timing fields of the current basic header into
        self.dataOut; utctime includes the millisecond fraction plus the
        profile offset (profileIndex * ippSeconds).
        """

        self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000. + self.profileIndex * self.radarControllerHeaderObj.ippSeconds

        self.dataOut.flagTimeBlock = self.flagTimeBlock

        self.dataOut.timeZone = self.basicHeaderObj.timeZone

        self.dataOut.dstFlag = self.basicHeaderObj.dstFlag

        self.dataOut.errorCount = self.basicHeaderObj.errorCount

        self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime

    def getFirstHeader(self):
        # Abstract: copy first-header info into dataOut.
        raise ValueError, "This method has not been implemented"

    def getData(self):
        # Abstract: deliver the next profile/block through dataOut.
        raise ValueError, "This method has not been implemented"

    def hasNotDataInBuffer(self):
        # Abstract: True when a new block must be read.
        raise ValueError, "This method has not been implemented"

    def readBlock(self):
        # Abstract: read one raw data block from the file.
        raise ValueError, "This method has not been implemented"

    def isEndProcess(self):
        """Return 1 when there are no more files to read."""

        return self.flagNoMoreFiles

    def printReadBlocks(self):
        """Print the number of blocks read from the current file."""

        print "Number of read blocks per file %04d" %self.nReadBlocks

    def printTotalBlocks(self):
        """Print the total number of blocks read."""

        print "Number of read blocks %04d" %self.nTotalBlocks

    def printNumberOfBlock(self):
        """Print the current block number and update dataOut.blocknow."""

        if self.flagIsNewBlock:
            print "Block No. %04d, Total blocks %04d -> %s" %(self.basicHeaderObj.dataBlock, self.nTotalBlocks, self.dataOut.datatime.ctime())
            self.dataOut.blocknow = self.basicHeaderObj.dataBlock

    def printInfo(self):
        """Print all four headers once (subsequent calls are no-ops)."""

        if self.__printInfo == False:
            return

        self.basicHeaderObj.printInfo()
        self.systemHeaderObj.printInfo()
        self.radarControllerHeaderObj.printInfo()
        self.processingHeaderObj.printInfo()

        self.__printInfo = False


    def run(self, **kwargs):
        """
        Processing-unit entry point: configure on first call, then
        deliver data through getData().
        """

        if not(self.isConfig):

#            self.dataOut = dataOut
            self.setup(**kwargs)
            self.isConfig = True

        self.getData()

class JRODataWriter(JRODataIO):

    """
    This class writes data to processed files (.r or .pdata). Data is
    always written to disk by blocks.
    """

    blockIndex = 0

    path = None

    setFile = None

    profilesPerBlock = None

    blocksPerFile = None

    nWriteBlocks = 0

    def __init__(self, dataOut=None):
        # Abstract: concrete writers must provide a constructor.
        raise ValueError, "Not implemented"


    def hasAllDataInBuffer(self):
        # Abstract: True when a full block is buffered and can be written.
        raise ValueError, "Not implemented"


    def setBlockDimension(self):
        # Abstract: compute the shape/size of one output block.
        raise ValueError, "Not implemented"


    def writeBlock(self):
        # Abstract: write one buffered block to the open file.
        raise ValueError, "No implemented"


    def putData(self):
        # Abstract: accumulate dataOut into the buffer / trigger writes.
        raise ValueError, "No implemented"


    def setBasicHeader(self):
        """
        Fill the basic header from dataOut: version, block counter and
        the UTC time split into integer seconds plus milliseconds.
        """

        self.basicHeaderObj.size = self.basicHeaderSize #bytes
        self.basicHeaderObj.version = self.versionFile
        self.basicHeaderObj.dataBlock = self.nTotalBlocks

        utc = numpy.floor(self.dataOut.utctime)
        milisecond = (self.dataOut.utctime - utc)* 1000.0

        self.basicHeaderObj.utc = utc
        self.basicHeaderObj.miliSecond = milisecond
        self.basicHeaderObj.timeZone = self.dataOut.timeZone
        self.basicHeaderObj.dstFlag = self.dataOut.dstFlag
        self.basicHeaderObj.errorCount = self.dataOut.errorCount

    def setFirstHeader(self):
        """
        Build a copy of the First Header from dataOut.

        Affected:

            self.basicHeaderObj
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.processingHeaderObj

        Return:
            None
        """

        raise ValueError, "No implemented"

    def __writeFirstHeader(self):
        """
        Write the first header of the file, i.e. the Basic header plus
        the Long header (SystemHeader, RadarControllerHeader,
        ProcessingHeader).

        Affected:
            __dataType

        Return:
            None
        """

        # COMPUTE PARAMETERS

        sizeLongHeader = self.systemHeaderObj.size + self.radarControllerHeaderObj.size + self.processingHeaderObj.size
        self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader

        self.basicHeaderObj.write(self.fp)
        self.systemHeaderObj.write(self.fp)
        self.radarControllerHeaderObj.write(self.fp)
        self.processingHeaderObj.write(self.fp)

        self.dtype = self.dataOut.dtype

    def __setNewBlock(self):
        """
        If this is a new file write the First Header, otherwise write
        only the Basic Header of the next block.

        Return:
            0 : nothing could be written
            1 : the Basic or the First Header was written
        """
        if self.fp == None:
            self.setNextFile()

        if self.flagIsNewFile:
            return 1

        if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
            self.basicHeaderObj.write(self.fp)
            return 1

        if not( self.setNextFile() ):
            return 0

        return 1


    def writeNextBlock(self):
        """
        Prepare the next data block position and write the buffered data.

        Return:
            0 : the data block could not be written
            1 : the data block was written
        """
        if not( self.__setNewBlock() ):
            return 0

        self.writeBlock()

        return 1

    def setNextFile(self):
        """
        Determine and open the next file to be written.

        Affected:
            self.filename
            self.subfolder
            self.fp
            self.setFile
            self.flagIsNewFile

        Return:
            0 : the file cannot be written
            1 : the file is ready to be written
        """
        ext = self.ext
        path = self.path

        if self.fp != None:
            self.fp.close()

        timeTuple = time.localtime( self.dataOut.utctime)
        subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)

        fullpath = os.path.join( path, subfolder )
        if not( os.path.exists(fullpath) ):
            os.mkdir(fullpath)
            self.setFile = -1 #initialize the set counter
        else:
            filesList = os.listdir( fullpath )
            if len( filesList ) > 0:
                filesList = sorted( filesList, key=str.lower )
                filen = filesList[-1]
                # the filename must have the following format
                # 0 1234 567 89A BCDE (hex)
                # x YYYY DDD SSS .ext
                if isNumber( filen[8:11] ):
                    self.setFile = int( filen[8:11] ) #continue from the last file's set number
                else:
                    self.setFile = -1
            else:
                self.setFile = -1 #initialize the set counter

        setFile = self.setFile
        setFile += 1

        file = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
                                    timeTuple.tm_year,
                                    timeTuple.tm_yday,
                                    setFile,
                                    ext )

        filename = os.path.join( path, subfolder, file )

        fp = open( filename,'wb' )

        self.blockIndex = 0

        #save attributes
        self.filename = filename
        self.subfolder = subfolder
        self.fp = fp
        self.setFile = setFile
        self.flagIsNewFile = 1

        self.setFirstHeader()

        print 'Writing the file: %s'%self.filename

        self.__writeFirstHeader()

        return 1

    def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=64, set=0, ext=None):
        """
        Configure the output format and write the First Header.

        Inputs:
            path   : destination path where the files will be created
            format : format in which the file will be saved
            set    : set number of the file

        Return:
            0 : the setup failed
            1 : the setup succeeded
        """

        if ext == None:
            ext = self.ext

        ext = ext.lower()

        self.ext = ext

        self.path = path

        self.setFile = set - 1

        self.blocksPerFile = blocksPerFile

        self.profilesPerBlock = profilesPerBlock

        self.dataOut = dataOut

        if not(self.setNextFile()):
            print "There isn't a next file"
            return 0

        self.setBlockDimension()

        return 1

    def run(self, dataOut, **kwargs):
        """
        Operation entry point: configure on first call, then buffer/write
        dataOut through putData().
        """

        if not(self.isConfig):

            self.setup(dataOut, **kwargs)
            self.isConfig = True

        self.putData()

@@ -1,763 +1,763
1 1 '''
2 2
3 3 '''
4 4
5 5 import os, sys
6 6 import time, datetime
7 7 import numpy
8 8 import fnmatch
9 9 import glob
10 10
11 11 try:
12 12 import pyfits
13 13 except:
14 14 """
15 15 """
16 16
17 17 from xml.etree.ElementTree import ElementTree
18 18
19 19 from jroIO_base import isDoyFolder, isNumber
20 20 from model.proc.jroproc_base import Operation, ProcessingUnit
21 21
class ParameterConf:
    """One <Parameter name=... value=...> entry read from a metadata XML file."""

    ELEMENTNAME = 'Parameter'

    def __init__(self):
        # Both fields stay empty until readXml() fills them in.
        self.name, self.value = '', ''

    def readXml(self, parmElement):
        """Copy the 'name' and 'value' attributes from an XML element."""
        for attr in ('name', 'value'):
            setattr(self, attr, parmElement.get(attr))

    def getElementName(self):
        """Return the XML tag name this configuration class maps to."""
        return self.ELEMENTNAME
34 34
class Metadata:
    """Parses a metadata XML file into a list of ParameterConf objects."""

    def __init__(self, filename):
        self.parmConfObjList = []
        self.readXml(filename)

    def readXml(self, filename):
        """Load *filename* and collect every <Parameter> element found in it."""
        self.projectElement = None
        self.procUnitConfObjDict = {}
        self.projectElement = ElementTree().parse(filename)
        self.project = self.projectElement.tag

        # Iterate over every <Parameter> node, wherever it appears in the tree.
        tagname = ParameterConf().getElementName()
        for element in self.projectElement.getiterator(tagname):
            confObj = ParameterConf()
            confObj.readXml(element)
            self.parmConfObjList.append(confObj)
53 53
class FitsWriter(Operation):
    """
    Writes spectra blocks to FITS files: one primary HDU holding the
    experiment metadata, plus one image extension appended per data block.
    One file holds up to self.dataBlocksPerFile blocks.
    """

    def __init__(self):
        # Writer state; set up properly by setup()/setNextFile().
        self.isConfig = False
        self.dataBlocksPerFile = None
        self.blockIndex = 0
        self.flagIsNewFile = 1
        self.fitsObj = None
        self.optchar = 'P'
        self.ext = '.fits'
        self.setFile = 0

    def setFitsHeader(self, dataOut, metadatafile):
        """
        Build the primary HDU from the XML metadata file plus dataOut fields,
        write it to self.filename and append the height list as an extension.
        """

        header_data = pyfits.PrimaryHDU()

        # Copy every parameter from the metadata XML verbatim into the header.
        metadata4fits = Metadata(metadatafile)
        for parameter in metadata4fits.parmConfObjList:
            parm_name = parameter.name
            parm_value = parameter.value

#             if parm_value == 'fromdatadatetime':
#                 value = time.strftime("%b %d %Y %H:%M:%S", dataOut.datatime.timetuple())
#             elif parm_value == 'fromdataheights':
#                 value = dataOut.nHeights
#             elif parm_value == 'fromdatachannel':
#                 value = dataOut.nChannels
#             elif parm_value == 'fromdatasamples':
#                 value = dataOut.nFFTPoints
#             else:
#                 value = parm_value

            header_data.header[parm_name] = parm_value

        # Fixed keywords derived from the data object itself.
        header_data.header['DATETIME'] = time.strftime("%b %d %Y %H:%M:%S", dataOut.datatime.timetuple())
        header_data.header['CHANNELLIST'] = str(dataOut.channelList)
        header_data.header['NCHANNELS'] = dataOut.nChannels
        #header_data.header['HEIGHTS'] = dataOut.heightList
        header_data.header['NHEIGHTS'] = dataOut.nHeights

        header_data.header['IPPSECONDS'] = dataOut.ippSeconds
        header_data.header['NCOHINT'] = dataOut.nCohInt
        header_data.header['NINCOHINT'] = dataOut.nIncohInt
        header_data.header['TIMEZONE'] = dataOut.timeZone
        # NBLOCK is updated again on every addData() call.
        header_data.header['NBLOCK'] = self.blockIndex

        header_data.writeto(self.filename)

        self.addExtension(dataOut.heightList,'HEIGHTLIST')


    def setup(self, dataOut, path, dataBlocksPerFile, metadatafile):
        """Store the writer configuration; files are opened lazily by putData()."""

        self.path = path
        self.dataOut = dataOut
        self.metadatafile = metadatafile
        self.dataBlocksPerFile = dataBlocksPerFile

    def open(self):
        """Re-open the current FITS file for in-place updating."""
        self.fitsObj = pyfits.open(self.filename, mode='update')


    def addExtension(self, data, tagname):
        """Append *data* as a named image extension and flush to disk."""
        self.open()
        extension = pyfits.ImageHDU(data=data, name=tagname)
        #extension.header['TAG'] = tagname
        self.fitsObj.append(extension)
        self.write()

    def addData(self, data):
        """
        Append one data block as an image extension tagged with the block's
        UTC time, bump the block counter and refresh NBLOCK in the primary HDU.
        """
        self.open()
        extension = pyfits.ImageHDU(data=data, name=self.fitsObj[0].header['DATATYPE'])
        extension.header['UTCTIME'] = self.dataOut.utctime
        self.fitsObj.append(extension)
        self.blockIndex += 1
        self.fitsObj[0].header['NBLOCK'] = self.blockIndex

        self.write()

    def write(self):
        """Flush pending changes and close the FITS file handle."""

        self.fitsObj.flush(verbose=True)
        self.fitsObj.close()


    def setNextFile(self):
        """
        Create the next output file (P<year><doy><set>.fits) inside a
        d<year><doy> subfolder, continuing the set numbering found on disk.

        Return:
            1 : a new file was set up
        """

        ext = self.ext
        path = self.path

        timeTuple = time.localtime( self.dataOut.utctime)
        subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)

        fullpath = os.path.join( path, subfolder )
        if not( os.path.exists(fullpath) ):
            os.mkdir(fullpath)
            self.setFile = -1 # initialize the set counter
        else:
            filesList = os.listdir( fullpath )
            if len( filesList ) > 0:
                filesList = sorted( filesList, key=str.lower )
                filen = filesList[-1]

                # chars 8..10 of the last file name carry its set number
                if isNumber( filen[8:11] ):
                    self.setFile = int( filen[8:11] ) # continue from the last file's set number
                else:
                    self.setFile = -1
            else:
                self.setFile = -1 # initialize the set counter

        setFile = self.setFile
        setFile += 1

        file = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
                                        timeTuple.tm_year,
                                        timeTuple.tm_yday,
                                        setFile,
                                        ext )

        filename = os.path.join( path, subfolder, file )

        self.blockIndex = 0
        self.filename = filename
        self.setFile = setFile
        self.flagIsNewFile = 1

        print 'Writing the file: %s'%self.filename

        self.setFitsHeader(self.dataOut, self.metadatafile)

        return 1

    def writeBlock(self):
        """Write the current spectra block and mark the file as no longer new."""
        self.addData(self.dataOut.data_spc)
        self.flagIsNewFile = 0


    def __setNewBlock(self):
        """
        Decide whether the current file can take another block; roll over to a
        new file when full.  Returns 1 when a block may be written, 0 otherwise.
        """

        if self.flagIsNewFile:
            return 1

        if self.blockIndex < self.dataBlocksPerFile:
            return 1

        if not( self.setNextFile() ):
            return 0

        return 1

    def writeNextBlock(self):
        """Write one block, rolling files as needed. Returns 1 on success."""
        if not( self.__setNewBlock() ):
            return 0
        self.writeBlock()
        return 1

    def putData(self):
        """Open the first file on demand, then write the current block."""
        if self.flagIsNewFile:
            self.setNextFile()
        self.writeNextBlock()

    def run(self, dataOut, **kwargs):
        """Per-iteration entry point: configure once on first call, then write."""
        if not(self.isConfig):
            self.setup(dataOut, **kwargs)
            self.isConfig = True
        self.putData()
221 221
222 222
223 223 class FitsReader(ProcessingUnit):
224 224
225 225 # __TIMEZONE = time.timezone
226 226
227 227 expName = None
228 228 datetimestr = None
229 229 utc = None
230 230 nChannels = None
231 231 nSamples = None
232 232 dataBlocksPerFile = None
233 233 comments = None
234 234 lastUTTime = None
235 235 header_dict = None
236 236 data = None
237 237 data_header_dict = None
238 238
239 239 def __init__(self):
240 240 self.isConfig = False
241 241 self.ext = '.fits'
242 242 self.setFile = 0
243 243 self.flagNoMoreFiles = 0
244 244 self.flagIsNewFile = 1
245 245 self.flagTimeBlock = None
246 246 self.fileIndex = None
247 247 self.filename = None
248 248 self.fileSize = None
249 249 self.fitsObj = None
250 250 self.timeZone = None
251 251 self.nReadBlocks = 0
252 252 self.nTotalBlocks = 0
253 253 self.dataOut = self.createObjByDefault()
254 254 self.maxTimeStep = 10# deberia ser definido por el usuario usando el metodo setup()
255 255 self.blockIndex = 1
256 256
257 257 def createObjByDefault(self):
258 258
259 259 dataObj = Fits()
260 260
261 261 return dataObj
262 262
263 263 def isFileinThisTime(self, filename, startTime, endTime, useLocalTime=False):
264 264 try:
265 265 fitsObj = pyfits.open(filename,'readonly')
266 266 except:
267 267 raise IOError, "The file %s can't be opened" %(filename)
268 268
269 269 header = fitsObj[0].header
270 270 struct_time = time.strptime(header['DATETIME'], "%b %d %Y %H:%M:%S")
271 271 utc = time.mktime(struct_time) - time.timezone #TIMEZONE debe ser un parametro del header FITS
272 272
273 273 ltc = utc
274 274 if useLocalTime:
275 275 ltc -= time.timezone
276 276 thisDatetime = datetime.datetime.utcfromtimestamp(ltc)
277 277 thisTime = thisDatetime.time()
278 278
279 279 if not ((startTime <= thisTime) and (endTime > thisTime)):
280 280 return None
281 281
282 282 return thisDatetime
283 283
284 284 def __setNextFileOnline(self):
285 285 raise ValueError, "No implemented"
286 286
287 287 def __setNextFileOffline(self):
288 288 idFile = self.fileIndex
289 289
290 290 while (True):
291 291 idFile += 1
292 292 if not(idFile < len(self.filenameList)):
293 293 self.flagNoMoreFiles = 1
294 294 print "No more Files"
295 295 return 0
296 296
297 297 filename = self.filenameList[idFile]
298 298
299 299 # if not(self.__verifyFile(filename)):
300 300 # continue
301 301
302 302 fileSize = os.path.getsize(filename)
303 303 fitsObj = pyfits.open(filename,'readonly')
304 304 break
305 305
306 306 self.flagIsNewFile = 1
307 307 self.fileIndex = idFile
308 308 self.filename = filename
309 309 self.fileSize = fileSize
310 310 self.fitsObj = fitsObj
311 311 self.blockIndex = 0
312 312 print "Setting the file: %s"%self.filename
313 313
314 314 return 1
315 315
316 316 def readHeader(self):
317 317 headerObj = self.fitsObj[0]
318 318
319 319 self.header_dict = headerObj.header
320 320 if 'EXPNAME' in headerObj.header.keys():
321 321 self.expName = headerObj.header['EXPNAME']
322 322
323 323 if 'DATATYPE' in headerObj.header.keys():
324 324 self.dataType = headerObj.header['DATATYPE']
325 325
326 326 self.datetimestr = headerObj.header['DATETIME']
327 327 channelList = headerObj.header['CHANNELLIST']
328 328 channelList = channelList.split('[')
329 329 channelList = channelList[1].split(']')
330 330 channelList = channelList[0].split(',')
331 331 channelList = [int(ch) for ch in channelList]
332 332 self.channelList = channelList
333 333 self.nChannels = headerObj.header['NCHANNELS']
334 334 self.nHeights = headerObj.header['NHEIGHTS']
335 335 self.ippSeconds = headerObj.header['IPPSECONDS']
336 336 self.nCohInt = headerObj.header['NCOHINT']
337 337 self.nIncohInt = headerObj.header['NINCOHINT']
338 338 self.dataBlocksPerFile = headerObj.header['NBLOCK']
339 339 self.timeZone = headerObj.header['TIMEZONE']
340 340
341 self.timeInterval = self.ippSeconds * self.nCohInt * self.nIncohInt
341 # self.timeInterval = self.ippSeconds * self.nCohInt * self.nIncohInt
342 342
343 343 if 'COMMENT' in headerObj.header.keys():
344 344 self.comments = headerObj.header['COMMENT']
345 345
346 346 self.readHeightList()
347 347
348 348 def readHeightList(self):
349 349 self.blockIndex = self.blockIndex + 1
350 350 obj = self.fitsObj[self.blockIndex]
351 351 self.heightList = obj.data
352 352 self.blockIndex = self.blockIndex + 1
353 353
354 354 def readExtension(self):
355 355 obj = self.fitsObj[self.blockIndex]
356 356 self.heightList = obj.data
357 357 self.blockIndex = self.blockIndex + 1
358 358
359 359 def setNextFile(self):
360 360
361 361 if self.online:
362 362 newFile = self.__setNextFileOnline()
363 363 else:
364 364 newFile = self.__setNextFileOffline()
365 365
366 366 if not(newFile):
367 367 return 0
368 368
369 369 self.readHeader()
370 370
371 371 self.nReadBlocks = 0
372 372 # self.blockIndex = 1
373 373 return 1
374 374
375 375 def __searchFilesOffLine(self,
376 376 path,
377 377 startDate,
378 378 endDate,
379 379 startTime=datetime.time(0,0,0),
380 380 endTime=datetime.time(23,59,59),
381 381 set=None,
382 382 expLabel='',
383 383 ext='.fits',
384 384 walk=True):
385 385
386 386 pathList = []
387 387
388 388 if not walk:
389 389 pathList.append(path)
390 390
391 391 else:
392 392 dirList = []
393 393 for thisPath in os.listdir(path):
394 394 if not os.path.isdir(os.path.join(path,thisPath)):
395 395 continue
396 396 if not isDoyFolder(thisPath):
397 397 continue
398 398
399 399 dirList.append(thisPath)
400 400
401 401 if not(dirList):
402 402 return None, None
403 403
404 404 thisDate = startDate
405 405
406 406 while(thisDate <= endDate):
407 407 year = thisDate.timetuple().tm_year
408 408 doy = thisDate.timetuple().tm_yday
409 409
410 410 matchlist = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy) + '*')
411 411 if len(matchlist) == 0:
412 412 thisDate += datetime.timedelta(1)
413 413 continue
414 414 for match in matchlist:
415 415 pathList.append(os.path.join(path,match,expLabel))
416 416
417 417 thisDate += datetime.timedelta(1)
418 418
419 419 if pathList == []:
420 420 print "Any folder was found for the date range: %s-%s" %(startDate, endDate)
421 421 return None, None
422 422
423 423 print "%d folder(s) was(were) found for the date range: %s - %s" %(len(pathList), startDate, endDate)
424 424
425 425 filenameList = []
426 426 datetimeList = []
427 427
428 428 for i in range(len(pathList)):
429 429
430 430 thisPath = pathList[i]
431 431
432 432 fileList = glob.glob1(thisPath, "*%s" %ext)
433 433 fileList.sort()
434 434
435 435 for file in fileList:
436 436
437 437 filename = os.path.join(thisPath,file)
438 438 thisDatetime = self.isFileinThisTime(filename, startTime, endTime)
439 439
440 440 if not(thisDatetime):
441 441 continue
442 442
443 443 filenameList.append(filename)
444 444 datetimeList.append(thisDatetime)
445 445
446 446 if not(filenameList):
447 447 print "Any file was found for the time range %s - %s" %(startTime, endTime)
448 448 return None, None
449 449
450 450 print "%d file(s) was(were) found for the time range: %s - %s" %(len(filenameList), startTime, endTime)
451 451 print
452 452
453 453 for i in range(len(filenameList)):
454 454 print "%s -> [%s]" %(filenameList[i], datetimeList[i].ctime())
455 455
456 456 self.filenameList = filenameList
457 457 self.datetimeList = datetimeList
458 458
459 459 return pathList, filenameList
460 460
461 461 def setup(self, path=None,
462 462 startDate=None,
463 463 endDate=None,
464 464 startTime=datetime.time(0,0,0),
465 465 endTime=datetime.time(23,59,59),
466 466 set=0,
467 467 expLabel = "",
468 468 ext = None,
469 469 online = False,
470 470 delay = 60,
471 471 walk = True):
472 472
473 473 if path == None:
474 474 raise ValueError, "The path is not valid"
475 475
476 476 if ext == None:
477 477 ext = self.ext
478 478
479 479 if not(online):
480 480 print "Searching files in offline mode ..."
481 481 pathList, filenameList = self.__searchFilesOffLine(path, startDate=startDate, endDate=endDate,
482 482 startTime=startTime, endTime=endTime,
483 483 set=set, expLabel=expLabel, ext=ext,
484 484 walk=walk)
485 485
486 486 if not(pathList):
487 487 print "No *%s files into the folder %s \nfor the range: %s - %s"%(ext, path,
488 488 datetime.datetime.combine(startDate,startTime).ctime(),
489 489 datetime.datetime.combine(endDate,endTime).ctime())
490 490
491 491 sys.exit(-1)
492 492
493 493 self.fileIndex = -1
494 494 self.pathList = pathList
495 495 self.filenameList = filenameList
496 496
497 497 self.online = online
498 498 self.delay = delay
499 499 ext = ext.lower()
500 500 self.ext = ext
501 501
502 502 if not(self.setNextFile()):
503 503 if (startDate!=None) and (endDate!=None):
504 504 print "No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
505 505 elif startDate != None:
506 506 print "No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
507 507 else:
508 508 print "No files"
509 509
510 510 sys.exit(-1)
511 511
512 512
513 513
514 514 def readBlock(self):
515 515 dataObj = self.fitsObj[self.blockIndex]
516 516
517 517 self.data = dataObj.data
518 518 self.data_header_dict = dataObj.header
519 519 self.utc = self.data_header_dict['UTCTIME']
520 520
521 521 self.flagIsNewFile = 0
522 522 self.blockIndex += 1
523 523 self.nTotalBlocks += 1
524 524 self.nReadBlocks += 1
525 525
526 526 return 1
527 527
528 528 def __jumpToLastBlock(self):
529 529 raise ValueError, "No implemented"
530 530
531 531 def __waitNewBlock(self):
532 532 """
533 533 Return 1 si se encontro un nuevo bloque de datos, 0 de otra forma.
534 534
535 535 Si el modo de lectura es OffLine siempre retorn 0
536 536 """
537 537 if not self.online:
538 538 return 0
539 539
540 540 if (self.nReadBlocks >= self.dataBlocksPerFile):
541 541 return 0
542 542
543 543 currentPointer = self.fp.tell()
544 544
545 545 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
546 546
547 547 for nTries in range( self.nTries ):
548 548
549 549 self.fp.close()
550 550 self.fp = open( self.filename, 'rb' )
551 551 self.fp.seek( currentPointer )
552 552
553 553 self.fileSize = os.path.getsize( self.filename )
554 554 currentSize = self.fileSize - currentPointer
555 555
556 556 if ( currentSize >= neededSize ):
557 557 self.__rdBasicHeader()
558 558 return 1
559 559
560 560 print "\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
561 561 time.sleep( self.delay )
562 562
563 563
564 564 return 0
565 565
566 566 def __setNewBlock(self):
567 567
568 568 if self.online:
569 569 self.__jumpToLastBlock()
570 570
571 571 if self.flagIsNewFile:
572 572 return 1
573 573
574 574 self.lastUTTime = self.utc
575 575
576 576 if self.online:
577 577 if self.__waitNewBlock():
578 578 return 1
579 579
580 580 if self.nReadBlocks < self.dataBlocksPerFile:
581 581 return 1
582 582
583 583 if not(self.setNextFile()):
584 584 return 0
585 585
586 586 deltaTime = self.utc - self.lastUTTime
587 587
588 588 self.flagTimeBlock = 0
589 589
590 590 if deltaTime > self.maxTimeStep:
591 591 self.flagTimeBlock = 1
592 592
593 593 return 1
594 594
595 595
596 596 def readNextBlock(self):
597 597 if not(self.__setNewBlock()):
598 598 return 0
599 599
600 600 if not(self.readBlock()):
601 601 return 0
602 602
603 603 return 1
604 604
605 605
606 606 def getData(self):
607 607
608 608 if self.flagNoMoreFiles:
609 609 self.dataOut.flagNoData = True
610 610 print 'Process finished'
611 611 return 0
612 612
613 613 self.flagTimeBlock = 0
614 614 self.flagIsNewBlock = 0
615 615
616 616 if not(self.readNextBlock()):
617 617 return 0
618 618
619 619 if self.data == None:
620 620 self.dataOut.flagNoData = True
621 621 return 0
622 622
623 623 self.dataOut.data = self.data
624 624 self.dataOut.data_header = self.data_header_dict
625 625 self.dataOut.utctime = self.utc
626 626
627 627 self.dataOut.header = self.header_dict
628 628 self.dataOut.expName = self.expName
629 629 self.dataOut.nChannels = self.nChannels
630 630 self.dataOut.timeZone = self.timeZone
631 631 self.dataOut.dataBlocksPerFile = self.dataBlocksPerFile
632 632 self.dataOut.comments = self.comments
633 633 self.dataOut.timeInterval = self.timeInterval
634 634 self.dataOut.channelList = self.channelList
635 635 self.dataOut.heightList = self.heightList
636 636 self.dataOut.flagNoData = False
637 637
638 638 return self.dataOut.data
639 639
640 640 def run(self, **kwargs):
641 641
642 642 if not(self.isConfig):
643 643 self.setup(**kwargs)
644 644 self.isConfig = True
645 645
646 646 self.getData()
647 647
class SpectraHeisWriter(Operation):
    """
    Writes Heis spectra to FITS files through the project's FITS helper
    object (self.wrObj), one file per block, grouped in per-day folders.
    """
#     set = None
    setFile = None
    idblock = None
    doypath = None
    subfolder = None

    def __init__(self):
        # self.wrObj wraps the actual FITS construction/writing calls.
        self.wrObj = FITS()
#         self.dataOut = dataOut
        self.nTotalBlocks=0
#         self.set = None
        self.setFile = None
        self.idblock = 0
        self.wrpath = None
        self.doypath = None
        self.subfolder = None
        self.isConfig = False

    # NOTE(review): 'self' is missing here — when called as a bound method
    # the instance itself would be passed as 'str'.  The method appears to
    # be unused within this class; left untouched for compatibility.
    def isNumber(str):
        """
        Check whether the characters of a string can be converted to a number.

        Exceptions:
            raised internally (and caught) when the string is not convertible
        Input:
            str, the string analyzed to decide if it is numeric or not

        Return:
            True : the string is numeric
            False : the string is not numeric
        """
        try:
            float( str )
            return True
        except:
            return False

    def setup(self, dataOut, wrpath):
        """Store the destination path (creating it if needed) and the data object."""

        if not(os.path.exists(wrpath)):
            os.mkdir(wrpath)

        self.wrpath = wrpath
#         self.setFile = 0
        self.dataOut = dataOut

    def putData(self):
        """
        Write the current spectra block as a FITS binary table
        (one frequency column + one power column per channel).
        """
        name= time.localtime( self.dataOut.utctime)
        ext=".fits"

        # First call: create a unique per-day folder F<year><doy>_<timestamp>
        if self.doypath == None:
            self.subfolder = 'F%4.4d%3.3d_%d' % (name.tm_year,name.tm_yday,time.mktime(datetime.datetime.now().timetuple()))
            self.doypath = os.path.join( self.wrpath, self.subfolder )
            os.mkdir(self.doypath)

        if self.setFile == None:
#             self.set = self.dataOut.set
            self.setFile = 0
#         if self.set != self.dataOut.set:
##             self.set = self.dataOut.set
#             self.setFile = 0

        #make the filename
        file = 'D%4.4d%3.3d_%3.3d%s' % (name.tm_year,name.tm_yday,self.setFile,ext)

        filename = os.path.join(self.wrpath,self.subfolder, file)

        idblock = numpy.array([self.idblock],dtype="int64")
        header=self.wrObj.cFImage(idblock=idblock,
                                  year=time.gmtime(self.dataOut.utctime).tm_year,
                                  month=time.gmtime(self.dataOut.utctime).tm_mon,
                                  day=time.gmtime(self.dataOut.utctime).tm_mday,
                                  hour=time.gmtime(self.dataOut.utctime).tm_hour,
                                  minute=time.gmtime(self.dataOut.utctime).tm_min,
                                  second=time.gmtime(self.dataOut.utctime).tm_sec)

        # Frequency axis centered at zero, scaled by the height resolution.
        c=3E8
        deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
        freq=numpy.arange(-1*self.dataOut.nHeights/2.,self.dataOut.nHeights/2.)*(c/(2*deltaHeight*1000))

        colList = []

        colFreq=self.wrObj.setColF(name="freq", format=str(self.dataOut.nFFTPoints)+'E', array=freq)

        colList.append(colFreq)

        nchannel=self.dataOut.nChannels

        # One power column (in dB) per channel.
        for i in range(nchannel):
            col = self.wrObj.writeData(name="PCh"+str(i+1),
                                       format=str(self.dataOut.nFFTPoints)+'E',
                                       data=10*numpy.log10(self.dataOut.data_spc[i,:]))

            colList.append(col)

        data=self.wrObj.Ctable(colList=colList)

        self.wrObj.CFile(header,data)

        self.wrObj.wFile(filename)

        #update the setFile
        self.setFile += 1
        self.idblock += 1

        return 1

    def run(self, dataOut, **kwargs):
        """Per-iteration entry point: configure once on first call, then write."""

        if not(self.isConfig):

            self.setup(dataOut, **kwargs)
            self.isConfig = True

        self.putData()
@@ -1,761 +1,761
1 1 '''
2 2 '''
3 3
4 4 import numpy
5 5
6 6 from jroIO_base import LOCALTIME, JRODataReader, JRODataWriter
7 7 from model.proc.jroproc_base import ProcessingUnit, Operation
8 8 from model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
9 9 from model.data.jrodata import Spectra
10 10
11 11 class SpectraReader(JRODataReader, ProcessingUnit):
12 12 """
13 13 Esta clase permite leer datos de espectros desde archivos procesados (.pdata). La lectura
14 14 de los datos siempre se realiza por bloques. Los datos leidos (array de 3 dimensiones)
15 15 son almacenados en tres buffer's para el Self Spectra, el Cross Spectra y el DC Channel.
16 16
17 17 paresCanalesIguales * alturas * perfiles (Self Spectra)
18 18 paresCanalesDiferentes * alturas * perfiles (Cross Spectra)
19 19 canales * alturas (DC Channels)
20 20
21 21 Esta clase contiene instancias (objetos) de las clases BasicHeader, SystemHeader,
22 22 RadarControllerHeader y Spectra. Los tres primeros se usan para almacenar informacion de la
23 23 cabecera de datos (metadata), y el cuarto (Spectra) para obtener y almacenar un bloque de
24 24 datos desde el "buffer" cada vez que se ejecute el metodo "getData".
25 25
26 26 Example:
27 27 dpath = "/home/myuser/data"
28 28
29 29 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
30 30
31 31 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
32 32
33 33 readerObj = SpectraReader()
34 34
35 35 readerObj.setup(dpath, startTime, endTime)
36 36
37 37 while(True):
38 38
39 39 readerObj.getData()
40 40
41 41 print readerObj.data_spc
42 42
43 43 print readerObj.data_cspc
44 44
45 45 print readerObj.data_dc
46 46
47 47 if readerObj.flagNoMoreFiles:
48 48 break
49 49
50 50 """
51 51
52 52 pts2read_SelfSpectra = 0
53 53
54 54 pts2read_CrossSpectra = 0
55 55
56 56 pts2read_DCchannels = 0
57 57
58 58 ext = ".pdata"
59 59
60 60 optchar = "P"
61 61
62 62 dataOut = None
63 63
64 64 nRdChannels = None
65 65
66 66 nRdPairs = None
67 67
68 68 rdPairList = []
69 69
    def __init__(self):
        """
        Initializer of the SpectraReader class for reading spectra data.

        Inputs:
            dataOut : Spectra-class object.  It is used to store one data
                      profile each time one is requested (getData).  The
                      profile is obtained from the data buffer; when the
                      buffer is empty a new block of data is read from file.
                      If this parameter is not given, one is created
                      internally.

        Affected:
            self.dataOut

        Return : None
        """

        # TODO(legacy note): remove the inheritance from the base class
        ProcessingUnit.__init__(self)

#         self.isConfig = False

        # Points to read per block per spectra type (filled by getBlockDimension)
        self.pts2read_SelfSpectra = 0

        self.pts2read_CrossSpectra = 0

        self.pts2read_DCchannels = 0

        self.datablock = None

        self.utc = None

        self.ext = ".pdata"

        self.optchar = "P"

        # Header objects describing the current file/block
        self.basicHeaderObj = BasicHeader(LOCALTIME)

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()

        self.online = 0

        self.fp = None

        self.idFile = None

        self.dtype = None

        self.fileSizeByHeader = None

        self.filenameList = []

        self.filename = None

        self.fileSize = None

        self.firstHeaderSize = 0

        self.basicHeaderSize = 24

        self.pathList = []

        self.lastUTTime = 0

        # max allowed gap (seconds) between consecutive blocks before flagging
        self.maxTimeStep = 30

        self.flagNoMoreFiles = 0

        self.set = 0

        self.path = None

        self.delay = 60 #seconds

        self.nTries = 3 #quantity tries

        self.nFiles = 3 #number of files for searching

        self.nReadBlocks = 0

        self.flagIsNewFile = 1

        self.__isFirstTimeOnline = 1

#         self.ippSeconds = 0

        self.flagTimeBlock = 0

        self.flagIsNewBlock = 0

        self.nTotalBlocks = 0

        self.blocksize = 0

        self.dataOut = self.createObjByDefault()

        self.profileIndex = 1 #Always
172 172
173 173
174 174 def createObjByDefault(self):
175 175
176 176 dataObj = Spectra()
177 177
178 178 return dataObj
179 179
    def __hasNotDataInBuffer(self):
        # Spectra files are consumed one whole block per getData() call, so the
        # internal buffer never holds leftover profiles: always report "empty".
        return 1
182 182
183 183
184 184 def getBlockDimension(self):
185 185 """
186 186 Obtiene la cantidad de puntos a leer por cada bloque de datos
187 187
188 188 Affected:
189 189 self.nRdChannels
190 190 self.nRdPairs
191 191 self.pts2read_SelfSpectra
192 192 self.pts2read_CrossSpectra
193 193 self.pts2read_DCchannels
194 194 self.blocksize
195 195 self.dataOut.nChannels
196 196 self.dataOut.nPairs
197 197
198 198 Return:
199 199 None
200 200 """
201 201 self.nRdChannels = 0
202 202 self.nRdPairs = 0
203 203 self.rdPairList = []
204 204
205 205 for i in range(0, self.processingHeaderObj.totalSpectra*2, 2):
206 206 if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i+1]:
207 207 self.nRdChannels = self.nRdChannels + 1 #par de canales iguales
208 208 else:
209 209 self.nRdPairs = self.nRdPairs + 1 #par de canales diferentes
210 210 self.rdPairList.append((self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1]))
211 211
212 212 pts2read = self.processingHeaderObj.nHeights * self.processingHeaderObj.profilesPerBlock
213 213
214 214 self.pts2read_SelfSpectra = int(self.nRdChannels * pts2read)
215 215 self.blocksize = self.pts2read_SelfSpectra
216 216
217 217 if self.processingHeaderObj.flag_cspc:
218 218 self.pts2read_CrossSpectra = int(self.nRdPairs * pts2read)
219 219 self.blocksize += self.pts2read_CrossSpectra
220 220
221 221 if self.processingHeaderObj.flag_dc:
222 222 self.pts2read_DCchannels = int(self.systemHeaderObj.nChannels * self.processingHeaderObj.nHeights)
223 223 self.blocksize += self.pts2read_DCchannels
224 224
225 225 # self.blocksize = self.pts2read_SelfSpectra + self.pts2read_CrossSpectra + self.pts2read_DCchannels
226 226
227 227
    def readBlock(self):
        """
        Read the data block at the current file-pointer position (self.fp) and
        update every parameter related to the data block (metadata + data).
        The data read is stored in the buffer and the buffer counter is reset
        to 0.

        Return: None

        Affected:

            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
            self.data_spc
            self.data_cspc
            self.data_dc

        Exceptions:
            When a block read is not a valid block
        """
        blockOk_flag = False
        fpointer = self.fp.tell()

        # NOTE(review): self spectra are read with self.dtype[0] (single field)
        # while cross spectra and DC use the full compound dtype with
        # 'real'/'imag' fields — confirm this matches the on-disk layout.
        spc = numpy.fromfile( self.fp, self.dtype[0], self.pts2read_SelfSpectra )
        spc = spc.reshape( (self.nRdChannels, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) # reshape into a 3D array

        if self.processingHeaderObj.flag_cspc:
            cspc = numpy.fromfile( self.fp, self.dtype, self.pts2read_CrossSpectra )
            cspc = cspc.reshape( (self.nRdPairs, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) # reshape into a 3D array

        if self.processingHeaderObj.flag_dc:
            dc = numpy.fromfile( self.fp, self.dtype, self.pts2read_DCchannels ) #int(self.processingHeaderObj.nHeights*self.systemHeaderObj.nChannels) )
            dc = dc.reshape( (self.systemHeaderObj.nChannels, self.processingHeaderObj.nHeights) ) # reshape into a 2D array


        if not(self.processingHeaderObj.shif_fft):
            # roll the spectra to the right along axis 2 (undo the FFT shift)
            shift = int(self.processingHeaderObj.profilesPerBlock/2)
            spc = numpy.roll( spc, shift , axis=2 )

            if self.processingHeaderObj.flag_cspc:
                # roll the cross spectra to the right along axis 2 as well
                cspc = numpy.roll( cspc, shift, axis=2 )

#             self.processingHeaderObj.shif_fft = True

        # reorder to (channel, profile, height)
        spc = numpy.transpose( spc, (0,2,1) )
        self.data_spc = spc

        if self.processingHeaderObj.flag_cspc:
            cspc = numpy.transpose( cspc, (0,2,1) )
            self.data_cspc = cspc['real'] + cspc['imag']*1j
        else:
            self.data_cspc = None

        if self.processingHeaderObj.flag_dc:
            self.data_dc = dc['real'] + dc['imag']*1j
        else:
            self.data_dc = None

        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1

        self.nTotalBlocks += 1
        self.nReadBlocks += 1

        return 1
296 296
297 297 def getFirstHeader(self):
298 298
299 299 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
300 300
301 301 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
302 302
303 303 # self.dataOut.ippSeconds = self.ippSeconds
304 304
305 self.dataOut.timeInterval = self.radarControllerHeaderObj.ippSeconds * self.processingHeaderObj.nCohInt * self.processingHeaderObj.nIncohInt * self.processingHeaderObj.profilesPerBlock
305 # self.dataOut.timeInterval = self.radarControllerHeaderObj.ippSeconds * self.processingHeaderObj.nCohInt * self.processingHeaderObj.nIncohInt * self.processingHeaderObj.profilesPerBlock
306 306
307 307 self.dataOut.dtype = self.dtype
308 308
309 309 # self.dataOut.nPairs = self.nPairs
310 310
311 311 self.dataOut.pairsList = self.rdPairList
312 312
313 313 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
314 314
315 315 self.dataOut.nFFTPoints = self.processingHeaderObj.profilesPerBlock
316 316
317 317 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
318 318
319 319 self.dataOut.nIncohInt = self.processingHeaderObj.nIncohInt
320 320
321 321 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
322 322
323 323 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
324 324
325 325 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
326 326
327 327 self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft
328 328
329 329 self.dataOut.flagDecodeData = False #asumo q la data no esta decodificada
330 330
331 331 self.dataOut.flagDeflipData = True #asumo q la data no esta sin flip
332 332
333 333 if self.radarControllerHeaderObj.code != None:
334 334
335 335 self.dataOut.nCode = self.radarControllerHeaderObj.nCode
336 336
337 337 self.dataOut.nBaud = self.radarControllerHeaderObj.nBaud
338 338
339 339 self.dataOut.code = self.radarControllerHeaderObj.code
340 340
341 341 self.dataOut.flagDecodeData = True
342 342
343 343 def getData(self):
344 344 """
345 345 First method to execute before "RUN" is called.
346 346
347 347 Copia el buffer de lectura a la clase "Spectra",
348 348 con todos los parametros asociados a este (metadata). cuando no hay datos en el buffer de
349 349 lectura es necesario hacer una nueva lectura de los bloques de datos usando "readNextBlock"
350 350
351 351 Return:
352 352 0 : Si no hay mas archivos disponibles
353 353 1 : Si hizo una buena copia del buffer
354 354
355 355 Affected:
356 356 self.dataOut
357 357
358 358 self.flagTimeBlock
359 359 self.flagIsNewBlock
360 360 """
361 361
362 362 if self.flagNoMoreFiles:
363 363 self.dataOut.flagNoData = True
364 364 print 'Process finished'
365 365 return 0
366 366
367 367 self.flagTimeBlock = 0
368 368 self.flagIsNewBlock = 0
369 369
370 370 if self.__hasNotDataInBuffer():
371 371
372 372 if not( self.readNextBlock() ):
373 373 self.dataOut.flagNoData = True
374 374 return 0
375 375
376 376 #data es un numpy array de 3 dmensiones (perfiles, alturas y canales)
377 377
378 378 if self.data_dc == None:
379 379 self.dataOut.flagNoData = True
380 380 return 0
381 381
382 382 self.getBasicHeader()
383 383
384 384 self.getFirstHeader()
385 385
386 386 self.dataOut.data_spc = self.data_spc
387 387
388 388 self.dataOut.data_cspc = self.data_cspc
389 389
390 390 self.dataOut.data_dc = self.data_dc
391 391
392 392 self.dataOut.flagNoData = False
393 393
394 394 self.dataOut.realtime = self.online
395 395
396 396 return self.dataOut.data_spc
397 397
class SpectraWriter(JRODataWriter, Operation):

    """
    Writes processed spectra data to .pdata files. Data is always written
    one whole block at a time.
    """

    ext = ".pdata"              # output file extension

    optchar = "P"               # option character used in output file names

    shape_spc_Buffer = None     # (nChannels, nHeights, profilesPerBlock)

    shape_cspc_Buffer = None    # (nPairs, nHeights, profilesPerBlock)

    shape_dc_Buffer = None      # (nChannels, nHeights)

    data_spc = None             # self-spectra buffer

    data_cspc = None            # cross-spectra buffer

    data_dc = None              # DC-channels buffer

    # dataOut = None

    def __init__(self):
        """
        Initializes the SpectraWriter for writing spectra data.

        Affected:
            self.dataOut
            self.basicHeaderObj
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.processingHeaderObj

        Return: None
        """

        Operation.__init__(self)

        self.isConfig = False

        self.nTotalBlocks = 0

        self.data_spc = None

        self.data_cspc = None

        self.data_dc = None

        self.fp = None

        self.flagIsNewFile = 1

        self.nTotalBlocks = 0

        self.flagIsNewBlock = 0

        self.setFile = None

        self.dtype = None

        self.path = None

        self.noMoreFiles = 0

        self.filename = None

        self.basicHeaderObj = BasicHeader(LOCALTIME)

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()
475 475
    def hasAllDataInBuffer(self):
        # Spectra arrive one whole block at a time, so the buffer is always
        # considered complete and every putData() triggers a block write.
        return 1
478 478
479 479
480 480 def setBlockDimension(self):
481 481 """
482 482 Obtiene las formas dimensionales del los subbloques de datos que componen un bloque
483 483
484 484 Affected:
485 485 self.shape_spc_Buffer
486 486 self.shape_cspc_Buffer
487 487 self.shape_dc_Buffer
488 488
489 489 Return: None
490 490 """
491 491 self.shape_spc_Buffer = (self.dataOut.nChannels,
492 492 self.processingHeaderObj.nHeights,
493 493 self.processingHeaderObj.profilesPerBlock)
494 494
495 495 self.shape_cspc_Buffer = (self.dataOut.nPairs,
496 496 self.processingHeaderObj.nHeights,
497 497 self.processingHeaderObj.profilesPerBlock)
498 498
499 499 self.shape_dc_Buffer = (self.dataOut.nChannels,
500 500 self.processingHeaderObj.nHeights)
501 501
502 502
503 503 def writeBlock(self):
504 504 """
505 505 Escribe el buffer en el file designado
506 506
507 507 Affected:
508 508 self.data_spc
509 509 self.data_cspc
510 510 self.data_dc
511 511 self.flagIsNewFile
512 512 self.flagIsNewBlock
513 513 self.nTotalBlocks
514 514 self.nWriteBlocks
515 515
516 516 Return: None
517 517 """
518 518
519 519 spc = numpy.transpose( self.data_spc, (0,2,1) )
520 520 if not( self.processingHeaderObj.shif_fft ):
521 521 spc = numpy.roll( spc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #desplaza a la derecha en el eje 2 determinadas posiciones
522 522 data = spc.reshape((-1))
523 523 data = data.astype(self.dtype[0])
524 524 data.tofile(self.fp)
525 525
526 526 if self.data_cspc != None:
527 527 data = numpy.zeros( self.shape_cspc_Buffer, self.dtype )
528 528 cspc = numpy.transpose( self.data_cspc, (0,2,1) )
529 529 if not( self.processingHeaderObj.shif_fft ):
530 530 cspc = numpy.roll( cspc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #desplaza a la derecha en el eje 2 determinadas posiciones
531 531 data['real'] = cspc.real
532 532 data['imag'] = cspc.imag
533 533 data = data.reshape((-1))
534 534 data.tofile(self.fp)
535 535
536 536 if self.data_dc != None:
537 537 data = numpy.zeros( self.shape_dc_Buffer, self.dtype )
538 538 dc = self.data_dc
539 539 data['real'] = dc.real
540 540 data['imag'] = dc.imag
541 541 data = data.reshape((-1))
542 542 data.tofile(self.fp)
543 543
544 544 self.data_spc.fill(0)
545 545
546 546 if self.data_dc != None:
547 547 self.data_dc.fill(0)
548 548
549 549 if self.data_cspc != None:
550 550 self.data_cspc.fill(0)
551 551
552 552 self.flagIsNewFile = 0
553 553 self.flagIsNewBlock = 1
554 554 self.nTotalBlocks += 1
555 555 self.nWriteBlocks += 1
556 556 self.blockIndex += 1
557 557
558 558
559 559 def putData(self):
560 560 """
561 561 Setea un bloque de datos y luego los escribe en un file
562 562
563 563 Affected:
564 564 self.data_spc
565 565 self.data_cspc
566 566 self.data_dc
567 567
568 568 Return:
569 569 0 : Si no hay data o no hay mas files que puedan escribirse
570 570 1 : Si se escribio la data de un bloque en un file
571 571 """
572 572
573 573 if self.dataOut.flagNoData:
574 574 return 0
575 575
576 576 self.flagIsNewBlock = 0
577 577
578 578 if self.dataOut.flagTimeBlock:
579 579 self.data_spc.fill(0)
580 580 self.data_cspc.fill(0)
581 581 self.data_dc.fill(0)
582 582 self.setNextFile()
583 583
584 584 if self.flagIsNewFile == 0:
585 585 self.setBasicHeader()
586 586
587 587 self.data_spc = self.dataOut.data_spc.copy()
588 588 if self.dataOut.data_cspc != None:
589 589 self.data_cspc = self.dataOut.data_cspc.copy()
590 590 self.data_dc = self.dataOut.data_dc.copy()
591 591
592 592 # #self.processingHeaderObj.dataBlocksPerFile)
593 593 if self.hasAllDataInBuffer():
594 594 # self.setFirstHeader()
595 595 self.writeNextBlock()
596 596
597 597 return 1
598 598
599 599
600 600 def __getProcessFlags(self):
601 601
602 602 processFlags = 0
603 603
604 604 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
605 605 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
606 606 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
607 607 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
608 608 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
609 609 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
610 610
611 611 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
612 612
613 613
614 614
615 615 datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
616 616 PROCFLAG.DATATYPE_SHORT,
617 617 PROCFLAG.DATATYPE_LONG,
618 618 PROCFLAG.DATATYPE_INT64,
619 619 PROCFLAG.DATATYPE_FLOAT,
620 620 PROCFLAG.DATATYPE_DOUBLE]
621 621
622 622
623 623 for index in range(len(dtypeList)):
624 624 if self.dataOut.dtype == dtypeList[index]:
625 625 dtypeValue = datatypeValueList[index]
626 626 break
627 627
628 628 processFlags += dtypeValue
629 629
630 630 if self.dataOut.flagDecodeData:
631 631 processFlags += PROCFLAG.DECODE_DATA
632 632
633 633 if self.dataOut.flagDeflipData:
634 634 processFlags += PROCFLAG.DEFLIP_DATA
635 635
636 636 if self.dataOut.code != None:
637 637 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
638 638
639 639 if self.dataOut.nIncohInt > 1:
640 640 processFlags += PROCFLAG.INCOHERENT_INTEGRATION
641 641
642 642 if self.dataOut.data_dc != None:
643 643 processFlags += PROCFLAG.SAVE_CHANNELS_DC
644 644
645 645 return processFlags
646 646
647 647
648 648 def __getBlockSize(self):
649 649 '''
650 650 Este metodos determina el cantidad de bytes para un bloque de datos de tipo Spectra
651 651 '''
652 652
653 653 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
654 654 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
655 655 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
656 656 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
657 657 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
658 658 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
659 659
660 660 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
661 661 datatypeValueList = [1,2,4,8,4,8]
662 662 for index in range(len(dtypeList)):
663 663 if self.dataOut.dtype == dtypeList[index]:
664 664 datatypeValue = datatypeValueList[index]
665 665 break
666 666
667 667
668 668 pts2write = self.dataOut.nHeights * self.dataOut.nFFTPoints
669 669
670 670 pts2write_SelfSpectra = int(self.dataOut.nChannels * pts2write)
671 671 blocksize = (pts2write_SelfSpectra*datatypeValue)
672 672
673 673 if self.dataOut.data_cspc != None:
674 674 pts2write_CrossSpectra = int(self.dataOut.nPairs * pts2write)
675 675 blocksize += (pts2write_CrossSpectra*datatypeValue*2)
676 676
677 677 if self.dataOut.data_dc != None:
678 678 pts2write_DCchannels = int(self.dataOut.nChannels * self.dataOut.nHeights)
679 679 blocksize += (pts2write_DCchannels*datatypeValue*2)
680 680
681 681 blocksize = blocksize #* datatypeValue * 2 #CORREGIR ESTO
682 682
683 683 return blocksize
684 684
    def setFirstHeader(self):

        """
        Builds a copy of the First Header from the current dataOut.

        Affected:
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.dtype

        Return:
            None
        """

        self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
        self.systemHeaderObj.nChannels = self.dataOut.nChannels
        self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()
        # The code block stored in the radar-controller header may differ in
        # size from the one read; adjust the header size accordingly
        # (codes are packed in 32-bit words: 4 bytes per 32 bauds per code).
        old_code_size = self.dataOut.radarControllerHeaderObj.code_size
        new_code_size = int(numpy.ceil(self.dataOut.nBaud/32.))*self.dataOut.nCode*4
        self.radarControllerHeaderObj.size = self.radarControllerHeaderObj.size - old_code_size + new_code_size

        self.setBasicHeader()

        processingHeaderSize = 40 # bytes (fixed part of the processing header)
        self.processingHeaderObj.dtype = 1 # Spectra
        self.processingHeaderObj.blockSize = self.__getBlockSize()
        self.processingHeaderObj.profilesPerBlock = self.dataOut.nFFTPoints
        self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
        self.processingHeaderObj.nWindows = 1 # could be 1 or self.dataOut.processingHeaderObj.nWindows
        self.processingHeaderObj.processFlags = self.__getProcessFlags()
        self.processingHeaderObj.nCohInt = self.dataOut.nCohInt # required to determine timeInterval
        self.processingHeaderObj.nIncohInt = self.dataOut.nIncohInt
        self.processingHeaderObj.totalSpectra = self.dataOut.nPairs + self.dataOut.nChannels
        self.processingHeaderObj.shif_fft = self.dataOut.flagShiftFFT

        if self.processingHeaderObj.totalSpectra > 0:
            # Each self-spectrum is a channel paired with itself, hence the
            # channel index is appended twice.
            channelList = []
            for channel in range(self.dataOut.nChannels):
                channelList.append(channel)
                channelList.append(channel)

            pairsList = []
            if self.dataOut.nPairs > 0:
                for pair in self.dataOut.pairsList:
                    pairsList.append(pair[0])
                    pairsList.append(pair[1])

            spectraComb = channelList + pairsList
            spectraComb = numpy.array(spectraComb,dtype="u1")
            self.processingHeaderObj.spectraComb = spectraComb
            sizeOfSpcComb = len(spectraComb)
            processingHeaderSize += sizeOfSpcComb

        # The processing header should not have information about code

        if self.processingHeaderObj.nWindows != 0:
            self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
            self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
            self.processingHeaderObj.nHeights = self.dataOut.nHeights
            self.processingHeaderObj.samplesWin = self.dataOut.nHeights
            sizeOfFirstHeight = 4
            sizeOfdeltaHeight = 4
            sizeOfnHeights = 4
            sizeOfWindows = (sizeOfFirstHeight + sizeOfdeltaHeight + sizeOfnHeights)*self.processingHeaderObj.nWindows
            processingHeaderSize += sizeOfWindows

        self.processingHeaderObj.size = processingHeaderSize
761 761
@@ -1,593 +1,609
1 1 '''
2 2
3 3 '''
4 4 import numpy
5 5
6 6 from jroIO_base import LOCALTIME, JRODataReader, JRODataWriter
7 7 from model.proc.jroproc_base import ProcessingUnit, Operation
8 8 from model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
9 9 from model.data.jrodata import Voltage
10 10
class VoltageReader(JRODataReader, ProcessingUnit):
    """
    Reads voltage data from raw-data (.r) files. Data is always read one
    block at a time; each block read (a 3-D array: profiles * heights *
    channels) is stored in the "datablock" buffer.

    This class holds instances of BasicHeader, SystemHeader,
    RadarControllerHeader and Voltage. The first three store the data-header
    information (metadata); the fourth (Voltage) receives one profile of
    data from the buffer on every call to "getData".

    Example:

        dpath = "/home/myuser/data"

        startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)

        endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)

        readerObj = VoltageReader()

        readerObj.setup(dpath, startTime, endTime)

        while(True):

            #to get one profile
            profile = readerObj.getData()

            #print the profile
            print profile

            #If you want to see all datablock
            print readerObj.datablock

            if readerObj.flagNoMoreFiles:
                break

    """

    ext = ".r"        # raw-data file extension

    optchar = "D"     # option character used in file names
    dataOut = None


    def __init__(self):
        """
        Initializes the VoltageReader for reading voltage data.

        Input:
            dataOut : Voltage object used to store one profile of data on
                every request (getData). The profile is taken from the data
                buffer; if the buffer is empty a new block is read from
                file. If this parameter is not given, one is created
                internally.

        Affected:
            self.dataOut

        Return:
            None
        """

        ProcessingUnit.__init__(self)

        self.isConfig = False

        self.datablock = None

        self.utc = 0

        self.ext = ".r"

        self.optchar = "D"

        self.basicHeaderObj = BasicHeader(LOCALTIME)

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()

        self.online = 0

        self.fp = None

        self.idFile = None

        self.dtype = None

        self.fileSizeByHeader = None

        self.filenameList = []

        self.filename = None

        self.fileSize = None

        self.firstHeaderSize = 0

        self.basicHeaderSize = 24

        self.pathList = []

        self.filenameList = []

        self.lastUTTime = 0

        self.maxTimeStep = 30

        self.flagNoMoreFiles = 0

        self.set = 0

        self.path = None

        # Start past any valid index so the first getData() forces a block read
        self.profileIndex = 2**32-1

        self.delay = 3  #seconds

        self.nTries = 3  #quantity tries

        self.nFiles = 3  #number of files for searching

        self.nReadBlocks = 0

        self.flagIsNewFile = 1

        self.__isFirstTimeOnline = 1

        # self.ippSeconds = 0

        self.flagTimeBlock = 0

        self.flagIsNewBlock = 0

        self.nTotalBlocks = 0

        self.blocksize = 0

        self.dataOut = self.createObjByDefault()
156 156
157 157 def createObjByDefault(self):
158 158
159 159 dataObj = Voltage()
160 160
161 161 return dataObj
162 162
163 163 def __hasNotDataInBuffer(self):
164 164 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
165 165 return 1
166 166 return 0
167 167
168 168
169 169 def getBlockDimension(self):
170 170 """
171 171 Obtiene la cantidad de puntos a leer por cada bloque de datos
172 172
173 173 Affected:
174 174 self.blocksize
175 175
176 176 Return:
177 177 None
178 178 """
179 179 pts2read = self.processingHeaderObj.profilesPerBlock * self.processingHeaderObj.nHeights * self.systemHeaderObj.nChannels
180 180 self.blocksize = pts2read
181 181
182 182
    def readBlock(self):
        """
        Reads one data block from the current file-pointer position (self.fp)
        and updates all block-related attributes (metadata + data). The data
        read is stored in the buffer and the buffer counter is reset to 0.

        Inputs:
            None

        Return:
            None

        Affected:
            self.profileIndex
            self.datablock
            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks

        Exceptions:
            If a read block is not a valid block
        """
        current_pointer_location = self.fp.tell()
        junk = numpy.fromfile( self.fp, self.dtype, self.blocksize )

        try:
            junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
        except:
            #print "The read block (%3d) has not enough data" %self.nReadBlocks
            # Short read (e.g. file still being written in online mode): wait
            # for more data at the saved offset and retry the read once.
            if self.waitDataBlock(pointer_location=current_pointer_location):
                junk = numpy.fromfile( self.fp, self.dtype, self.blocksize )
                junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
            # return 0

        # (profiles, heights, channels) -> (channels, profiles, heights)
        junk = numpy.transpose(junk, (2,0,1))
        # combine the structured real/imag fields into a complex array
        self.datablock = junk['real'] + junk['imag']*1j

        self.profileIndex = 0

        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1

        self.nTotalBlocks += 1
        self.nReadBlocks += 1

        return 1
231 231
232 232 def getFirstHeader(self):
233 233
234 234 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
235 235
236 236 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
237 237
238 238 # self.dataOut.ippSeconds = self.ippSeconds
239 239
240 self.dataOut.timeInterval = self.radarControllerHeaderObj.ippSeconds * self.processingHeaderObj.nCohInt
240 # self.dataOut.timeInterval = self.radarControllerHeaderObj.ippSeconds * self.processingHeaderObj.nCohInt
241 241
242 242 if self.radarControllerHeaderObj.code != None:
243 243
244 244 self.dataOut.nCode = self.radarControllerHeaderObj.nCode
245 245
246 246 self.dataOut.nBaud = self.radarControllerHeaderObj.nBaud
247 247
248 248 self.dataOut.code = self.radarControllerHeaderObj.code
249 249
250 250 self.dataOut.dtype = self.dtype
251 251
252 252 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
253 253
254 254 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
255 255
256 256 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
257 257
258 258 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
259 259
260 260 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
261 261
262 262 self.dataOut.flagShiftFFT = False
263 263
264 264 self.dataOut.flagDecodeData = False #asumo q la data no esta decodificada
265 265
266 266 self.dataOut.flagDeflipData = False #asumo q la data no esta sin flip
267 267
268 268 self.dataOut.flagShiftFFT = False
269 269
270 270 def getData(self):
271 271 """
272 getData obtiene una unidad de datos del buffer de lectura y la copia a la clase "Voltage"
273 con todos los parametros asociados a este (metadata). cuando no hay datos en el buffer de
274 lectura es necesario hacer una nueva lectura de los bloques de datos usando "readNextBlock"
272 getData obtiene una unidad de datos del buffer de lectura, un perfil, y la copia al objeto self.dataOut
273 del tipo "Voltage" con todos los parametros asociados a este (metadata). cuando no hay datos
274 en el buffer de lectura es necesario hacer una nueva lectura de los bloques de datos usando
275 "readNextBlock"
275 276
276 Ademas incrementa el contador del buffer en 1.
277 Ademas incrementa el contador del buffer "self.profileIndex" en 1.
277 278
278 279 Return:
279 data : retorna un perfil de voltages (alturas * canales) copiados desde el
280 buffer. Si no hay mas archivos a leer retorna None.
280
281 Si el flag self.getByBlock ha sido seteado el bloque completo es copiado a self.dataOut y el self.profileIndex
282 es igual al total de perfiles leidos desde el archivo.
283
284 Si self.getByBlock == False:
285
286 self.dataOut.data = buffer[:, thisProfile, :]
287
288 shape = [nChannels, nHeis]
289
290 Si self.getByBlock == True:
291
292 self.dataOut.data = buffer[:, :, :]
293
294 shape = [nChannels, nProfiles, nHeis]
281 295
282 296 Variables afectadas:
283 297 self.dataOut
284 298 self.profileIndex
285 299
286 300 Affected:
287 301 self.dataOut
288 302 self.profileIndex
289 303 self.flagTimeBlock
290 304 self.flagIsNewBlock
291 305 """
292 306
293 307 if self.flagNoMoreFiles:
294 308 self.dataOut.flagNoData = True
295 309 print 'Process finished'
296 310 return 0
297 311
298 312 self.flagTimeBlock = 0
299 313 self.flagIsNewBlock = 0
300 314
301 315 if self.__hasNotDataInBuffer():
302 316
303 317 if not( self.readNextBlock() ):
304 318 return 0
305 319
306 320 self.getFirstHeader()
307 321
308 322 if self.datablock == None:
309 323 self.dataOut.flagNoData = True
310 324 return 0
311 325
312 if self.getblock:
326 if self.getByBlock:
327 self.dataOut.flagDataAsBlock = True
313 328 self.dataOut.data = self.datablock
314 329 self.profileIndex = self.processingHeaderObj.profilesPerBlock
315 330 else:
331 self.dataOut.flagDataAsBlock = False
316 332 self.dataOut.data = self.datablock[:,self.profileIndex,:]
317 333 self.profileIndex += 1
318 334
319 335 self.dataOut.flagNoData = False
320 336
321 337 self.getBasicHeader()
322 338
323 339
324 340
325 341 self.dataOut.realtime = self.online
326 342
327 343 return self.dataOut.data
328 344
class VoltageWriter(JRODataWriter, Operation):
    """
    Writes voltage data to processed (.r) files. Data is always written one
    whole block at a time.
    """

    ext = ".r"          # output file extension

    optchar = "D"       # option character used in output file names

    shapeBuffer = None  # (profilesPerBlock, nHeights, nChannels)


    def __init__(self):
        """
        Initializes the VoltageWriter for writing voltage data.

        Affected:
            self.dataOut

        Return: None
        """
        Operation.__init__(self)

        self.nTotalBlocks = 0

        self.profileIndex = 0

        self.isConfig = False

        self.fp = None

        self.flagIsNewFile = 1

        self.nTotalBlocks = 0

        self.flagIsNewBlock = 0

        self.setFile = None

        self.dtype = None

        self.path = None

        self.filename = None

        self.basicHeaderObj = BasicHeader(LOCALTIME)

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()
382 398
383 399 def hasAllDataInBuffer(self):
384 400 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
385 401 return 1
386 402 return 0
387 403
388 404
389 405 def setBlockDimension(self):
390 406 """
391 407 Obtiene las formas dimensionales del los subbloques de datos que componen un bloque
392 408
393 409 Affected:
394 410 self.shape_spc_Buffer
395 411 self.shape_cspc_Buffer
396 412 self.shape_dc_Buffer
397 413
398 414 Return: None
399 415 """
400 416 self.shapeBuffer = (self.processingHeaderObj.profilesPerBlock,
401 417 self.processingHeaderObj.nHeights,
402 418 self.systemHeaderObj.nChannels)
403 419
404 420 self.datablock = numpy.zeros((self.systemHeaderObj.nChannels,
405 421 self.processingHeaderObj.profilesPerBlock,
406 422 self.processingHeaderObj.nHeights),
407 423 dtype=numpy.dtype('complex64'))
408 424
409 425
410 426 def writeBlock(self):
411 427 """
412 428 Escribe el buffer en el file designado
413 429
414 430 Affected:
415 431 self.profileIndex
416 432 self.flagIsNewFile
417 433 self.flagIsNewBlock
418 434 self.nTotalBlocks
419 435 self.blockIndex
420 436
421 437 Return: None
422 438 """
423 439 data = numpy.zeros( self.shapeBuffer, self.dtype )
424 440
425 441 junk = numpy.transpose(self.datablock, (1,2,0))
426 442
427 443 data['real'] = junk.real
428 444 data['imag'] = junk.imag
429 445
430 446 data = data.reshape( (-1) )
431 447
432 448 data.tofile( self.fp )
433 449
434 450 self.datablock.fill(0)
435 451
436 452 self.profileIndex = 0
437 453 self.flagIsNewFile = 0
438 454 self.flagIsNewBlock = 1
439 455
440 456 self.blockIndex += 1
441 457 self.nTotalBlocks += 1
442 458
    def putData(self):
        """
        Stores one profile of data into the block buffer and writes the block
        to file once it is full.

        Affected:
            self.flagIsNewBlock
            self.profileIndex

        Return:
            0 : if there is no data or no more files can be written
            1 : if the data of one block was written to file
        """
        if self.dataOut.flagNoData:
            return 0

        self.flagIsNewBlock = 0

        if self.dataOut.flagTimeBlock:
            # Time discontinuity: drop the partial block and start a new file
            self.datablock.fill(0)
            self.profileIndex = 0
            self.setNextFile()

        if self.profileIndex == 0:
            self.setBasicHeader()

        self.datablock[:,self.profileIndex,:] = self.dataOut.data

        self.profileIndex += 1

        if self.hasAllDataInBuffer():
            #if self.flagIsNewFile:
            self.writeNextBlock()
            # self.setFirstHeader()

        return 1
479 495
480 496 def __getProcessFlags(self):
481 497
482 498 processFlags = 0
483 499
484 500 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
485 501 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
486 502 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
487 503 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
488 504 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
489 505 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
490 506
491 507 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
492 508
493 509
494 510
495 511 datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
496 512 PROCFLAG.DATATYPE_SHORT,
497 513 PROCFLAG.DATATYPE_LONG,
498 514 PROCFLAG.DATATYPE_INT64,
499 515 PROCFLAG.DATATYPE_FLOAT,
500 516 PROCFLAG.DATATYPE_DOUBLE]
501 517
502 518
503 519 for index in range(len(dtypeList)):
504 520 if self.dataOut.dtype == dtypeList[index]:
505 521 dtypeValue = datatypeValueList[index]
506 522 break
507 523
508 524 processFlags += dtypeValue
509 525
510 526 if self.dataOut.flagDecodeData:
511 527 processFlags += PROCFLAG.DECODE_DATA
512 528
513 529 if self.dataOut.flagDeflipData:
514 530 processFlags += PROCFLAG.DEFLIP_DATA
515 531
516 532 if self.dataOut.code != None:
517 533 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
518 534
519 535 if self.dataOut.nCohInt > 1:
520 536 processFlags += PROCFLAG.COHERENT_INTEGRATION
521 537
522 538 return processFlags
523 539
524 540
525 541 def __getBlockSize(self):
526 542 '''
527 543 Este metodos determina el cantidad de bytes para un bloque de datos de tipo Voltage
528 544 '''
529 545
530 546 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
531 547 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
532 548 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
533 549 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
534 550 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
535 551 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
536 552
537 553 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
538 554 datatypeValueList = [1,2,4,8,4,8]
539 555 for index in range(len(dtypeList)):
540 556 if self.dataOut.dtype == dtypeList[index]:
541 557 datatypeValue = datatypeValueList[index]
542 558 break
543 559
544 560 blocksize = int(self.dataOut.nHeights * self.dataOut.nChannels * self.profilesPerBlock * datatypeValue * 2)
545 561
546 562 return blocksize
547 563
    def setFirstHeader(self):

        """
        Builds a copy of the First Header from the current dataOut.

        Affected:
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.dtype

        Return:
            None
        """

        self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
        self.systemHeaderObj.nChannels = self.dataOut.nChannels
        self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()

        self.setBasicHeader()

        processingHeaderSize = 40 # bytes (fixed part of the processing header)
        self.processingHeaderObj.dtype = 0 # Voltage
        self.processingHeaderObj.blockSize = self.__getBlockSize()
        self.processingHeaderObj.profilesPerBlock = self.profilesPerBlock
        self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
        self.processingHeaderObj.nWindows = 1 # could be 1 or self.dataOut.processingHeaderObj.nWindows
        self.processingHeaderObj.processFlags = self.__getProcessFlags()
        self.processingHeaderObj.nCohInt = self.dataOut.nCohInt
        self.processingHeaderObj.nIncohInt = 1 # source data is of Voltage type
        self.processingHeaderObj.totalSpectra = 0 # source data is of Voltage type

        # The processing header should not carry code information

        if self.processingHeaderObj.nWindows != 0:
            self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
            self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
            self.processingHeaderObj.nHeights = self.dataOut.nHeights
            self.processingHeaderObj.samplesWin = self.dataOut.nHeights
            processingHeaderSize += 12  # firstHeight + deltaHeight + nHeights, 4 bytes each

        self.processingHeaderObj.size = processingHeaderSize
General Comments 0
You need to be logged in to leave comments. Login now