##// END OF EJS Templates
Bug fixed: error calculating size of RC Header. ...
Miguel Valdez -
r616:2d27990ff2eb
parent child
Show More
@@ -1,618 +1,679
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JROHeaderIO.py 151 2012-10-31 19:00:51Z murco $
5 5 '''
6 6 import numpy
7 7 import copy
8 8 import datetime
9 9
# Speed of light used to convert between IPP in kilometers and IPP in
# seconds.  The legacy rounded value (3e8 m/s) is kept -- it was the
# effective value before (a duplicate assignment to 299792458 was
# immediately overwritten) -- so existing files keep identical numbers.
SPEED_OF_LIGHT = 3e8
# numpy dtypes describing the little-endian on-disk layout of each header
# section of the Jicamarca raw-data format.  Field order defines the byte
# layout and must not change.

# Basic (per data block) header: 24 bytes.
BASIC_STRUCTURE = numpy.dtype([
         ('nSize','<u4'),
         ('nVersion','<u2'),
         ('nDataBlockId','<u4'),
         ('nUtime','<u4'),
         ('nMilsec','<u2'),
         ('nTimezone','<i2'),
         ('nDstflag','<i2'),
         ('nErrorCount','<u4')
         ])

# Acquisition-system header: 24 bytes.
SYSTEM_STRUCTURE = numpy.dtype([
         ('nSize','<u4'),
         ('nNumSamples','<u4'),
         ('nNumProfiles','<u4'),
         ('nNumChannels','<u4'),
         ('nADCResolution','<u4'),
         ('nPCDIOBusWidth','<u4'),
         ])

# Fixed part of the radar-controller header: 116 bytes.
RADAR_STRUCTURE = numpy.dtype([
         ('nSize','<u4'),
         ('nExpType','<u4'),
         ('nNTx','<u4'),
         ('fIpp','<f4'),
         ('fTxA','<f4'),
         ('fTxB','<f4'),
         ('nNumWindows','<u4'),
         ('nNumTaus','<u4'),
         ('nCodeType','<u4'),
         ('nLine6Function','<u4'),
         ('nLine5Function','<u4'),
         ('fClock','<f4'),
         ('nPrePulseBefore','<u4'),
         ('nPrePulseAfter','<u4'),
         ('sRangeIPP','<a20'),
         ('sRangeTxA','<a20'),
         ('sRangeTxB','<a20'),
         ])

# One record per sampling window: first height, height step, sample count.
SAMPLING_STRUCTURE = numpy.dtype([('h0','<f4'),('dh','<f4'),('nsa','<u4')])


# Processing header (fixed part): 40 bytes.
PROCESSING_STRUCTURE = numpy.dtype([
         ('nSize','<u4'),
         ('nDataType','<u4'),
         ('nSizeOfDataBlock','<u4'),
         ('nProfilesperBlock','<u4'),
         ('nDataBlocksperFile','<u4'),
         ('nNumWindows','<u4'),
         ('nProcessFlags','<u4'),
         ('nCoherentIntegrations','<u4'),
         ('nIncoherentIntegrations','<u4'),
         ('nTotalSpectra','<u4')
         ])
65 68
class Header(object):
    """Abstract base for all Jicamarca header sections.

    Subclasses override read() and write(); instantiating the base class
    directly is an error.
    """

    def __init__(self):
        # Abstract: the base class must never be instantiated.
        raise

    def copy(self):
        """Return a deep copy of this header object."""
        return copy.deepcopy(self)

    def read(self):
        # Subclass responsibility.
        raise ValueError

    def write(self):
        # Subclass responsibility.
        raise ValueError

    def printInfo(self):
        """Dump every instance attribute to stdout for debugging."""
        banner = "#"*100
        print(banner)
        print(self.__class__.__name__.upper())
        print(banner)
        for name, value in self.__dict__.items():
            print("%s = %s" %(name, value))
class BasicHeader(Header):
    """Basic per-block header: block id, timestamp and error count.

    The on-disk layout is described by BASIC_STRUCTURE (24 bytes).
    """

    size = None
    version = None
    dataBlock = None
    utc = None
    ltc = None
    miliSecond = None
    timeZone = None
    dstFlag = None
    errorCount = None
    datatime = None

    __LOCALTIME = None

    def __init__(self, useLocalTime=True):

        self.size = 24
        self.version = 0
        self.dataBlock = 0
        self.utc = 0
        self.miliSecond = 0
        self.timeZone = 0
        self.dstFlag = 0
        self.errorCount = 0

        self.useLocalTime = useLocalTime

    def read(self, fp):
        """Fill this header from *fp*; return 1 on success, 0 on failure."""
        try:
            raw = numpy.fromfile(fp, BASIC_STRUCTURE, 1)

            self.size = int(raw['nSize'][0])
            self.version = int(raw['nVersion'][0])
            self.dataBlock = int(raw['nDataBlockId'][0])
            self.utc = int(raw['nUtime'][0])
            self.miliSecond = int(raw['nMilsec'][0])
            self.timeZone = int(raw['nTimezone'][0])
            self.dstFlag = int(raw['nDstflag'][0])
            self.errorCount = int(raw['nErrorCount'][0])

        except Exception as e:
            print("BasicHeader: ")
            print(e)
            return 0

        return 1

    def write(self, fp):
        """Serialize this header to *fp*; always returns 1."""
        values = (self.size, self.version, self.dataBlock, self.utc,
                  self.miliSecond, self.timeZone, self.dstFlag, self.errorCount)
        numpy.array(values, BASIC_STRUCTURE).tofile(fp)

        return 1

    def get_ltc(self):
        # timeZone is stored in minutes offset from UTC
        return self.utc - self.timeZone*60

    def set_ltc(self, value):

        self.utc = value + self.timeZone*60

    def get_datatime(self):

        return datetime.datetime.utcfromtimestamp(self.ltc)

    ltc = property(get_ltc, set_ltc)
    datatime = property(get_datatime)
161 164
class SystemHeader(Header):
    """Acquisition-system header: sample/profile/channel counts and
    hardware parameters (SYSTEM_STRUCTURE, 24 bytes on disk)."""

    size = None
    nSamples = None
    nProfiles = None
    nChannels = None
    adcResolution = None
    pciDioBusWidth = None

    def __init__(self, nSamples=0, nProfiles=0, nChannels=0, adcResolution=14, pciDioBusWith=0):
        # NOTE: the misspelled parameter name 'pciDioBusWith' is kept for
        # backward compatibility with existing callers.
        self.size = 24
        self.nSamples = nSamples
        self.nProfiles = nProfiles
        self.nChannels = nChannels
        self.adcResolution = adcResolution
        self.pciDioBusWidth = pciDioBusWith

    def read(self, fp):
        """Fill this header from *fp*; return 1 on success, 0 on failure."""
        try:
            header = numpy.fromfile(fp, SYSTEM_STRUCTURE, 1)
            self.size = header['nSize'][0]
            self.nSamples = header['nNumSamples'][0]
            self.nProfiles = header['nNumProfiles'][0]
            self.nChannels = header['nNumChannels'][0]
            self.adcResolution = header['nADCResolution'][0]
            self.pciDioBusWidth = header['nPCDIOBusWidth'][0]

        except Exception as e:
            # Bug fix: '"SystemHeader: " + e' raised TypeError (str +
            # Exception) inside the handler; convert explicitly.
            print("SystemHeader: " + str(e))
            return 0

        return 1

    def write(self, fp):
        """Serialize this header to *fp*; always returns 1."""
        headerTuple = (self.size, self.nSamples, self.nProfiles, self.nChannels,
                       self.adcResolution, self.pciDioBusWidth)
        header = numpy.array(headerTuple, SYSTEM_STRUCTURE)
        header.tofile(fp)

        return 1
204 207
class RadarControllerHeader(Header):
    """Radar-controller header of the Jicamarca raw-data format.

    A fixed 116-byte section (RADAR_STRUCTURE) is followed by a variable
    section: one SAMPLING_STRUCTURE record per sampling window, the table
    of Taus and -- when codeType != 0 -- the transmitted code.  The total
    on-disk size is exposed through the ``size`` property, which
    recomputes it from the parsed fields.
    """

    expType = None
    nTx = None
    ipp = None
    txA = None
    txB = None
    nWindows = None
    numTaus = None
    codeType = None
    line6Function = None
    line5Function = None
    fClock = None
    prePulseBefore = None
    prePulserAfter = None
    rangeIpp = None
    rangeTxA = None
    rangeTxB = None

    __size = None

    def __init__(self, expType=2, nTx=1,
                 ippKm=None, txA=0, txB=0,
                 nWindows=None, nHeights=None, firstHeight=None, deltaHeight=None,
                 numTaus=0, line6Function=0, line5Function=0, fClock=None,
                 prePulseBefore=0, prePulseAfter=0,
                 codeType=0, nCode=0, nBaud=0, code=None,
                 flip1=0, flip2=0):

        # Assigning .size goes through the property setter; the getter
        # recomputes the real size from nWindows/numTaus/code fields.
        self.size = 116
        self.expType = expType
        self.nTx = nTx
        self.ipp = ippKm
        self.txA = txA
        self.txB = txB
        self.rangeIpp = ippKm
        self.rangeTxA = txA
        self.rangeTxB = txB

        self.nWindows = nWindows
        self.numTaus = numTaus
        self.codeType = codeType
        self.line6Function = line6Function
        self.line5Function = line5Function
        self.fClock = fClock
        self.prePulseBefore = prePulseBefore
        self.prePulserAfter = prePulseAfter

        self.nHeights = nHeights
        self.firstHeight = firstHeight
        self.deltaHeight = deltaHeight
        self.samplesWin = nHeights

        self.nCode = nCode
        self.nBaud = nBaud
        self.code = code
        self.flip1 = flip1
        self.flip2 = flip2

        # bytes occupied by the packed code: nCode words of ceil(nBaud/32)*4
        self.code_size = int(numpy.ceil(self.nBaud/32.))*self.nCode*4

        # Derive the clock from the height resolution when not given:
        # 0.15 Km per microsecond of sampling period.
        if self.fClock is None and self.deltaHeight is not None:
            self.fClock = 0.15/(deltaHeight*1e-6)

    def read(self, fp):
        """Parse a radar-controller header starting at the current position
        of *fp*; return 1 on success, 0 on failure."""
        try:
            startFp = fp.tell()
            header = numpy.fromfile(fp, RADAR_STRUCTURE, 1)

            # Keep the size declared in the file in a LOCAL: the 'size'
            # property recomputes its value, which was the original bug
            # in the RC header size calculation.
            size = int(header['nSize'][0])

            self.expType = int(header['nExpType'][0])
            self.nTx = int(header['nNTx'][0])
            self.ipp = float(header['fIpp'][0])
            self.txA = float(header['fTxA'][0])
            self.txB = float(header['fTxB'][0])
            self.nWindows = int(header['nNumWindows'][0])
            self.numTaus = int(header['nNumTaus'][0])
            self.codeType = int(header['nCodeType'][0])
            self.line6Function = int(header['nLine6Function'][0])
            self.line5Function = int(header['nLine5Function'][0])
            self.fClock = float(header['fClock'][0])
            self.prePulseBefore = int(header['nPrePulseBefore'][0])
            self.prePulserAfter = int(header['nPrePulseAfter'][0])
            self.rangeIpp = header['sRangeIPP'][0]
            self.rangeTxA = header['sRangeTxA'][0]
            self.rangeTxB = header['sRangeTxB'][0]

            samplingWindow = numpy.fromfile(fp, SAMPLING_STRUCTURE, self.nWindows)

            self.nHeights = int(numpy.sum(samplingWindow['nsa']))
            self.firstHeight = samplingWindow['h0']
            self.deltaHeight = samplingWindow['dh']
            self.samplesWin = samplingWindow['nsa']

            self.Taus = numpy.fromfile(fp, '<f4', self.numTaus)

            self.code_size = 0
            if self.codeType != 0:
                self.nCode = int(numpy.fromfile(fp, '<u4', 1))
                self.nBaud = int(numpy.fromfile(fp, '<u4', 1))
                self.code = numpy.empty([self.nCode, self.nBaud], dtype='i1')

                # Each code sequence is bit-packed into 32-bit words;
                # unpack from the least-significant bit upwards.
                for ic in range(self.nCode):
                    temp = numpy.fromfile(fp, 'u4', int(numpy.ceil(self.nBaud/32.)))
                    for ib in range(self.nBaud-1, -1, -1):
                        self.code[ic, ib] = temp[ib//32]%2
                        temp[ib//32] = temp[ib//32]//2
                self.code = 2.0*self.code - 1.0
                self.code_size = int(numpy.ceil(self.nBaud/32.))*self.nCode*4

            # Skip any trailing (dynamic) header bytes this reader does not
            # parse.  Bug fix: the old code did fp.seek(jumpFp) -- an
            # ABSOLUTE seek to the remaining byte count, which rewound the
            # file; position at the true end of the header instead.
            endFp = size + startFp
            if endFp > fp.tell():
                fp.seek(endFp)

        except Exception as e:
            # Bug fix: '"..." + e' raised TypeError (str + Exception).
            print("RadarControllerHeader: " + str(e))
            return 0

        return 1

    def write(self, fp):
        """Serialize the full radar-controller header to *fp*; returns 1.

        self.size resolves through the property, so the written nSize is
        the recomputed total size (the fix for the wrong RC header size).
        """
        headerTuple = (self.size,
                       self.expType,
                       self.nTx,
                       self.ipp,
                       self.txA,
                       self.txB,
                       self.nWindows,
                       self.numTaus,
                       self.codeType,
                       self.line6Function,
                       self.line5Function,
                       self.fClock,
                       self.prePulseBefore,
                       self.prePulserAfter,
                       self.rangeIpp,
                       self.rangeTxA,
                       self.rangeTxB)

        header = numpy.array(headerTuple, RADAR_STRUCTURE)
        header.tofile(fp)

        sampleWindowTuple = (self.firstHeight, self.deltaHeight, self.samplesWin)
        samplingWindow = numpy.array(sampleWindowTuple, SAMPLING_STRUCTURE)
        samplingWindow.tofile(fp)

        if self.numTaus > 0:
            self.Taus.tofile(fp)

        if self.codeType != 0:
            nCode = numpy.array(self.nCode, '<u4')
            nCode.tofile(fp)
            nBaud = numpy.array(self.nBaud, '<u4')
            nBaud.tofile(fp)
            # Re-pack the +/-1 code back into 32-bit words (inverse of read()).
            code1 = (self.code + 1.0)/2.

            for ic in range(self.nCode):
                # Robustness fix: numpy.zeros() needs an int length, not the
                # float returned by numpy.ceil().
                tempx = numpy.zeros(int(numpy.ceil(self.nBaud/32.)))
                start = 0
                end = 32
                for i in range(len(tempx)):
                    code_selected = code1[ic, start:end]
                    for j in range(len(code_selected)-1, -1, -1):
                        if code_selected[j] == 1:
                            tempx[i] = tempx[i] + 2**(len(code_selected)-1-j)
                    start = start + 32
                    end = end + 32

                tempx = tempx.astype('u4')
                tempx.tofile(fp)

        return 1

    def get_ippSeconds(self):
        """IPP in seconds (self.ipp is stored in kilometers, two-way path)."""
        ippSeconds = 2.0 * 1000 * self.ipp / SPEED_OF_LIGHT

        return ippSeconds

    def set_ippSeconds(self, ippSeconds):
        """Set the IPP from a value in seconds (stored back in kilometers)."""
        self.ipp = ippSeconds * SPEED_OF_LIGHT / (2.0*1000)

        return

    def get_size(self):
        """Total on-disk size of the header, recomputed from its fields."""
        self.__size = 116 + 12*self.nWindows + 4*self.numTaus

        if self.codeType != 0:
            # int(): numpy.ceil returns a float and the size must stay integral
            self.__size += 4 + 4 + 4*self.nCode*int(numpy.ceil(self.nBaud/32.))

        return self.__size

    def set_size(self, value):
        # NOTE: the getter recomputes the size, so the stored value only
        # exists for compatibility with code assigning to .size directly.
        self.__size = value

        return

    ippSeconds = property(get_ippSeconds, set_ippSeconds)
    size = property(get_size, set_size)
417 436
class ProcessingHeader(Header):
    """Processing header: data type, block geometry, integration counts and
    the list of spectra combinations stored in the file.

    Fixed part is PROCESSING_STRUCTURE (40 bytes), followed by one
    SAMPLING_STRUCTURE per window and 2*totalSpectra channel-pair bytes.
    """

    size = None
    dtype = None
    blockSize = None
    profilesPerBlock = None
    dataBlocksPerFile = None
    nWindows = None
    # bitmask of PROCFLAG values
    processFlags = None
    nCohInt = None
    nIncohInt = None
    totalSpectra = None

    # set by read(): DC channels saved / cross-spectra present
    flag_dc = None
    flag_cspc = None

    def __init__(self):

        self.size = 0
        self.dtype = 0
        self.blockSize = 0
        self.profilesPerBlock = 0
        self.dataBlocksPerFile = 0
        self.nWindows = 0
        self.processFlags = 0
        self.nCohInt = 0
        self.nIncohInt = 0
        self.totalSpectra = 0

        self.nHeights = 0
        self.firstHeight = 0
        self.deltaHeight = 0
        self.samplesWin = 0
        self.spectraComb = 0
#        self.nCode = None
#        self.code = None
#        self.nBaud = None
        self.shif_fft = False
        self.flag_dc = False
        self.flag_cspc = False

    def read(self, fp):
        """Fill this header from *fp*; returns 1 (errors currently propagate,
        the old try/except guard is commented out below)."""
#        try:
        header = numpy.fromfile(fp,PROCESSING_STRUCTURE,1)
        self.size = int(header['nSize'][0])
        self.dtype = int(header['nDataType'][0])
        self.blockSize = int(header['nSizeOfDataBlock'][0])
        self.profilesPerBlock = int(header['nProfilesperBlock'][0])
        self.dataBlocksPerFile = int(header['nDataBlocksperFile'][0])
        self.nWindows = int(header['nNumWindows'][0])
        # NOTE(review): unlike the other fields this keeps the raw array
        # (no [0] / int()); downstream bitwise tests still work -- confirm
        # whether a scalar was intended.
        self.processFlags = header['nProcessFlags']
        self.nCohInt = int(header['nCoherentIntegrations'][0])
        self.nIncohInt = int(header['nIncoherentIntegrations'][0])
        self.totalSpectra = int(header['nTotalSpectra'][0])

        samplingWindow = numpy.fromfile(fp,SAMPLING_STRUCTURE,self.nWindows)

        # nHeights aggregates every window; firstHeight/deltaHeight keep
        # only the first window's values.
        self.nHeights = int(numpy.sum(samplingWindow['nsa']))
        self.firstHeight = float(samplingWindow['h0'][0])
        self.deltaHeight = float(samplingWindow['dh'][0])
        self.samplesWin = samplingWindow['nsa'][0]
        # two channel indices per spectrum
        self.spectraComb = numpy.fromfile(fp,'u1',2*self.totalSpectra)

#        if ((self.processFlags & PROCFLAG.DEFINE_PROCESS_CODE) == PROCFLAG.DEFINE_PROCESS_CODE):
#            self.nCode = int(numpy.fromfile(fp,'<u4',1))
#            self.nBaud = int(numpy.fromfile(fp,'<u4',1))
#            self.code = numpy.fromfile(fp,'<f4',self.nCode*self.nBaud).reshape(self.nCode,self.nBaud)

        if ((self.processFlags & PROCFLAG.SHIFT_FFT_DATA) == PROCFLAG.SHIFT_FFT_DATA):
            self.shif_fft = True
        else:
            self.shif_fft = False

        if ((self.processFlags & PROCFLAG.SAVE_CHANNELS_DC) == PROCFLAG.SAVE_CHANNELS_DC):
            self.flag_dc = True

        nChannels = 0
        nPairs = 0
        pairList = []

        for i in range( 0, self.totalSpectra*2, 2 ):
            if self.spectraComb[i] == self.spectraComb[i+1]:
                nChannels = nChannels + 1 # pair of equal channels (self-spectrum)
            else:
                nPairs = nPairs + 1 # pair of different channels (cross-spectrum)
                pairList.append( (self.spectraComb[i], self.spectraComb[i+1]) )

        self.flag_cspc = False
        if nPairs > 0:
            self.flag_cspc = True

#        except Exception, e:
#            print "Error ProcessingHeader: "
#            return 0

        return 1

    def write(self, fp):
        """Serialize this header to *fp*; always returns 1."""
        headerTuple = (self.size,
                       self.dtype,
                       self.blockSize,
                       self.profilesPerBlock,
                       self.dataBlocksPerFile,
                       self.nWindows,
                       self.processFlags,
                       self.nCohInt,
                       self.nIncohInt,
                       self.totalSpectra)

        header = numpy.array(headerTuple,PROCESSING_STRUCTURE)
        header.tofile(fp)

        if self.nWindows != 0:
            sampleWindowTuple = (self.firstHeight,self.deltaHeight,self.samplesWin)
            samplingWindow = numpy.array(sampleWindowTuple,SAMPLING_STRUCTURE)
            samplingWindow.tofile(fp)


        if self.totalSpectra != 0:
            spectraComb = numpy.array([],numpy.dtype('u1'))
            spectraComb = self.spectraComb
            spectraComb.tofile(fp)

#        if self.processFlags & PROCFLAG.DEFINE_PROCESS_CODE == PROCFLAG.DEFINE_PROCESS_CODE:
#            nCode = numpy.array([self.nCode], numpy.dtype('u4'))
#            nCode.tofile(fp)
#
#            nBaud = numpy.array([self.nBaud], numpy.dtype('u4'))
#            nBaud.tofile(fp)
#
#            code = self.code.reshape(self.nCode*self.nBaud)
#            code = code.astype(numpy.dtype('<f4'))
#            code.tofile(fp)

        return 1
554 573
class RCfunction:
    """Radar-controller line function codes, as stored in the raw headers."""
    NONE=0
    FLIP=1
    CODE=2
    SAMPLING=3
    LIN6DIV256=4
    SYNCHRO=5
562 581
class nCodeType:
    """Identifiers for the transmitted code type stored in the RC header."""
    NONE=0
    USERDEFINE=1
    BARKER2=2
    BARKER3=3
    BARKER4=4
    BARKER5=5
    BARKER7=6
    BARKER11=7
    BARKER13=8
    AC128=9
    COMPLEMENTARYCODE2=10
    COMPLEMENTARYCODE4=11
    COMPLEMENTARYCODE8=12
    COMPLEMENTARYCODE16=13
    COMPLEMENTARYCODE32=14
    COMPLEMENTARYCODE64=15
    COMPLEMENTARYCODE128=16
    CODE_BINARY28=17
582 601
class PROCFLAG:
    """Bit flags of ProcessingHeader.processFlags (one bit per option)."""

    # processing operations applied to the data
    COHERENT_INTEGRATION = numpy.uint32(0x00000001)
    DECODE_DATA = numpy.uint32(0x00000002)
    SPECTRA_CALC = numpy.uint32(0x00000004)
    INCOHERENT_INTEGRATION = numpy.uint32(0x00000008)
    POST_COHERENT_INTEGRATION = numpy.uint32(0x00000010)
    SHIFT_FFT_DATA = numpy.uint32(0x00000020)

    # sample data type stored in the file (see PROCFLAG_DTYPE_LIST)
    DATATYPE_CHAR = numpy.uint32(0x00000040)
    DATATYPE_SHORT = numpy.uint32(0x00000080)
    DATATYPE_LONG = numpy.uint32(0x00000100)
    DATATYPE_INT64 = numpy.uint32(0x00000200)
    DATATYPE_FLOAT = numpy.uint32(0x00000400)
    DATATYPE_DOUBLE = numpy.uint32(0x00000800)

    # in-memory/in-file data arrangement
    DATAARRANGE_CONTIGUOUS_CH = numpy.uint32(0x00001000)
    DATAARRANGE_CONTIGUOUS_H = numpy.uint32(0x00002000)
    DATAARRANGE_CONTIGUOUS_P = numpy.uint32(0x00004000)

    SAVE_CHANNELS_DC = numpy.uint32(0x00008000)
    DEFLIP_DATA = numpy.uint32(0x00010000)
    DEFINE_PROCESS_CODE = numpy.uint32(0x00020000)

    # acquisition system identifiers
    ACQ_SYS_NATALIA = numpy.uint32(0x00040000)
    ACQ_SYS_ECHOTEK = numpy.uint32(0x00080000)
    ACQ_SYS_ADRXD = numpy.uint32(0x000C0000)
    ACQ_SYS_JULIA = numpy.uint32(0x00100000)
    ACQ_SYS_XXXXXX = numpy.uint32(0x00140000)

    EXP_NAME_ESP = numpy.uint32(0x00200000)
    CHANNEL_NAMES_ESP = numpy.uint32(0x00400000)

    # masks to extract each of the fields above
    OPERATION_MASK = numpy.uint32(0x0000003F)
    DATATYPE_MASK = numpy.uint32(0x00000FC0)
    DATAARRANGE_MASK = numpy.uint32(0x00007000)
    ACQ_SYS_MASK = numpy.uint32(0x001C0000)
639
# Complex-sample dtypes, in the order encoded by the DATATYPE_* flags.
dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])

NUMPY_DTYPE_LIST = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]

# PROCFLAG datatype bit for each entry of NUMPY_DTYPE_LIST (same order).
PROCFLAG_DTYPE_LIST = [PROCFLAG.DATATYPE_CHAR,
                       PROCFLAG.DATATYPE_SHORT,
                       PROCFLAG.DATATYPE_LONG,
                       PROCFLAG.DATATYPE_INT64,
                       PROCFLAG.DATATYPE_FLOAT,
                       PROCFLAG.DATATYPE_DOUBLE]

# Bytes per real/imag component for each entry of NUMPY_DTYPE_LIST.
DTYPE_WIDTH = [1, 2, 4, 8, 4, 8]
657
def get_dtype_index(numpy_dtype):
    """Return the position of *numpy_dtype* in NUMPY_DTYPE_LIST, or None
    when it is not one of the supported complex-sample dtypes."""
    for position, candidate in enumerate(NUMPY_DTYPE_LIST):
        if numpy_dtype == candidate:
            return position

    return None
668
def get_numpy_dtype(index):
    """Return the complex-sample numpy dtype at *index* (see NUMPY_DTYPE_LIST)."""
    return NUMPY_DTYPE_LIST[index]
672
def get_procflag_dtype(index):
    """Return the PROCFLAG.DATATYPE_* bit corresponding to dtype *index*."""
    return PROCFLAG_DTYPE_LIST[index]
676
def get_dtype_width(index):
    """Return the width in bytes of one sample component for dtype *index*."""
    return DTYPE_WIDTH[index]
@@ -1,1465 +1,1526
1 1 '''
2 2 Created on Jul 2, 2014
3 3
4 4 @author: roj-idl71
5 5 '''
6 6 import os
7 7 import sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import time, datetime
13 13 #import h5py
14 14 import traceback
15 15
16 16 try:
17 17 from gevent import sleep
18 18 except:
19 19 from time import sleep
20 20
21 21 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
22 from schainpy.model.data.jroheaderIO import get_dtype_index, get_numpy_dtype, get_procflag_dtype, get_dtype_width
22 23
23 24 LOCALTIME = True
24 25
def isNumber(cad):
    """Return True when the string *cad* can be converted to a number.

    Input:
        cad : string to analyze

    Return:
        True  : the string is numeric
        False : it is not
    """
    try:
        float( cad )
        return True
    except (ValueError, TypeError):
        # Narrowed from a bare 'except:': only conversion failures mean
        # "not a number"; anything else should propagate.
        return False
43 44
def isThisFileinRange(filename, startUTSeconds, endUTSeconds):
    """Return 1 when the data file holds data inside the given UTC range.

    Inputs:
        filename       : full path of a Jicamarca data file (.r)
        startUTSeconds : start of the range, seconds since 01/01/1970
        endUTSeconds   : end of the range, seconds since 01/01/1970

    Return:
        1 if the first basic header's timestamp lies in
        [startUTSeconds, endUTSeconds), 0 otherwise.

    Raises:
        IOError when the file cannot be opened, or when its header
        cannot be read.
    """
    basicHeaderObj = BasicHeader(LOCALTIME)

    try:
        fp = open(filename, 'rb')
    except IOError:
        traceback.print_exc()
        raise IOError("The file %s can't be opened" %(filename))

    sts = basicHeaderObj.read(fp)
    fp.close()

    if not sts:
        print("Skipping the file %s because it has not a valid header" %(filename))
        return 0

    # De Morgan of: not (start <= utc and end > utc)
    if basicHeaderObj.utc < startUTSeconds or basicHeaderObj.utc >= endUTSeconds:
        return 0

    return 1
84 85
def isFileinThisTime(filename, startTime, endTime):
    """Return the file's first-block datetime when its time of day lies in
    [startTime, endTime); otherwise return None.

    Inputs:
        filename  : full path of a Jicamarca data file (.r)
        startTime : start of the range (datetime.time)
        endTime   : end of the range (datetime.time)

    Raises:
        IOError when the file cannot be opened.
    """
    try:
        fp = open(filename, 'rb')
    except IOError:
        traceback.print_exc()
        raise IOError("The file %s can't be opened" %(filename))

    basicHeaderObj = BasicHeader(LOCALTIME)
    sts = basicHeaderObj.read(fp)
    fp.close()

    # Bug fix: validate the header BEFORE using its timestamp; the old
    # code built the datetime from a possibly unread header first.
    if not(sts):
        print("Skipping the file %s because it has not a valid header" %(filename))
        return None

    thisDatetime = basicHeaderObj.datatime
    thisTime = thisDatetime.time()

    if not ((startTime <= thisTime) and (endTime > thisTime)):
        return None

    return thisDatetime
128 129
def getFileFromSet(path, ext, set):
    """Return the file in *path* whose name encodes the given set number,
    falling back to the last valid file (case-insensitive sort) when that
    set does not exist, and to None when the folder has no valid file.
    """
    validFilelist = []
    fileList = os.listdir(path)

    # file-name layout: xYYYYDDDSSS.ext
    # 0 1234 567 89A BCDE
    # H YYYY DDD SSS .ext

    for thisFile in fileList:
        try:
            year = int(thisFile[1:5])
            doy = int(thisFile[5:8])
        except:
            continue

        if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
            continue

        validFilelist.append(thisFile)

    # NOTE(review): 'year' and 'doy' leak from the LAST loop iteration; if
    # no file name parsed this raises NameError, and the pattern mixes the
    # requested set with another file's date -- presumably all files of a
    # folder share one date; confirm.
    myfile = fnmatch.filter(validFilelist,'*%4.4d%3.3d%3.3d*'%(year,doy,set))

    if len(myfile)!= 0:
        return myfile[0]
    else:
        filename = '*%4.4d%3.3d%3.3d%s'%(year,doy,set,ext.lower())
        print 'the filename %s does not exist'%filename
        print '...going to the last file: '

        if validFilelist:
            validFilelist = sorted( validFilelist, key=str.lower )
            return validFilelist[-1]

    return None
162 163
def getlastFileFromPath(path, ext):
    """Return the last file (case-insensitive sort) in *path* that follows
    the 'PYYYYDDDSSS.ext' naming convention, or None when none matches.

    Input:
        path : folder containing the data files
        ext  : extension the files must carry
    """
    candidates = []
    for entry in os.listdir(path):

        # layout: 0=prefix, 1:5=year, 5:8=doy
        year = entry[1:5]
        if not isNumber(year):
            continue

        doy = entry[5:8]
        if not isNumber(doy):
            continue

        # conversions kept for validation (int() rejects e.g. '1e+3',
        # which float-based isNumber accepts); values are unused
        year = int(year)
        doy = int(doy)

        if os.path.splitext(entry)[-1].lower() != ext.lower():
            continue

        candidates.append(entry)

    if candidates:
        return sorted(candidates, key=str.lower)[-1]

    return None
204 205
def checkForRealPath(path, foldercounter, year, doy, set, ext):
    """Find the correctly-cased path of a data file on a case-sensitive
    filesystem.

    The folder prefix (none/'d'/'D') and the file prefix ('d'/'D' for .r
    voltage files, 'p'/'P' for .pdata spectra files) are tried in every
    combination, e.g. for .../D2009307/P2009307367.ext:
        .../.../y2009307367.ext
        .../.../Y2009307367.ext
        .../.../x2009307/y2009307367.ext
        ... and so on.

    Return:
        (fullfilename, filename) of the first existing combination, or
        (None, last-tried-filename) when nothing exists -- (None, None)
        when the extension is not recognized.
    """
    filename = None

    prefixDirList = [None, 'd', 'D']
    if ext.lower() == ".r":          # voltage
        prefixFileList = ['d', 'D']
    elif ext.lower() == ".pdata":    # spectra
        prefixFileList = ['p', 'P']
    else:
        return None, filename

    # sweep every folder-prefix / file-prefix combination
    for prefixDir in prefixDirList:
        thispath = path
        if prefixDir is not None:
            # folder named xYYYYDDD (optionally suffixed with _NN)
            if foldercounter == 0:
                thispath = os.path.join(path, "%s%04d%03d" % ( prefixDir, year, doy ))
            else:
                thispath = os.path.join(path, "%s%04d%03d_%02d" % ( prefixDir, year, doy , foldercounter))

        for prefixFile in prefixFileList:
            # file named xYYYYDDDSSS.ext
            filename = "%s%04d%03d%03d%s" % ( prefixFile, year, doy, set, ext )
            fullfilename = os.path.join( thispath, filename )

            if os.path.exists( fullfilename ):
                return fullfilename, filename

    return None, filename
263 264
def isRadarFolder(folder):
    """Return 1 when *folder* is named like a radar data folder
    ('xYYYYDDD...'), 0 otherwise."""
    try:
        int(folder[1:5])   # year
        int(folder[5:8])   # day of year
    except:
        return 0

    return 1
272 273
def isRadarFile(file):
    """Return 1 when *file* matches the 'xYYYYDDDSSS.ext' naming pattern,
    0 otherwise."""
    try:
        int(file[1:5])    # year
        int(file[5:8])    # day of year
        int(file[8:11])   # set number
    except:
        return 0

    return 1
282 283
def getDateFromRadarFile(file):
    """Decode the 'xYYYYDDDSSS' file name into a datetime.date, or return
    None when the name is malformed."""
    try:
        year = int(file[1:5])
        doy = int(file[5:8])
        int(file[8:11])   # set number: validated, not needed for the date
    except:
        return None

    # day-of-year 1 is January 1st
    return datetime.date(year, 1, 1) + datetime.timedelta(doy - 1)
293 294
class JRODataIO:
    """Base class holding the state shared by the JRO readers and writers:
    header objects, current file handles and block/profile bookkeeping.
    Abstract -- __init__ and run() must be provided by subclasses."""

    # presumably the speed of light (m/s, rounded) -- confirm usage in subclasses
    c = 3E8

    isConfig = False

    # header objects of the currently opened file
    basicHeaderObj = None

    systemHeaderObj = None

    radarControllerHeaderObj = None

    processingHeaderObj = None

    online = 0

    dtype = None

    # NOTE(review): mutable class-level defaults are shared between
    # instances -- confirm subclasses always rebind these per instance.
    pathList = []

    filenameList = []

    filename = None

    ext = None

    flagIsNewFile = 1

    flagDiscontinuousBlock = 0

    flagIsNewBlock = 0

    # file object of the currently opened data file
    fp = None

    firstHeaderSize = 0

    # size in bytes of the basic header (matches BASIC_STRUCTURE)
    basicHeaderSize = 24

    versionFile = 1103

    fileSize = None

#    ippSeconds = None

    fileSizeByHeader = None

    fileIndex = None

    profileIndex = None

    blockIndex = None

    nTotalBlocks = None

    # presumably the max seconds between blocks before flagging a
    # discontinuity -- confirm in subclasses
    maxTimeStep = 30

    lastUTTime = None

    datablock = None

    dataOut = None

    blocksize = None

    getByBlock = False

    def __init__(self):
        # Abstract base: concrete readers/writers must implement this.
        raise ValueError, "Not implemented"

    def run(self):
        # Abstract base: concrete readers/writers must implement this.
        raise ValueError, "Not implemented"

    def getDtypeWidth(self):
        # Width in bytes of one (real or imag) component of self.dtype,
        # resolved through the lookup tables in jroheaderIO.
        dtype_index = get_dtype_index(self.dtype)
        dtype_width = get_dtype_width(dtype_index)

        return dtype_width
375
class JRODataReader(JRODataIO):
    """Block-oriented reader for JRO raw/processed data files.

    Supports offline reading (a fixed, pre-scanned list of files) and
    online reading (polling a directory tree for files that are still
    being written).  Concrete subclasses implement readBlock(),
    getData() and getBlockDimension() for their specific data type.
    """

    nReadBlocks = 0

    delay = 10      # number of seconds to wait for a new file (online mode)

    nTries = 3      # retry count when waiting for a file/block

    nFiles = 3      # number of candidate files to probe before changing folder

    path = None

    foldercounter = 0

    flagNoMoreFiles = 0

    datetimeList = []

    __isFirstTimeOnline = 1

    __printInfo = True

    profileIndex = None

    # Number of transmitters; scales ippSeconds/nProfiles in getBasicHeader.
    nTxs = 1

    txIndex = None

    def __init__(self):

        """

        """

#         raise NotImplementedError, "This method has not been implemented"


    def createObjByDefault(self):
        """
        Abstract: subclasses create their default dataOut object here.
        """
        raise NotImplementedError, "This method has not been implemented"

    def getBlockDimension(self):
        # Abstract: subclasses compute per-block array shapes from the headers.
        raise NotImplementedError, "No implemented"

    def __searchFilesOffLine(self,
                             path,
                             startDate=None,
                             endDate=None,
                             startTime=datetime.time(0,0,0),
                             endTime=datetime.time(23,59,59),
                             set=None,
                             expLabel='',
                             ext='.r',
                             walk=True):
        # Scan one or more comma-separated paths for radar data files in
        # the given date/time range.  With walk=True, descends into
        # per-day "dYYYYDDD" subfolders; otherwise each path is used
        # directly.  Returns (pathList, filenameList) or (None, None).

        self.filenameList = []
        self.datetimeList = []

        pathList = []

        if not walk:
            #pathList.append(path)
            multi_path = path.split(',')
            for single_path in multi_path:

                if not os.path.isdir(single_path):
                    continue

                pathList.append(single_path)

        else:
            #dirList = []
            multi_path = path.split(',')
            for single_path in multi_path:

                if not os.path.isdir(single_path):
                    continue

                dirList = []
                for thisPath in os.listdir(single_path):
                    if not os.path.isdir(os.path.join(single_path,thisPath)):
                        continue
                    if not isRadarFolder(thisPath):
                        continue

                    dirList.append(thisPath)

                if not(dirList):
                    return None, None

                if startDate and endDate:
                    thisDate = startDate

                    while(thisDate <= endDate):
                        year = thisDate.timetuple().tm_year
                        doy = thisDate.timetuple().tm_yday

                        # '?' matches the type character before YYYYDDD.
                        matchlist = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy) + '*')
                        if len(matchlist) == 0:
                            thisDate += datetime.timedelta(1)
                            continue
                        for match in matchlist:
                            pathList.append(os.path.join(single_path,match,expLabel))

                        thisDate += datetime.timedelta(1)
                else:
                    for thiDir in dirList:
                        pathList.append(os.path.join(single_path,thiDir,expLabel))

        if pathList == []:
            print "Any folder was found for the date range: %s-%s" %(startDate, endDate)
            return None, None

        print "%d folder(s) was(were) found for the date range: %s - %s" %(len(pathList), startDate, endDate)

        filenameList = []
        datetimeList = []
        pathDict = {}
        filenameList_to_sort = []

        # Map the first file of each folder back to its folder index so the
        # folders can be visited in chronological (filename) order.
        for i in range(len(pathList)):

            thisPath = pathList[i]

            fileList = glob.glob1(thisPath, "*%s" %ext)
            if len(fileList) < 1:
                continue
            fileList.sort()
            pathDict.setdefault(fileList[0])
            pathDict[fileList[0]] = i
            filenameList_to_sort.append(fileList[0])

        filenameList_to_sort.sort()

        for file in filenameList_to_sort:
            thisPath = pathList[pathDict[file]]

            fileList = glob.glob1(thisPath, "*%s" %ext)
            fileList.sort()

            for file in fileList:

                filename = os.path.join(thisPath,file)
                # isFileinThisTime returns a datetime when the file's data
                # overlaps [startTime, endTime], else a falsy value.
                thisDatetime = isFileinThisTime(filename, startTime, endTime)

                if not(thisDatetime):
                    continue

                filenameList.append(filename)
                datetimeList.append(thisDatetime)

        if not(filenameList):
            print "Any file was found for the time range %s - %s" %(startTime, endTime)
            return None, None

        print "%d file(s) was(were) found for the time range: %s - %s" %(len(filenameList), startTime, endTime)
        print

        for i in range(len(filenameList)):
            print "%s -> [%s]" %(filenameList[i], datetimeList[i].ctime())

        self.filenameList = filenameList
        self.datetimeList = datetimeList

        return pathList, filenameList

    def __searchFilesOnLine(self, path, expLabel = "", ext = None, walk=True, set=None):

        """
        Looks for the last file of the last folder and returns the file
        found plus some extra data.

        Input:
            path : folder containing the data files

            expLabel : name of the sub-experiment (subfolder)

            ext : file extension

            walk : when disabled, do not search inside the per-day
                   subdirectories (doypath)

        Return:
            directory : the directory where the file was found
            filename : the last file of the given folder
            year : the year
            doy : the day-of-year number
            set : the file's set number


        """
        dirList = []

        if not walk:
            fullpath = path
            foldercounter = 0
        else:
            # Keep only directories that look like radar day-folders.
            for thisPath in os.listdir(path):
                if not os.path.isdir(os.path.join(path,thisPath)):
                    continue
                if not isRadarFolder(thisPath):
                    continue

                dirList.append(thisPath)

            if not(dirList):
                return None, None, None, None, None, None

            dirList = sorted( dirList, key=str.lower )

            # The last (alphabetically newest) day-folder is the live one.
            doypath = dirList[-1]
            foldercounter = int(doypath.split('_')[1]) if len(doypath.split('_'))>1 else 0
            fullpath = os.path.join(path, doypath, expLabel)


        print "%s folder was found: " %(fullpath )

        if set == None:
            filename = getlastFileFromPath(fullpath, ext)
        else:
            filename = getFileFromSet(fullpath, ext, set)

        if not(filename):
            return None, None, None, None, None, None

        print "%s file was found" %(filename)

        if not(self.__verifyFile(os.path.join(fullpath, filename))):
            return None, None, None, None, None, None

        # Filename layout: xYYYYDDDSSS.ext
        year = int( filename[1:5] )
        doy  = int( filename[5:8] )
        set  = int( filename[8:11] )

        return fullpath, foldercounter, filename, year, doy, set

    def __setNextFileOffline(self):
        # Advance to the next valid file in self.filenameList, skipping
        # files that fail verification.  Returns 1 on success, 0 when the
        # list is exhausted (and sets flagNoMoreFiles).

        idFile = self.fileIndex

        while (True):
            idFile += 1
            if not(idFile < len(self.filenameList)):
                self.flagNoMoreFiles = 1
#                 print "[Reading] No more Files"
                return 0

            filename = self.filenameList[idFile]

            if not(self.__verifyFile(filename)):
                continue

            fileSize = os.path.getsize(filename)
            fp = open(filename,'rb')
            break

        self.flagIsNewFile = 1
        self.fileIndex = idFile
        self.filename = filename
        self.fileSize = fileSize
        self.fp = fp

#         print "[Reading] Setting the file: %s"%self.filename

        return 1

    def __setNextFileOnline(self):
        """
        Looks for the next file that has enough data to be read inside a
        specific folder; if no valid file is found it waits a fixed delay
        and then probes the next possible n files.

        Affected:
            self.flagIsNewFile
            self.filename
            self.fileSize
            self.fp
            self.set
            self.flagNoMoreFiles

        Return:
            0 : no valid next file could be found
            1 : the file was opened successfully and is ready to be read

        Exceptions:
            If a given file cannot be opened
        """
        nFiles = 0
        fileOk_flag = False
        firstTime_flag = True

        self.set += 1

        # Set numbers roll over at 999; move to the next folder suffix.
        if self.set > 999:
            self.set = 0
            self.foldercounter += 1

        # Look for the first available file.
        fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
        if fullfilename:
            if self.__verifyFile(fullfilename, False):
                fileOk_flag = True

        # If no file was found, wait and search again.
        if not(fileOk_flag):
            for nFiles in range(self.nFiles+1): # probe the next self.nFiles+1 possible files

                if firstTime_flag: # on the first pass, retry self.nTries times
                    tries = self.nTries
                else:
                    tries = 1 # afterwards, only once per candidate

                for nTries in range( tries ):
                    if firstTime_flag:
                        print "\t[Reading] Waiting %0.2f sec for the file \"%s\" , try %03d ..." % ( self.delay, filename, nTries+1 )
                        sleep( self.delay )
                    else:
                        print "\t[Reading] Searching the next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext)

                    fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
                    if fullfilename:
                        if self.__verifyFile(fullfilename):
                            fileOk_flag = True
                            break

                if fileOk_flag:
                    break

                firstTime_flag = False

                print "\t[Reading] Skipping the file \"%s\" due to this file doesn't exist" % filename
                self.set += 1

                if nFiles == (self.nFiles-1): # still not found: switch to the next day's folder
                    self.set = 0
                    self.doy += 1
                    self.foldercounter = 0

        if fileOk_flag:
            self.fileSize = os.path.getsize( fullfilename )
            self.filename = fullfilename
            self.flagIsNewFile = 1
            if self.fp != None: self.fp.close()
            self.fp = open(fullfilename, 'rb')
            self.flagNoMoreFiles = 0
#             print '[Reading] Setting the file: %s' % fullfilename
        else:
            self.fileSize = 0
            self.filename = None
            self.flagIsNewFile = 0
            self.fp = None
            self.flagNoMoreFiles = 1
#             print '[Reading] No more files to read'

        return fileOk_flag

    def setNextFile(self):
        # Close the current file, open the next one (online or offline
        # strategy) and read its first header.  Returns 1/0.
        if self.fp != None:
            self.fp.close()

        if self.online:
            newFile = self.__setNextFileOnline()
        else:
            newFile = self.__setNextFileOffline()

        if not(newFile):
            print '[Reading] No more files to read'
            return 0

        print '[Reading] Setting the file: %s' % self.filename

        self.__readFirstHeader()
        self.nReadBlocks = 0
        return 1

    def __waitNewBlock(self):
        """
        Return 1 if a new data block was found, 0 otherwise.

        Always returns 0 when the reading mode is offline.
        """
        if not self.online:
            return 0

        if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
            return 0

        currentPointer = self.fp.tell()

        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        for nTries in range( self.nTries ):

            # Reopen to refresh the OS view of a file still being written.
            self.fp.close()
            self.fp = open( self.filename, 'rb' )
            self.fp.seek( currentPointer )

            self.fileSize = os.path.getsize( self.filename )
            currentSize = self.fileSize - currentPointer

            if ( currentSize >= neededSize ):
                self.basicHeaderObj.read(self.fp)
                return 1

            if self.fileSize == self.fileSizeByHeader:
#                 self.flagEoF = True
                return 0

            print "[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
            sleep( self.delay )


        return 0

    def waitDataBlock(self,pointer_location):
        # Like __waitNewBlock but waits at an explicit file offset and
        # does not consume the basic header.  Returns 1/0.

        currentPointer = pointer_location

        neededSize = self.processingHeaderObj.blockSize #+ self.basicHeaderSize

        for nTries in range( self.nTries ):
            self.fp.close()
            self.fp = open( self.filename, 'rb' )
            self.fp.seek( currentPointer )

            self.fileSize = os.path.getsize( self.filename )
            currentSize = self.fileSize - currentPointer

            if ( currentSize >= neededSize ):
                return 1

            print "[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
            sleep( self.delay )

        return 0

    def __jumpToLastBlock(self):
        # Online mode only: on the very first read, skip ahead to the
        # last complete block already present in the file so reading
        # starts at the live edge.

        if not(self.__isFirstTimeOnline):
            return

        csize = self.fileSize - self.fp.tell()
        blocksize = self.processingHeaderObj.blockSize

        # Skip the first data block.
        if csize > self.processingHeaderObj.blockSize:
            self.fp.seek(self.fp.tell() + blocksize)
        else:
            return

        csize = self.fileSize - self.fp.tell()
        neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
        while True:

            if self.fp.tell()<self.fileSize:
                self.fp.seek(self.fp.tell() + neededsize)
            else:
                # Stepped past EOF: back up one block and stop.
                self.fp.seek(self.fp.tell() - neededsize)
                break

#         csize = self.fileSize - self.fp.tell()
#         neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
#         factor = int(csize/neededsize)
#         if factor > 0:
#             self.fp.seek(self.fp.tell() + factor*neededsize)

        self.flagIsNewFile = 0
        self.__isFirstTimeOnline = 0

    def __setNewBlock(self):
        # Position the stream on the next block's data, reading its basic
        # header, waiting for it (online) or moving to the next file as
        # needed.  Also detects time discontinuities between blocks.

        if self.fp == None:
            return 0

        if self.online:
            self.__jumpToLastBlock()

        if self.flagIsNewFile:
            return 1

        self.lastUTTime = self.basicHeaderObj.utc
        currentSize = self.fileSize - self.fp.tell()
        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        if (currentSize >= neededSize):
            self.basicHeaderObj.read(self.fp)
            return 1

        if self.__waitNewBlock():
            return 1

        if not(self.setNextFile()):
            return 0

        deltaTime = self.basicHeaderObj.utc - self.lastUTTime #

        self.flagDiscontinuousBlock = 0

        if deltaTime > self.maxTimeStep:
            self.flagDiscontinuousBlock = 1

        return 1

    def readNextBlock(self):
        # Read one complete data block (header + data) and report progress.

        if not(self.__setNewBlock()):
            return 0

        if not(self.readBlock()):
            return 0

        print "[Reading] Block No. %d/%d -> %s" %(self.basicHeaderObj.dataBlock+1,
                                                  self.processingHeaderObj.dataBlocksPerFile,
                                                  self.dataOut.datatime.ctime())
        return 1

    def __readFirstHeader(self):
        # Read the four headers at the start of a file and derive the
        # sample dtype, the expected file size and the block dimensions.

        self.basicHeaderObj.read(self.fp)
        self.systemHeaderObj.read(self.fp)
        self.radarControllerHeaderObj.read(self.fp)
        self.processingHeaderObj.read(self.fp)

        self.firstHeaderSize = self.basicHeaderObj.size

        # The data type is encoded as a one-hot bit inside processFlags;
        # log2 of the masked value recovers the type index (0..5).
        datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
        if datatype == 0:
            datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
        elif datatype == 1:
            datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')])
        elif datatype == 2:
            datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')])
        elif datatype == 3:
            datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')])
        elif datatype == 4:
            datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')])
        elif datatype == 5:
            datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')])
        else:
            raise ValueError, 'Data type was not defined'

        self.dtype = datatype_str
        #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
        # Full headers once per file + one basic header per remaining block.
        self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1)
#         self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
#         self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
        self.getBlockDimension()

    def __verifyFile(self, filename, msgFlag=True):
        # Check that a file can be opened and contains at least one full
        # block of data.  When header sizes are not yet known (first
        # file), parse the headers from the file itself to find out how
        # much data one block needs.
        msg = None
        try:
            fp = open(filename, 'rb')
            currentPosition = fp.tell()
        except IOError:
            traceback.print_exc()
            if msgFlag:
                print "[Reading] The file %s can't be opened" % (filename)
            return False

        neededSize = self.processingHeaderObj.blockSize + self.firstHeaderSize

        if neededSize == 0:
            basicHeaderObj = BasicHeader(LOCALTIME)
            systemHeaderObj = SystemHeader()
            radarControllerHeaderObj = RadarControllerHeader()
            processingHeaderObj = ProcessingHeader()

            try:
                if not( basicHeaderObj.read(fp) ): raise IOError
                if not( systemHeaderObj.read(fp) ): raise IOError
                if not( radarControllerHeaderObj.read(fp) ): raise IOError
                if not( processingHeaderObj.read(fp) ): raise IOError
#                 data_type = int(numpy.log2((processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))

                neededSize = processingHeaderObj.blockSize + basicHeaderObj.size

            except IOError:
                traceback.print_exc()
                if msgFlag:
                    print "[Reading] The file %s is empty or it hasn't enough data" % filename

                fp.close()
                return False
        else:
            msg = "[Reading] Skipping the file %s due to it hasn't enough data" %filename

        fp.close()
        fileSize = os.path.getsize(filename)
        currentSize = fileSize - currentPosition
        if currentSize < neededSize:
            if msgFlag and (msg != None):
                print msg #print"\tSkipping the file %s due to it hasn't enough data" %filename
            return False

        return True

    def findDatafiles(self, path, startDate=None, endDate=None, expLabel='', ext='.r', walk=True):
        # Return the list of dates for which data files exist under the
        # given (possibly comma-separated) path(s), optionally restricted
        # to [startDate, endDate] when walking per-day folders.

        dateList = []
        pathList = []

        if not walk:
            #pathList.append(path)
            multi_path = path.split(',')
            for single_path in multi_path:

                if not os.path.isdir(single_path):
                    continue

                ok = False
                fileList = glob.glob1(single_path, "*"+ext)

                for thisFile in fileList:

                    if not os.path.isfile(os.path.join(single_path, thisFile)):
                        continue

                    if not isRadarFile(thisFile):
                        continue

                    ok = True
                    thisDate = getDateFromRadarFile(thisFile)

                    if thisDate not in dateList:
                        dateList.append(thisDate)

                if ok:
                    pathList.append(single_path)

            return dateList

        multi_path = path.split(',')
        for single_path in multi_path:

            if not os.path.isdir(single_path):
                continue

            dirList = []

            for thisPath in os.listdir(single_path):

                if not os.path.isdir(os.path.join(single_path,thisPath)):
                    continue

                if not isRadarFolder(thisPath):
                    continue

                dirList.append(thisPath)

            if not dirList:
                return dateList

            if startDate and endDate:
                thisDate = startDate

                while(thisDate <= endDate):
                    year = thisDate.timetuple().tm_year
                    doy = thisDate.timetuple().tm_yday

                    matchlist = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy) + '*')
                    if len(matchlist) == 0:
                        thisDate += datetime.timedelta(1)
                        continue

                    for match in matchlist:
                        pathList.append(os.path.join(single_path,match,expLabel))
                        dateList.append(thisDate)

                    thisDate += datetime.timedelta(1)
            else:
                for thisDir in dirList:
                    # Folder names encode the date as xYYYYDDD.
                    year = int(thisDir[1:5])
                    doy = int(thisDir[5:8])
                    thisDate = datetime.date(year,1,1) + datetime.timedelta(doy-1)

                    pathList.append(os.path.join(single_path,thisDir,expLabel))
                    dateList.append(thisDate)

        return dateList


    def setup(self,
              path=None,
              startDate=None,
              endDate=None,
              startTime=datetime.time(0,0,0),
              endTime=datetime.time(23,59,59),
              set=None,
              expLabel = "",
              ext = None,
              online = False,
              delay = 60,
              walk = True,
              getblock = False,
              nTxs = 1):
        # Configure the reader: locate the data files (online or offline),
        # open the first one and populate dataOut from its headers.

        if path == None:
            raise ValueError, "[Reading] The path is not valid"

        if ext == None:
            ext = self.ext

        if online:
            print "[Reading] Searching files in online mode..."

            for nTries in range( self.nTries ):
                fullpath, foldercounter, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext, walk=walk, set=set)

                if fullpath:
                    break

                print '[Reading] Waiting %0.2f sec for an valid file in %s: try %02d ...' % (self.delay, path, nTries+1)
                sleep( self.delay )

            if not(fullpath):
                print "[Reading] There 'isn't any valid file in %s" % path
                return None

            self.year = year
            self.doy = doy
            # One less than the found set: __setNextFileOnline pre-increments.
            self.set = set - 1
            self.path = path
            self.foldercounter = foldercounter
            last_set = None

        else:
            print "[Reading] Searching files in offline mode ..."
            pathList, filenameList = self.__searchFilesOffLine(path, startDate=startDate, endDate=endDate,
                                                               startTime=startTime, endTime=endTime,
                                                               set=set, expLabel=expLabel, ext=ext,
                                                               walk=walk)

            if not(pathList):
                print "[Reading] No *%s files into the folder %s \nfor the range: %s - %s"%(ext, path,
                                                                                            datetime.datetime.combine(startDate,startTime).ctime(),
                                                                                            datetime.datetime.combine(endDate,endTime).ctime())

                sys.exit(-1)


            self.fileIndex = -1
            self.pathList = pathList
            self.filenameList = filenameList
            file_name = os.path.basename(filenameList[-1])
            basename, ext = os.path.splitext(file_name)
            last_set = int(basename[-3:])

        self.online = online
        self.delay = delay
        ext = ext.lower()
        self.ext = ext
        self.getByBlock = getblock
        self.nTxs = int(nTxs)

        if not(self.setNextFile()):
            if (startDate!=None) and (endDate!=None):
                print "[Reading] No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
            elif startDate != None:
                print "[Reading] No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
            else:
                print "[Reading] No files"

            sys.exit(-1)

        self.getBasicHeader()

        if last_set != None:
            self.dataOut.last_block = last_set * self.processingHeaderObj.dataBlocksPerFile + self.basicHeaderObj.dataBlock
        return

    def getBasicHeader(self):
        # Copy the per-block metadata from the basic/controller/processing
        # headers into dataOut, scaling by the number of transmitters.

        self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000. + self.profileIndex * self.radarControllerHeaderObj.ippSeconds

        self.dataOut.flagDiscontinuousBlock = self.flagDiscontinuousBlock

        self.dataOut.timeZone = self.basicHeaderObj.timeZone

        self.dataOut.dstFlag = self.basicHeaderObj.dstFlag

        self.dataOut.errorCount = self.basicHeaderObj.errorCount

        self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime

        self.dataOut.ippSeconds = self.radarControllerHeaderObj.ippSeconds/self.nTxs

        self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock*self.nTxs


    def getFirstHeader(self):
        # Abstract: subclasses copy first-header metadata into dataOut.
        raise ValueError, "This method has not been implemented"

    def getData(self):
        # Abstract: subclasses deliver the next unit of data via dataOut.
        raise ValueError, "This method has not been implemented"

    def hasNotDataInBuffer(self):
        # Abstract: subclasses report whether a new block must be read.
        raise ValueError, "This method has not been implemented"

    def readBlock(self):
        # Abstract: subclasses read one block of their data type.
        raise ValueError, "This method has not been implemented"

    def isEndProcess(self):

        return self.flagNoMoreFiles

    def printReadBlocks(self):

        print "[Reading] Number of read blocks per file %04d" %self.nReadBlocks

    def printTotalBlocks(self):

        print "[Reading] Number of read blocks %04d" %self.nTotalBlocks

    def printNumberOfBlock(self):
        # Report progress for the block just read (only when it is new).

        if self.flagIsNewBlock:
            print "[Reading] Block No. %d/%d -> %s" %(self.basicHeaderObj.dataBlock+1,
                                                      self.processingHeaderObj.dataBlocksPerFile,
                                                      self.dataOut.datatime.ctime())

        self.dataOut.blocknow = self.basicHeaderObj.dataBlock

    def printInfo(self):
        # Dump all four headers once; subsequent calls are no-ops.

        if self.__printInfo == False:
            return

        self.basicHeaderObj.printInfo()
        self.systemHeaderObj.printInfo()
        self.radarControllerHeaderObj.printInfo()
        self.processingHeaderObj.printInfo()

        self.__printInfo = False


    def run(self, **kwargs):
        # Operation entry point: configure once, then emit data each call.

        if not(self.isConfig):

#             self.dataOut = dataOut
            self.setup(**kwargs)
            self.isConfig = True

        self.getData()
1211 1227
class JRODataWriter(JRODataIO):

    """
    This class writes data to processed-data files (.r or .pdata).
    Data is always written block by block.
    """

    blockIndex = 0

    path = None

    setFile = None

    profilesPerBlock = None

    blocksPerFile = None

    nWriteBlocks = 0

    def __init__(self, dataOut=None):
        # Abstract: concrete writers must define a constructor.
        raise ValueError, "Not implemented"


    def hasAllDataInBuffer(self):
        # Abstract: report whether a full block is buffered and ready.
        raise ValueError, "Not implemented"


    def setBlockDimension(self):
        # Abstract: size the write buffers from the headers.
        raise ValueError, "Not implemented"


    def writeBlock(self):
        # Abstract: write one block of the concrete data type.
        raise ValueError, "No implemented"


    def putData(self):
        # Abstract: accept data from dataOut and flush complete blocks.
        raise ValueError, "No implemented"


    def getProcessFlags(self):
        # Build the processFlags bitmask for the processing header from
        # the current dtype and the processing applied to dataOut.

        processFlags = 0

        dtype_index = get_dtype_index(self.dtype)
        procflag_dtype = get_procflag_dtype(dtype_index)

        processFlags += procflag_dtype

        if self.dataOut.flagDecodeData:
            processFlags += PROCFLAG.DECODE_DATA

        if self.dataOut.flagDeflipData:
            processFlags += PROCFLAG.DEFLIP_DATA

        if self.dataOut.code is not None:
            processFlags += PROCFLAG.DEFINE_PROCESS_CODE

        if self.dataOut.nCohInt > 1:
            processFlags += PROCFLAG.COHERENT_INTEGRATION

        if self.dataOut.type == "Spectra":
            if self.dataOut.nIncohInt > 1:
                processFlags += PROCFLAG.INCOHERENT_INTEGRATION

            if self.dataOut.data_dc is not None:
                processFlags += PROCFLAG.SAVE_CHANNELS_DC

        return processFlags

    def setBasicHeader(self):
        # Fill the basic header from dataOut's current timestamp and
        # block counter before it is written to file.

        self.basicHeaderObj.size = self.basicHeaderSize #bytes
        self.basicHeaderObj.version = self.versionFile
        self.basicHeaderObj.dataBlock = self.nTotalBlocks

        # Split the float utctime into whole seconds + milliseconds.
        utc = numpy.floor(self.dataOut.utctime)
        milisecond = (self.dataOut.utctime - utc)* 1000.0

        self.basicHeaderObj.utc = utc
        self.basicHeaderObj.miliSecond = milisecond
        self.basicHeaderObj.timeZone = self.dataOut.timeZone
        self.basicHeaderObj.dstFlag = self.dataOut.dstFlag
        self.basicHeaderObj.errorCount = self.dataOut.errorCount

    def setFirstHeader(self):
        """
        Gets a copy of the First Header.

        Affected:

            self.basicHeaderObj
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.processingHeaderObj

        Return:
            None
        """

        raise ValueError, "No implemented"

    def __writeFirstHeader(self):
        """
        Writes the first header of the file, i.e. the Basic header and the
        Long header (SystemHeader, RadarControllerHeader, ProcessingHeader).

        Affected:
            __dataType

        Return:
            None
        """

        # COMPUTE PARAMETERS

        # The basic header's size field covers the full first header
        # (basic + the three long headers).
        sizeLongHeader = self.systemHeaderObj.size + self.radarControllerHeaderObj.size + self.processingHeaderObj.size
        self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader

        self.basicHeaderObj.write(self.fp)
        self.systemHeaderObj.write(self.fp)
        self.radarControllerHeaderObj.write(self.fp)
        self.processingHeaderObj.write(self.fp)


    def __setNewBlock(self):
        """
        Writes the First Header when the file is new; otherwise writes
        only the Basic Header.

        Return:
            0 : nothing could be written
            1 : the Basic or the First Header was written
        """
        if self.fp == None:
            self.setNextFile()

        if self.flagIsNewFile:
            return 1

        if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
            self.basicHeaderObj.write(self.fp)
            return 1

        # File is full: roll over to the next output file.
        if not( self.setNextFile() ):
            return 0

        return 1


    def writeNextBlock(self):
        """
        Selects the next data block and writes it to a file.

        Return:
            0 : the data block could not be written
            1 : the data block was written
        """
        if not( self.__setNewBlock() ):
            return 0

        self.writeBlock()

        print "[Writing] Block No. %d/%d" %(self.blockIndex, self.processingHeaderObj.dataBlocksPerFile)

        return 1

    def setNextFile(self):
        """
        Determines the next file to be written.

        Affected:
            self.filename
            self.subfolder
            self.fp
            self.setFile
            self.flagIsNewFile

        Return:
            0 : the file cannot be written
            1 : the file is ready to be written
        """
        ext = self.ext
        path = self.path

        if self.fp != None:
            self.fp.close()

        timeTuple = time.localtime( self.dataOut.utctime)
        subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)

        fullpath = os.path.join( path, subfolder )
        if not( os.path.exists(fullpath) ):
            os.mkdir(fullpath)
            self.setFile = -1 # initialize the set counter
        else:
            filesList = os.listdir( fullpath )
            if len( filesList ) > 0:
                filesList = sorted( filesList, key=str.lower )
                filen = filesList[-1]
                # Filenames are expected to have the format
                # 0 1234 567 89A BCDE (hex)
                # x YYYY DDD SSS .ext
                if isNumber( filen[8:11] ):
                    self.setFile = int( filen[8:11] ) # continue from the last file's set number
                else:
                    self.setFile = -1
            else:
                self.setFile = -1 # initialize the set counter

        setFile = self.setFile
        setFile += 1

        filen = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
                                         timeTuple.tm_year,
                                         timeTuple.tm_yday,
                                         setFile,
                                         ext )

        filename = os.path.join( path, subfolder, filen )

        fp = open( filename,'wb' )

        self.blockIndex = 0

        # Save attributes.
        self.filename = filename
        self.subfolder = subfolder
        self.fp = fp
        self.setFile = setFile
        self.flagIsNewFile = 1

        self.setFirstHeader()

        print '[Writing] Opening file: %s'%self.filename

        self.__writeFirstHeader()

        return 1

    def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=64, set=0, ext=None, datatype=2):
        """
        Sets the output data format and writes the First Header.

        Inputs:
            path : directory where data will be saved
            profilesPerBlock : number of profiles per block
            set : file set
            datatype : An integer number that defines data type:
                0 : int8     (1 byte)
                1 : int16    (2 bytes)
                2 : int32    (4 bytes)
                3 : int64    (8 bytes)
                4 : float    (4 bytes)
                5 : double   (8 bytes)

        Return:
            0 : setup failed
            1 : setup succeeded
        """

        if ext == None:
            ext = self.ext

        ext = ext.lower()

        self.ext = ext

        self.path = path

        self.setFile = set - 1

        self.blocksPerFile = blocksPerFile

        self.profilesPerBlock = profilesPerBlock

        self.dataOut = dataOut

        #By default
        self.dtype = self.dataOut.dtype

        # An explicit datatype overrides the dtype inherited from dataOut.
        if datatype is not None:
            self.dtype = get_numpy_dtype(datatype)

        if not(self.setNextFile()):
            print "[Writing] There isn't a next file"
            return 0

        self.setBlockDimension()

        return 1

    def run(self, dataOut, **kwargs):
        # Operation entry point: configure once, then buffer/write data
        # each call.

        if not(self.isConfig):

            self.setup(dataOut, **kwargs)
            self.isConfig = True

        self.putData()
1465 1526
@@ -1,764 +1,705
1 1 '''
2 2 Created on Jul 2, 2014
3 3
4 4 @author: roj-idl71
5 5 '''
6 6 import numpy
7 7
8 8 from jroIO_base import LOCALTIME, JRODataReader, JRODataWriter
9 9 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
10 10 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
11 11 from schainpy.model.data.jrodata import Spectra
12 12
13 13 class SpectraReader(JRODataReader, ProcessingUnit):
14 14 """
15 15 Esta clase permite leer datos de espectros desde archivos procesados (.pdata). La lectura
16 16 de los datos siempre se realiza por bloques. Los datos leidos (array de 3 dimensiones)
17 17 son almacenados en tres buffer's para el Self Spectra, el Cross Spectra y el DC Channel.
18 18
19 19 paresCanalesIguales * alturas * perfiles (Self Spectra)
20 20 paresCanalesDiferentes * alturas * perfiles (Cross Spectra)
21 21 canales * alturas (DC Channels)
22 22
23 23 Esta clase contiene instancias (objetos) de las clases BasicHeader, SystemHeader,
24 24 RadarControllerHeader y Spectra. Los tres primeros se usan para almacenar informacion de la
25 25 cabecera de datos (metadata), y el cuarto (Spectra) para obtener y almacenar un bloque de
26 26 datos desde el "buffer" cada vez que se ejecute el metodo "getData".
27 27
28 28 Example:
29 29 dpath = "/home/myuser/data"
30 30
31 31 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
32 32
33 33 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
34 34
35 35 readerObj = SpectraReader()
36 36
37 37 readerObj.setup(dpath, startTime, endTime)
38 38
39 39 while(True):
40 40
41 41 readerObj.getData()
42 42
43 43 print readerObj.data_spc
44 44
45 45 print readerObj.data_cspc
46 46
47 47 print readerObj.data_dc
48 48
49 49 if readerObj.flagNoMoreFiles:
50 50 break
51 51
52 52 """
53 53
54 54 pts2read_SelfSpectra = 0
55 55
56 56 pts2read_CrossSpectra = 0
57 57
58 58 pts2read_DCchannels = 0
59 59
60 60 ext = ".pdata"
61 61
62 62 optchar = "P"
63 63
64 64 dataOut = None
65 65
66 66 nRdChannels = None
67 67
68 68 nRdPairs = None
69 69
70 70 rdPairList = []
71 71
72 72 def __init__(self):
73 73 """
74 74 Inicializador de la clase SpectraReader para la lectura de datos de espectros.
75 75
76 76 Inputs:
77 77 dataOut : Objeto de la clase Spectra. Este objeto sera utilizado para
78 78 almacenar un perfil de datos cada vez que se haga un requerimiento
79 79 (getData). El perfil sera obtenido a partir del buffer de datos,
80 80 si el buffer esta vacio se hara un nuevo proceso de lectura de un
81 81 bloque de datos.
82 82 Si este parametro no es pasado se creara uno internamente.
83 83
84 84 Affected:
85 85 self.dataOut
86 86
87 87 Return : None
88 88 """
89 89
90 90 #Eliminar de la base la herencia
91 91 ProcessingUnit.__init__(self)
92 92
93 93 # self.isConfig = False
94 94
95 95 self.pts2read_SelfSpectra = 0
96 96
97 97 self.pts2read_CrossSpectra = 0
98 98
99 99 self.pts2read_DCchannels = 0
100 100
101 101 self.datablock = None
102 102
103 103 self.utc = None
104 104
105 105 self.ext = ".pdata"
106 106
107 107 self.optchar = "P"
108 108
109 109 self.basicHeaderObj = BasicHeader(LOCALTIME)
110 110
111 111 self.systemHeaderObj = SystemHeader()
112 112
113 113 self.radarControllerHeaderObj = RadarControllerHeader()
114 114
115 115 self.processingHeaderObj = ProcessingHeader()
116 116
117 117 self.online = 0
118 118
119 119 self.fp = None
120 120
121 121 self.idFile = None
122 122
123 123 self.dtype = None
124 124
125 125 self.fileSizeByHeader = None
126 126
127 127 self.filenameList = []
128 128
129 129 self.filename = None
130 130
131 131 self.fileSize = None
132 132
133 133 self.firstHeaderSize = 0
134 134
135 135 self.basicHeaderSize = 24
136 136
137 137 self.pathList = []
138 138
139 139 self.lastUTTime = 0
140 140
141 141 self.maxTimeStep = 30
142 142
143 143 self.flagNoMoreFiles = 0
144 144
145 145 self.set = 0
146 146
147 147 self.path = None
148 148
149 149 self.delay = 60 #seconds
150 150
151 151 self.nTries = 3 #quantity tries
152 152
153 153 self.nFiles = 3 #number of files for searching
154 154
155 155 self.nReadBlocks = 0
156 156
157 157 self.flagIsNewFile = 1
158 158
159 159 self.__isFirstTimeOnline = 1
160 160
161 161 # self.ippSeconds = 0
162 162
163 163 self.flagDiscontinuousBlock = 0
164 164
165 165 self.flagIsNewBlock = 0
166 166
167 167 self.nTotalBlocks = 0
168 168
169 169 self.blocksize = 0
170 170
171 171 self.dataOut = self.createObjByDefault()
172 172
173 173 self.profileIndex = 1 #Always
174 174
175 175
176 176 def createObjByDefault(self):
177 177
178 178 dataObj = Spectra()
179 179
180 180 return dataObj
181 181
182 182 def __hasNotDataInBuffer(self):
183 183 return 1
184 184
185 185
186 186 def getBlockDimension(self):
187 187 """
188 188 Obtiene la cantidad de puntos a leer por cada bloque de datos
189 189
190 190 Affected:
191 191 self.nRdChannels
192 192 self.nRdPairs
193 193 self.pts2read_SelfSpectra
194 194 self.pts2read_CrossSpectra
195 195 self.pts2read_DCchannels
196 196 self.blocksize
197 197 self.dataOut.nChannels
198 198 self.dataOut.nPairs
199 199
200 200 Return:
201 201 None
202 202 """
203 203 self.nRdChannels = 0
204 204 self.nRdPairs = 0
205 205 self.rdPairList = []
206 206
207 207 for i in range(0, self.processingHeaderObj.totalSpectra*2, 2):
208 208 if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i+1]:
209 209 self.nRdChannels = self.nRdChannels + 1 #par de canales iguales
210 210 else:
211 211 self.nRdPairs = self.nRdPairs + 1 #par de canales diferentes
212 212 self.rdPairList.append((self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1]))
213 213
214 214 pts2read = self.processingHeaderObj.nHeights * self.processingHeaderObj.profilesPerBlock
215 215
216 216 self.pts2read_SelfSpectra = int(self.nRdChannels * pts2read)
217 217 self.blocksize = self.pts2read_SelfSpectra
218 218
219 219 if self.processingHeaderObj.flag_cspc:
220 220 self.pts2read_CrossSpectra = int(self.nRdPairs * pts2read)
221 221 self.blocksize += self.pts2read_CrossSpectra
222 222
223 223 if self.processingHeaderObj.flag_dc:
224 224 self.pts2read_DCchannels = int(self.systemHeaderObj.nChannels * self.processingHeaderObj.nHeights)
225 225 self.blocksize += self.pts2read_DCchannels
226 226
227 227 # self.blocksize = self.pts2read_SelfSpectra + self.pts2read_CrossSpectra + self.pts2read_DCchannels
228 228
229 229
230 230 def readBlock(self):
231 231 """
232 232 Lee el bloque de datos desde la posicion actual del puntero del archivo
233 233 (self.fp) y actualiza todos los parametros relacionados al bloque de datos
234 234 (metadata + data). La data leida es almacenada en el buffer y el contador del buffer
235 235 es seteado a 0
236 236
237 237 Return: None
238 238
239 239 Variables afectadas:
240 240
241 241 self.flagIsNewFile
242 242 self.flagIsNewBlock
243 243 self.nTotalBlocks
244 244 self.data_spc
245 245 self.data_cspc
246 246 self.data_dc
247 247
248 248 Exceptions:
249 249 Si un bloque leido no es un bloque valido
250 250 """
251 251 blockOk_flag = False
252 252 fpointer = self.fp.tell()
253 253
254 254 spc = numpy.fromfile( self.fp, self.dtype[0], self.pts2read_SelfSpectra )
255 255 spc = spc.reshape( (self.nRdChannels, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #transforma a un arreglo 3D
256 256
257 257 if self.processingHeaderObj.flag_cspc:
258 258 cspc = numpy.fromfile( self.fp, self.dtype, self.pts2read_CrossSpectra )
259 259 cspc = cspc.reshape( (self.nRdPairs, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #transforma a un arreglo 3D
260 260
261 261 if self.processingHeaderObj.flag_dc:
262 262 dc = numpy.fromfile( self.fp, self.dtype, self.pts2read_DCchannels ) #int(self.processingHeaderObj.nHeights*self.systemHeaderObj.nChannels) )
263 263 dc = dc.reshape( (self.systemHeaderObj.nChannels, self.processingHeaderObj.nHeights) ) #transforma a un arreglo 2D
264 264
265 265
266 266 if not(self.processingHeaderObj.shif_fft):
267 267 #desplaza a la derecha en el eje 2 determinadas posiciones
268 268 shift = int(self.processingHeaderObj.profilesPerBlock/2)
269 269 spc = numpy.roll( spc, shift , axis=2 )
270 270
271 271 if self.processingHeaderObj.flag_cspc:
272 272 #desplaza a la derecha en el eje 2 determinadas posiciones
273 273 cspc = numpy.roll( cspc, shift, axis=2 )
274 274
275 275 # self.processingHeaderObj.shif_fft = True
276
276
277 #Dimensions : nChannels, nProfiles, nSamples
277 278 spc = numpy.transpose( spc, (0,2,1) )
278 279 self.data_spc = spc
279 280
280 281 if self.processingHeaderObj.flag_cspc:
281 282 cspc = numpy.transpose( cspc, (0,2,1) )
282 283 self.data_cspc = cspc['real'] + cspc['imag']*1j
283 284 else:
284 285 self.data_cspc = None
285 286
286 287 if self.processingHeaderObj.flag_dc:
287 288 self.data_dc = dc['real'] + dc['imag']*1j
288 289 else:
289 290 self.data_dc = None
290 291
291 292 self.flagIsNewFile = 0
292 293 self.flagIsNewBlock = 1
293 294
294 295 self.nTotalBlocks += 1
295 296 self.nReadBlocks += 1
296 297
297 298 return 1
298 299
299 300 def getFirstHeader(self):
300 301
302 self.getBasicHeader()
303
301 304 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
302 305
303 306 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
304 307
305 308 # self.dataOut.ippSeconds = self.ippSeconds
306 309
307 310 # self.dataOut.timeInterval = self.radarControllerHeaderObj.ippSeconds * self.processingHeaderObj.nCohInt * self.processingHeaderObj.nIncohInt * self.processingHeaderObj.profilesPerBlock
308 311
309 312 self.dataOut.dtype = self.dtype
310 313
311 314 # self.dataOut.nPairs = self.nPairs
312 315
313 316 self.dataOut.pairsList = self.rdPairList
314 317
315 318 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
316 319
317 320 self.dataOut.nFFTPoints = self.processingHeaderObj.profilesPerBlock
318 321
319 322 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
320 323
321 324 self.dataOut.nIncohInt = self.processingHeaderObj.nIncohInt
322 325
323 326 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
324 327
325 328 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
326 329
327 330 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
328 331
329 332 self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft
330 333
331 334 self.dataOut.flagDecodeData = False #asumo q la data no esta decodificada
332 335
333 336 self.dataOut.flagDeflipData = False #asumo q la data esta sin flip
334 337
335 338 if self.radarControllerHeaderObj.code is not None:
336 339
337 340 # self.dataOut.nCode = self.radarControllerHeaderObj.nCode
338 341 #
339 342 # self.dataOut.nBaud = self.radarControllerHeaderObj.nBaud
340 343 #
341 344 # self.dataOut.code = self.radarControllerHeaderObj.code
342 345
343 346 self.dataOut.flagDecodeData = True
344 347
345 348 def getData(self):
346 349 """
347 350 First method to execute before "RUN" is called.
348 351
349 352 Copia el buffer de lectura a la clase "Spectra",
350 353 con todos los parametros asociados a este (metadata). cuando no hay datos en el buffer de
351 354 lectura es necesario hacer una nueva lectura de los bloques de datos usando "readNextBlock"
352 355
353 356 Return:
354 357 0 : Si no hay mas archivos disponibles
355 358 1 : Si hizo una buena copia del buffer
356 359
357 360 Affected:
358 361 self.dataOut
359 362
360 363 self.flagDiscontinuousBlock
361 364 self.flagIsNewBlock
362 365 """
363 366
364 367 if self.flagNoMoreFiles:
365 368 self.dataOut.flagNoData = True
366 369 print 'Process finished'
367 370 return 0
368 371
369 372 self.flagDiscontinuousBlock = 0
370 373 self.flagIsNewBlock = 0
371 374
372 375 if self.__hasNotDataInBuffer():
373 376
374 377 if not( self.readNextBlock() ):
375 378 self.dataOut.flagNoData = True
376 379 return 0
377 380
378 381 #data es un numpy array de 3 dmensiones (perfiles, alturas y canales)
379 382
380 383 if self.data_dc is None:
381 384 self.dataOut.flagNoData = True
382 385 return 0
383 386
384 387 self.getBasicHeader()
385 388
386 389 self.getFirstHeader()
387 390
388 391 self.dataOut.data_spc = self.data_spc
389 392
390 393 self.dataOut.data_cspc = self.data_cspc
391 394
392 395 self.dataOut.data_dc = self.data_dc
393 396
394 397 self.dataOut.flagNoData = False
395 398
396 399 self.dataOut.realtime = self.online
397 400
398 401 return self.dataOut.data_spc
399 402
class SpectraWriter(JRODataWriter, Operation):

    """
    Writes spectra data blocks to processed (.pdata) files; writing is
    always performed one whole block at a time.
    """

    ext = ".pdata"

    optchar = "P"

    shape_spc_Buffer = None     # (nChannels, nHeights, profilesPerBlock)

    shape_cspc_Buffer = None    # (nPairs, nHeights, profilesPerBlock)

    shape_dc_Buffer = None      # (nChannels, nHeights)

    data_spc = None

    data_cspc = None

    data_dc = None

    # dataOut = None

    def __init__(self):
        """
        Initialize writer state and empty header containers.

        Affected:
            self.dataOut, self.basicHeaderObj, self.systemHeaderObj,
            self.radarControllerHeaderObj, self.processingHeaderObj

        Return: None
        """

        Operation.__init__(self)

        self.isConfig = False

        self.nTotalBlocks = 0

        self.data_spc = None

        self.data_cspc = None

        self.data_dc = None

        self.fp = None

        self.flagIsNewFile = 1

        self.nTotalBlocks = 0

        self.flagIsNewBlock = 0

        self.setFile = None

        self.dtype = None

        self.path = None

        self.noMoreFiles = 0

        self.filename = None

        self.basicHeaderObj = BasicHeader(LOCALTIME)

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()

    def hasAllDataInBuffer(self):
        # One dataOut unit always carries a complete spectra block.
        return 1

    def setBlockDimension(self):
        """
        Derive the shapes of the sub-blocks that make up one written block.

        Affected:
            self.shape_spc_Buffer
            self.shape_cspc_Buffer
            self.shape_dc_Buffer

        Return: None
        """
        self.shape_spc_Buffer = (self.dataOut.nChannels,
                                 self.processingHeaderObj.nHeights,
                                 self.processingHeaderObj.profilesPerBlock)

        self.shape_cspc_Buffer = (self.dataOut.nPairs,
                                  self.processingHeaderObj.nHeights,
                                  self.processingHeaderObj.profilesPerBlock)

        self.shape_dc_Buffer = (self.dataOut.nChannels,
                                self.processingHeaderObj.nHeights)

    def writeBlock(self):
        """
        Write the staged buffers to the current output file.

        Self spectra are written as plain reals (self.dtype[0]); cross
        spectra and DC channels are written as (real, imag) structured
        pairs. When the data is not fft-shifted the profile axis is rolled
        back before writing, mirroring SpectraReader.readBlock.

        Affected:
            self.data_spc, self.data_cspc, self.data_dc,
            self.flagIsNewFile, self.flagIsNewBlock, self.nTotalBlocks,
            self.nWriteBlocks, self.blockIndex

        Return: None
        """

        spc = numpy.transpose( self.data_spc, (0,2,1) )
        if not( self.processingHeaderObj.shif_fft ):
            spc = numpy.roll( spc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #roll the profile axis
        data = spc.reshape((-1))
        data = data.astype(self.dtype[0])
        data.tofile(self.fp)

        if self.data_cspc is not None:
            data = numpy.zeros( self.shape_cspc_Buffer, self.dtype )
            cspc = numpy.transpose( self.data_cspc, (0,2,1) )
            if not( self.processingHeaderObj.shif_fft ):
                cspc = numpy.roll( cspc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #roll the profile axis
            data['real'] = cspc.real
            data['imag'] = cspc.imag
            data = data.reshape((-1))
            data.tofile(self.fp)

        if self.data_dc is not None:
            data = numpy.zeros( self.shape_dc_Buffer, self.dtype )
            dc = self.data_dc
            data['real'] = dc.real
            data['imag'] = dc.imag
            data = data.reshape((-1))
            data.tofile(self.fp)

        self.data_spc.fill(0)

        if self.data_dc is not None:
            self.data_dc.fill(0)

        if self.data_cspc is not None:
            self.data_cspc.fill(0)

        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1
        self.nTotalBlocks += 1
        self.nWriteBlocks += 1
        self.blockIndex += 1

        # print "[Writing] Block = %d04" %self.blockIndex

    def putData(self):
        """
        Stage one block from self.dataOut and write it to the current file.

        Affected:
            self.data_spc
            self.data_cspc
            self.data_dc

        Return:
            0 : no data available, or no more files can be written
            1 : one data block was written to a file
        """

        if self.dataOut.flagNoData:
            return 0

        self.flagIsNewBlock = 0

        if self.dataOut.flagDiscontinuousBlock:
            # Bug fix: these buffers may legitimately be None (files without
            # cross-spectra or DC channels, or before the first block), so
            # guard before clearing them.
            if self.data_spc is not None:
                self.data_spc.fill(0)
            if self.data_cspc is not None:
                self.data_cspc.fill(0)
            if self.data_dc is not None:
                self.data_dc.fill(0)
            self.setNextFile()

        if self.flagIsNewFile == 0:
            self.setBasicHeader()

        self.data_spc = self.dataOut.data_spc.copy()

        if self.dataOut.data_cspc is not None:
            self.data_cspc = self.dataOut.data_cspc.copy()

        # Bug fix: data_dc was copied unconditionally although it can be
        # None; writeBlock already treats it as optional.
        if self.dataOut.data_dc is not None:
            self.data_dc = self.dataOut.data_dc.copy()

        # #self.processingHeaderObj.dataBlocksPerFile)
        if self.hasAllDataInBuffer():
            # self.setFirstHeader()
            self.writeNextBlock()

        return 1

    def __getBlockSize(self):
        '''
        Return the number of bytes occupied by one Spectra data block.
        '''

        dtype_width = self.getDtypeWidth()

        pts2write = self.dataOut.nHeights * self.dataOut.nFFTPoints

        pts2write_SelfSpectra = int(self.dataOut.nChannels * pts2write)
        blocksize = (pts2write_SelfSpectra*dtype_width)

        if self.dataOut.data_cspc is not None:
            pts2write_CrossSpectra = int(self.dataOut.nPairs * pts2write)
            blocksize += (pts2write_CrossSpectra*dtype_width*2)   # *2: real + imag

        if self.dataOut.data_dc is not None:
            pts2write_DCchannels = int(self.dataOut.nChannels * self.dataOut.nHeights)
            blocksize += (pts2write_DCchannels*dtype_width*2)     # *2: real + imag

        return blocksize

    def setFirstHeader(self):

        """
        Build the per-file headers (system, radar controller, processing)
        from self.dataOut before the first block of a new file is written.

        Affected:
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.processingHeaderObj

        Return:
            None
        """

        self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
        self.systemHeaderObj.nChannels = self.dataOut.nChannels
        self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()

        # Recompute the RC header size: the code table is re-packed as
        # 32-bit words, so the size inherited from the reader must be
        # adjusted. NOTE(review): assumes nBaud/nCode are valid numbers even
        # for uncoded experiments -- confirm.
        old_code_size = self.dataOut.radarControllerHeaderObj.code_size
        new_code_size = int(numpy.ceil(self.dataOut.nBaud/32.))*self.dataOut.nCode*4
        self.radarControllerHeaderObj.size = self.radarControllerHeaderObj.size - old_code_size + new_code_size

        self.setBasicHeader()

        processingHeaderSize = 40 # fixed part, bytes
        self.processingHeaderObj.dtype = 1 # Spectra
        self.processingHeaderObj.blockSize = self.__getBlockSize()
        self.processingHeaderObj.profilesPerBlock = self.dataOut.nFFTPoints
        self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
        self.processingHeaderObj.nWindows = 1 #could be 1 or self.dataOut.processingHeaderObj.nWindows
        self.processingHeaderObj.processFlags = self.getProcessFlags()
        self.processingHeaderObj.nCohInt = self.dataOut.nCohInt # required to derive timeInterval
        self.processingHeaderObj.nIncohInt = self.dataOut.nIncohInt
        self.processingHeaderObj.totalSpectra = self.dataOut.nPairs + self.dataOut.nChannels
        self.processingHeaderObj.shif_fft = self.dataOut.flagShiftFFT

        if self.processingHeaderObj.totalSpectra > 0:
            # self spectra are encoded as (ch, ch) pairs, followed by the
            # cross-spectra channel pairs
            channelList = []
            for channel in range(self.dataOut.nChannels):
                channelList.append(channel)
                channelList.append(channel)

            pairsList = []
            if self.dataOut.nPairs > 0:
                for pair in self.dataOut.pairsList:
                    pairsList.append(pair[0])
                    pairsList.append(pair[1])

            spectraComb = channelList + pairsList
            spectraComb = numpy.array(spectraComb,dtype="u1")
            self.processingHeaderObj.spectraComb = spectraComb
            sizeOfSpcComb = len(spectraComb)
            processingHeaderSize += sizeOfSpcComb

        # The processing header should not have information about code

        if self.processingHeaderObj.nWindows != 0:
            self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
            self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
            self.processingHeaderObj.nHeights = self.dataOut.nHeights
            self.processingHeaderObj.samplesWin = self.dataOut.nHeights
            # firstHeight (4) + deltaHeight (4) + nHeights (4), per window
            sizeOfWindows = (4 + 4 + 4)*self.processingHeaderObj.nWindows
            processingHeaderSize += sizeOfWindows

        self.processingHeaderObj.size = processingHeaderSize
764 705
@@ -1,652 +1,599
1 1 '''
2 2 Created on Jul 2, 2014
3 3
4 4 @author: roj-idl71
5 5 '''
6 6
7 7 import numpy
8 8
9 9 from jroIO_base import LOCALTIME, JRODataReader, JRODataWriter
10 10 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
11 11 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
12 12 from schainpy.model.data.jrodata import Voltage
13 13
class VoltageReader(JRODataReader, ProcessingUnit):
    """
    Reads voltage data from raw-data (.r) files.

    Reading is always performed one block at a time; a decoded block is a
    3-D array (profiles * heights * channels) kept in self.datablock.

    The instance holds BasicHeader, SystemHeader, RadarControllerHeader and
    ProcessingHeader objects with the file metadata, and delivers one
    profile per getData() call through self.dataOut (a Voltage object).

    Example:

        dpath = "/home/myuser/data"

        startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)

        endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)

        readerObj = VoltageReader()

        readerObj.setup(dpath, startTime, endTime)

        while(True):

            #to get one profile
            profile = readerObj.getData()

            #print the profile
            print profile

            #If you want to see all datablock
            print readerObj.datablock

            if readerObj.flagNoMoreFiles:
                break

    """

    ext = ".r"          # raw-data file extension

    optchar = "D"       # presumably the type character used in file names -- confirm against jroIO_base
    dataOut = None      # Voltage object filled by getData()
59 59
60 60
    def __init__(self):
        """
        Initialize the VoltageReader.

        Creates the header containers (BasicHeader, SystemHeader,
        RadarControllerHeader, ProcessingHeader), resets all file/block
        bookkeeping and builds the default output object self.dataOut
        (a Voltage instance, see createObjByDefault).

        Affected:
            self.dataOut

        Return:
            None
        """

        ProcessingUnit.__init__(self)

        self.isConfig = False

        self.datablock = None

        self.utc = 0

        self.ext = ".r"

        self.optchar = "D"

        self.basicHeaderObj = BasicHeader(LOCALTIME)

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()

        self.online = 0

        self.fp = None

        self.idFile = None

        self.dtype = None

        self.fileSizeByHeader = None

        self.filenameList = []

        self.filename = None

        self.fileSize = None

        self.firstHeaderSize = 0

        self.basicHeaderSize = 24

        self.pathList = []

        self.filenameList = []

        self.lastUTTime = 0

        self.maxTimeStep = 30

        self.flagNoMoreFiles = 0

        self.set = 0

        self.path = None

        # sentinel larger than any profilesPerBlock: forces a block read on
        # the first getData() call
        self.profileIndex = 2**32-1

        self.delay = 3 #seconds

        self.nTries = 3 #quantity tries

        self.nFiles = 3 #number of files for searching

        self.nReadBlocks = 0

        self.flagIsNewFile = 1

        self.__isFirstTimeOnline = 1

        # self.ippSeconds = 0

        self.flagDiscontinuousBlock = 0

        self.flagIsNewBlock = 0

        self.nTotalBlocks = 0

        self.blocksize = 0

        self.dataOut = self.createObjByDefault()

        # each stored profile holds nTxs transmissions; heights/profiles are
        # re-split accordingly in getFirstHeader
        self.nTxs = 1

        self.txIndex = 0
163 163
def createObjByDefault(self):
    """Return a fresh, empty Voltage container used as the default output object."""

    return Voltage()
169 169
    def __hasNotDataInBuffer(self):
        # Returns 1 when every profile of the current block has been
        # consumed (i.e. a new block must be read from file), 0 otherwise.

        if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
            return 1

        return 0
176 176
177 177
def getBlockDimension(self):
    """
    Compute how many samples make up one raw-data block.

    blocksize = profilesPerBlock * nHeights * nChannels, taken from the
    processing and system headers of the file currently being read.

    Affected:
        self.blocksize

    Return:
        None
    """
    pHeader = self.processingHeaderObj
    samples_per_profile = pHeader.nHeights * self.systemHeaderObj.nChannels
    self.blocksize = pHeader.profilesPerBlock * samples_per_profile
190 190
191 191
    def readBlock(self):
        """
        Read one data block at the current file position and decode it into
        self.datablock (complex array, dimensions nChannels x nProfiles x
        nSamples), resetting the profile counter.

        Inputs:
            None

        Return:
            1 on success.

        Affected:
            self.profileIndex, self.datablock, self.flagIsNewFile,
            self.flagIsNewBlock, self.nTotalBlocks, self.nReadBlocks

        Exceptions:
            A short read raises inside reshape; for online reading the block
            is retried once via waitDataBlock from the saved file position.
        """
        current_pointer_location = self.fp.tell()
        junk = numpy.fromfile( self.fp, self.dtype, self.blocksize )

        try:
            junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
        except:
            # NOTE(review): bare except also hides unrelated errors; the
            # intended catch is the reshape failing on a truncated block.
            #print "The read block (%3d) has not enough data" %self.nReadBlocks

            if self.waitDataBlock(pointer_location=current_pointer_location):
                junk = numpy.fromfile( self.fp, self.dtype, self.blocksize )
                junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
            # return 0

        #Dimensions : nChannels, nProfiles, nSamples

        junk = numpy.transpose(junk, (2,0,1))
        # combine the structured (real, imag) fields into a complex array
        self.datablock = junk['real'] + junk['imag']*1j

        self.profileIndex = 0

        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1

        self.nTotalBlocks += 1
        self.nReadBlocks += 1

        return 1
240 242
def getFirstHeader(self):
    """
    Propagate per-file metadata from the freshly-read headers into
    self.dataOut: basic header fields, system/radar-controller header
    copies, dtype, profile count, height list, channel list and flags.

    When nTxs > 1 each stored profile is split into nTxs output profiles,
    so ippSeconds shrinks and the height axis is divided accordingly.

    Raises:
        ValueError: when nHeights is not divisible by nTxs (the height
            axis cannot be split evenly among the transmitters).
    """

    self.getBasicHeader()

    self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()

    self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()

    if self.nTxs > 1:
        self.dataOut.radarControllerHeaderObj.ippSeconds = self.radarControllerHeaderObj.ippSeconds/self.nTxs

    self.dataOut.dtype = self.dtype

    self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock*self.nTxs

    if self.processingHeaderObj.nHeights % self.nTxs != 0:
        # Bug fix: the old message had nTxs/nHeights roles swapped; the
        # check requires nHeights to be a multiple of nTxs.
        raise ValueError("nHeights (%d) should be a multiple of nTxs (%d)" %(self.processingHeaderObj.nHeights, self.nTxs))

    xf = self.processingHeaderObj.firstHeight + int(self.processingHeaderObj.nHeights/self.nTxs)*self.processingHeaderObj.deltaHeight

    self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)

    self.dataOut.channelList = range(self.systemHeaderObj.nChannels)

    self.dataOut.nCohInt = self.processingHeaderObj.nCohInt

    self.dataOut.flagDecodeData = False #data assumed not decoded

    self.dataOut.flagDeflipData = False #data assumed not deflipped

    # (removed a duplicated flagShiftFFT = False assignment)
    self.dataOut.flagShiftFFT = False
282 286
def getData(self):
    """Copy one unit of data from the read buffer into self.dataOut.

    Fetches one profile (or the whole block when self.getByBlock is set)
    from self.datablock into the output "Voltage" object together with
    its metadata, reading a new block from the file when the buffer is
    exhausted, and advances self.profileIndex.

    Returns:
        0 when there is no data (or no more files); otherwise the data
        array that was copied into self.dataOut.data.

        If self.getByBlock is False:
            self.dataOut.data = buffer[:, thisProfile, :]  # [nChannels, nHeis]
        If self.getByBlock is True:
            self.dataOut.data = buffer[:, :, :]  # [nChannels, nProfiles, nHeis]

    Affected:
        self.dataOut, self.profileIndex, self.flagDiscontinuousBlock,
        self.flagIsNewBlock
    """

    if self.flagNoMoreFiles:
        self.dataOut.flagNoData = True
        # Single-argument call form: works in Python 2 and 3.
        print('Process finished')
        return 0

    self.flagDiscontinuousBlock = 0
    self.flagIsNewBlock = 0

    if self.__hasNotDataInBuffer():

        if not self.readNextBlock():
            return 0

        self.getFirstHeader()

    if self.datablock is None:
        self.dataOut.flagNoData = True
        return 0

    if not self.getByBlock:
        # Return profile by profile.  When nTxs > 1 each stored profile
        # is split into nTxs sub-profiles (nProfiles was scaled to match).
        self.dataOut.flagDataAsBlock = False

        if self.nTxs == 1:
            self.dataOut.data = self.datablock[:, self.profileIndex, :]
            self.dataOut.profileIndex = self.profileIndex

            self.profileIndex += 1

        else:
            heightsPerTx = int(self.processingHeaderObj.nHeights / self.nTxs)
            iniHei_ForThisTx = self.txIndex * heightsPerTx
            endHei_ForThisTx = (self.txIndex + 1) * heightsPerTx

            self.dataOut.data = self.datablock[:, self.profileIndex, iniHei_ForThisTx:endHei_ForThisTx]
            self.dataOut.profileIndex = self.profileIndex * self.nTxs + self.txIndex

            self.txIndex += 1

            if self.txIndex == self.nTxs:
                self.txIndex = 0
                self.profileIndex += 1

    else:
        # Return the whole block at once.
        self.dataOut.flagDataAsBlock = True
        self.dataOut.data = self.datablock
        self.dataOut.profileIndex = self.processingHeaderObj.profilesPerBlock

        self.profileIndex = self.processingHeaderObj.profilesPerBlock

    self.dataOut.flagNoData = False

    self.getBasicHeader()

    self.dataOut.realtime = self.online

    return self.dataOut.data
387 391
class VoltageWriter(JRODataWriter, Operation):
    """Writer of voltage data to processed (.r) files.

    Data is always written block by block: profiles are accumulated in
    self.datablock until a full block is available, then flushed to disk.
    """

    ext = ".r"

    optchar = "D"

    shapeBuffer = None

    def __init__(self):
        """Initialize the VoltageWriter.

        Affected:
            self.dataOut

        Return: None
        """
        Operation.__init__(self)

        self.nTotalBlocks = 0

        self.profileIndex = 0

        self.isConfig = False

        self.fp = None

        self.flagIsNewFile = 1

        self.blockIndex = 0

        self.flagIsNewBlock = 0

        self.setFile = None

        self.dtype = None

        self.path = None

        self.filename = None

        self.basicHeaderObj = BasicHeader(LOCALTIME)

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()

    def hasAllDataInBuffer(self):
        # 1 when the buffer already holds a full block of profiles.
        if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
            return 1
        return 0

    def setBlockDimension(self):
        """Set the dimensional shapes of the sub-buffers composing a block.

        Affected:
            self.shapeBuffer
            self.datablock

        Return: None
        """
        # On-disk layout: (profiles, heights, channels).
        self.shapeBuffer = (self.processingHeaderObj.profilesPerBlock,
                            self.processingHeaderObj.nHeights,
                            self.systemHeaderObj.nChannels)

        # In-memory layout: (channels, profiles, heights).
        self.datablock = numpy.zeros((self.systemHeaderObj.nChannels,
                                      self.processingHeaderObj.profilesPerBlock,
                                      self.processingHeaderObj.nHeights),
                                     dtype=numpy.dtype('complex64'))

    def writeBlock(self):
        """Write the buffered block to the designated file.

        Affected:
            self.profileIndex
            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
            self.blockIndex

        Return: None
        """
        data = numpy.zeros(self.shapeBuffer, self.dtype)

        # Reorder (channels, profiles, heights) -> (profiles, heights, channels)
        # to match the on-disk record layout.
        junk = numpy.transpose(self.datablock, (1, 2, 0))

        data['real'] = junk.real
        data['imag'] = junk.imag

        data = data.reshape((-1))

        data.tofile(self.fp)

        self.datablock.fill(0)

        self.profileIndex = 0
        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1

        self.blockIndex += 1
        self.nTotalBlocks += 1

    def putData(self):
        """Store one profile in the block buffer, flushing it when full.

        Affected:
            self.flagIsNewBlock
            self.profileIndex

        Return:
            0 : no data available, or no more files can be written
            1 : the profile was accepted (and possibly a block was written)
        """
        if self.dataOut.flagNoData:
            return 0

        self.flagIsNewBlock = 0

        # A discontinuity restarts the buffer in a fresh file.
        if self.dataOut.flagDiscontinuousBlock:
            self.datablock.fill(0)
            self.profileIndex = 0
            self.setNextFile()

        if self.profileIndex == 0:
            self.setBasicHeader()

        self.datablock[:, self.profileIndex, :] = self.dataOut.data

        self.profileIndex += 1

        if self.hasAllDataInBuffer():
            self.writeNextBlock()

        return 1

    def __getBlockSize(self):
        """Return the number of bytes of one Voltage data block.

        Each complex sample is two values (real, imag) of dtype_width bytes.
        """
        dtype_width = self.getDtypeWidth()

        blocksize = int(self.dataOut.nHeights * self.dataOut.nChannels * self.profilesPerBlock * dtype_width * 2)

        return blocksize

    def setFirstHeader(self):
        """Build the first-header objects from self.dataOut.

        Affected:
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.processingHeaderObj

        Return:
            None
        """

        self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
        self.systemHeaderObj.nChannels = self.dataOut.nChannels
        self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()

        self.setBasicHeader()

        processingHeaderSize = 40  # fixed part of the processing header, bytes
        self.processingHeaderObj.dtype = 0  # Voltage
        self.processingHeaderObj.blockSize = self.__getBlockSize()
        self.processingHeaderObj.profilesPerBlock = self.profilesPerBlock
        self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
        self.processingHeaderObj.nWindows = 1  # could be self.dataOut.processingHeaderObj.nWindows
        self.processingHeaderObj.processFlags = self.getProcessFlags()
        self.processingHeaderObj.nCohInt = self.dataOut.nCohInt
        self.processingHeaderObj.nIncohInt = 1  # source data is Voltage
        self.processingHeaderObj.totalSpectra = 0  # source data is Voltage

        if self.processingHeaderObj.nWindows != 0:
            self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
            self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
            self.processingHeaderObj.nHeights = self.dataOut.nHeights
            self.processingHeaderObj.samplesWin = self.dataOut.nHeights
            processingHeaderSize += 12  # one sampling-window record (h0, dh, nsa)

        self.processingHeaderObj.size = processingHeaderSize
General Comments 0
You need to be logged in to leave comments. Login now