##// END OF EJS Templates
Bug fixed writing pdata files
Miguel Valdez -
r584:aa8cbd77726d
parent child
Show More
@@ -1,616 +1,618
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JROHeaderIO.py 151 2012-10-31 19:00:51Z murco $
5 5 '''
6 6 import numpy
7 7 import copy
8 8 import datetime
9 9
# Binary layouts (numpy structured dtypes) of the Jicamarca raw-data file
# headers.  All fields are little-endian: '<u4' uint32, '<u2' uint16,
# '<i2' int16, '<f4' float32, '<a20' fixed 20-byte string.

# Per-block basic header (24 bytes): block id, UTC timestamp, timezone.
BASIC_STRUCTURE = numpy.dtype([
        ('nSize','<u4'),
        ('nVersion','<u2'),
        ('nDataBlockId','<u4'),
        ('nUtime','<u4'),
        ('nMilsec','<u2'),
        ('nTimezone','<i2'),
        ('nDstflag','<i2'),
        ('nErrorCount','<u4')
        ])

# Acquisition-system description: sample/profile/channel counts, ADC info.
SYSTEM_STRUCTURE = numpy.dtype([
        ('nSize','<u4'),
        ('nNumSamples','<u4'),
        ('nNumProfiles','<u4'),
        ('nNumChannels','<u4'),
        ('nADCResolution','<u4'),
        ('nPCDIOBusWidth','<u4'),
        ])

# Radar-controller (experiment) configuration; followed on disk by a
# variable-size part (sampling windows, taus, pulse code) that
# RadarControllerHeader reads/writes separately.
RADAR_STRUCTURE = numpy.dtype([
        ('nSize','<u4'),
        ('nExpType','<u4'),
        ('nNTx','<u4'),
        ('fIpp','<f4'),
        ('fTxA','<f4'),
        ('fTxB','<f4'),
        ('nNumWindows','<u4'),
        ('nNumTaus','<u4'),
        ('nCodeType','<u4'),
        ('nLine6Function','<u4'),
        ('nLine5Function','<u4'),
        ('fClock','<f4'),
        ('nPrePulseBefore','<u4'),
        ('nPrePulseAfter','<u4'),
        ('sRangeIPP','<a20'),
        ('sRangeTxA','<a20'),
        ('sRangeTxB','<a20'),
        ])

# One sampling window: first height h0, height step dh, sample count nsa.
SAMPLING_STRUCTURE = numpy.dtype([('h0','<f4'),('dh','<f4'),('nsa','<u4')])


# Signal-processing description: data type, block sizes, integrations.
PROCESSING_STRUCTURE = numpy.dtype([
        ('nSize','<u4'),
        ('nDataType','<u4'),
        ('nSizeOfDataBlock','<u4'),
        ('nProfilesperBlock','<u4'),
        ('nDataBlocksperFile','<u4'),
        ('nNumWindows','<u4'),
        ('nProcessFlags','<u4'),
        ('nCoherentIntegrations','<u4'),
        ('nIncoherentIntegrations','<u4'),
        ('nTotalSpectra','<u4')
        ])
65 65
66 66 class Header(object):
67 67
68 68 def __init__(self):
69 69 raise
70 70
71 71 def copy(self):
72 72 return copy.deepcopy(self)
73 73
74 74 def read(self):
75 75
76 76 raise ValueError
77 77
78 78 def write(self):
79 79
80 80 raise ValueError
81 81
82 82 def printInfo(self):
83 83
84 84 print "#"*100
85 85 print self.__class__.__name__.upper()
86 86 print "#"*100
87 87 for key in self.__dict__.keys():
88 88 print "%s = %s" %(key, self.__dict__[key])
89 89
class BasicHeader(Header):
    # Per-block basic header (BASIC_STRUCTURE, 24 bytes): block counter,
    # UTC timestamp (seconds + milliseconds), timezone and error count.

    size = None
    version = None
    dataBlock = None
    utc = None
    ltc = None
    miliSecond = None
    timeZone = None
    dstFlag = None
    errorCount = None
    datatime = None

    __LOCALTIME = None

    def __init__(self, useLocalTime=True):

        # size is fixed: BASIC_STRUCTURE occupies 24 bytes on disk
        self.size = 24
        self.version = 0
        self.dataBlock = 0
        self.utc = 0
        self.miliSecond = 0
        self.timeZone = 0
        self.dstFlag = 0
        self.errorCount = 0

        self.useLocalTime = useLocalTime

    def read(self, fp):
        """Read one BASIC_STRUCTURE record from the open file *fp*.

        Returns 1 on success; prints the error and returns 0 on failure.
        """
        try:

            header = numpy.fromfile(fp, BASIC_STRUCTURE,1)

            self.size = int(header['nSize'][0])
            self.version = int(header['nVersion'][0])
            self.dataBlock = int(header['nDataBlockId'][0])
            self.utc = int(header['nUtime'][0])
            self.miliSecond = int(header['nMilsec'][0])
            self.timeZone = int(header['nTimezone'][0])
            self.dstFlag = int(header['nDstflag'][0])
            self.errorCount = int(header['nErrorCount'][0])

        except Exception, e:
            print "BasicHeader: "
            print e
            return 0

        return 1

    def write(self, fp):
        """Write this header to *fp* following BASIC_STRUCTURE; returns 1."""
        headerTuple = (self.size,self.version,self.dataBlock,self.utc,self.miliSecond,self.timeZone,self.dstFlag,self.errorCount)
        header = numpy.array(headerTuple, BASIC_STRUCTURE)
        header.tofile(fp)

        return 1

    def get_ltc(self):
        # Local time = UTC minus the timezone offset; timeZone appears to
        # be stored in minutes (*60 -> seconds) — TODO confirm.
        return self.utc - self.timeZone*60

    def set_ltc(self, value):
        # Inverse of get_ltc: store the given local time back as UTC.
        self.utc = value + self.timeZone*60

    def get_datatime(self):
        # Block timestamp as a datetime, derived from local time.
        return datetime.datetime.utcfromtimestamp(self.ltc)

    ltc = property(get_ltc, set_ltc)
    datatime = property(get_datatime)
161 161
162 162 class SystemHeader(Header):
163 163
164 164 size = None
165 165 nSamples = None
166 166 nProfiles = None
167 167 nChannels = None
168 168 adcResolution = None
169 169 pciDioBusWidth = None
170 170
171 171 def __init__(self, nSamples=0, nProfiles=0, nChannels=0, adcResolution=14, pciDioBusWith=0):
172 172
173 173 self.size = 24
174 174 self.nSamples = nSamples
175 175 self.nProfiles = nProfiles
176 176 self.nChannels = nChannels
177 177 self.adcResolution = adcResolution
178 178 self.pciDioBusWidth = pciDioBusWith
179 179
180 180 def read(self, fp):
181 181
182 182 try:
183 183 header = numpy.fromfile(fp,SYSTEM_STRUCTURE,1)
184 184 self.size = header['nSize'][0]
185 185 self.nSamples = header['nNumSamples'][0]
186 186 self.nProfiles = header['nNumProfiles'][0]
187 187 self.nChannels = header['nNumChannels'][0]
188 188 self.adcResolution = header['nADCResolution'][0]
189 189 self.pciDioBusWidth = header['nPCDIOBusWidth'][0]
190 190
191 191 except Exception, e:
192 192 print "SystemHeader: " + e
193 193 return 0
194 194
195 195 return 1
196 196
197 197 def write(self, fp):
198 198
199 199 headerTuple = (self.size,self.nSamples,self.nProfiles,self.nChannels,self.adcResolution,self.pciDioBusWidth)
200 200 header = numpy.array(headerTuple,SYSTEM_STRUCTURE)
201 201 header.tofile(fp)
202 202
203 203 return 1
204 204
205 205 class RadarControllerHeader(Header):
206 206
207 207 size = None
208 208 expType = None
209 209 nTx = None
210 210 ipp = None
211 211 txA = None
212 212 txB = None
213 213 nWindows = None
214 214 numTaus = None
215 215 codeType = None
216 216 line6Function = None
217 217 line5Function = None
218 218 fClock = None
219 219 prePulseBefore = None
220 220 prePulserAfter = None
221 221 rangeIpp = None
222 222 rangeTxA = None
223 223 rangeTxB = None
224 224
225 225 __C = 3e8
226 226
227 227 def __init__(self, expType=2, nTx=1,
228 228 ippKm=None, txA=0, txB=0,
229 229 nWindows=None, nHeights=None, firstHeight=None, deltaHeight=None,
230 230 numTaus=0, line6Function=0, line5Function=0, fClock=0,
231 231 prePulseBefore=0, prePulseAfter=0,
232 232 codeType=0, nCode=0, nBaud=0, code=None,
233 233 flip1=0, flip2=0):
234 234
235 235 self.size = 116
236 236 self.expType = expType
237 237 self.nTx = nTx
238 238 self.ipp = ippKm
239 239 self.txA = txA
240 240 self.txB = txB
241 241 self.rangeIpp = ippKm
242 242 self.rangeTxA = txA
243 243 self.rangeTxB = txB
244 244
245 245 self.nWindows = nWindows
246 246 self.numTaus = numTaus
247 247 self.codeType = codeType
248 248 self.line6Function = line6Function
249 249 self.line5Function = line5Function
250 250 self.fClock = fClock
251 251 self.prePulseBefore = prePulseBefore
252 252 self.prePulserAfter = prePulseAfter
253 253
254 254 self.nHeights = nHeights
255 255 self.firstHeight = firstHeight
256 256 self.deltaHeight = deltaHeight
257 257 self.samplesWin = nHeights
258 258
259 259 self.nCode = nCode
260 260 self.nBaud = nBaud
261 261 self.code = code
262 262 self.flip1 = flip1
263 263 self.flip2 = flip2
264 264
265 self.code_size = int(numpy.ceil(self.nBaud/32.))*self.nCode*4
265 266 # self.dynamic = numpy.array([],numpy.dtype('byte'))
266 267
267 268
268 269 def read(self, fp):
269 270
270 271 try:
271 272 startFp = fp.tell()
272 273 header = numpy.fromfile(fp,RADAR_STRUCTURE,1)
273 274
274 275 self.size = int(header['nSize'][0])
275 276 self.expType = int(header['nExpType'][0])
276 277 self.nTx = int(header['nNTx'][0])
277 278 self.ipp = float(header['fIpp'][0])
278 279 self.txA = float(header['fTxA'][0])
279 280 self.txB = float(header['fTxB'][0])
280 281 self.nWindows = int(header['nNumWindows'][0])
281 282 self.numTaus = int(header['nNumTaus'][0])
282 283 self.codeType = int(header['nCodeType'][0])
283 284 self.line6Function = int(header['nLine6Function'][0])
284 285 self.line5Function = int(header['nLine5Function'][0])
285 286 self.fClock = float(header['fClock'][0])
286 287 self.prePulseBefore = int(header['nPrePulseBefore'][0])
287 288 self.prePulserAfter = int(header['nPrePulseAfter'][0])
288 289 self.rangeIpp = header['sRangeIPP'][0]
289 290 self.rangeTxA = header['sRangeTxA'][0]
290 291 self.rangeTxB = header['sRangeTxB'][0]
291 292 # jump Dynamic Radar Controller Header
292 293 # jumpFp = self.size - 116
293 294 # self.dynamic = numpy.fromfile(fp,numpy.dtype('byte'),jumpFp)
294 295 #pointer backward to dynamic header and read
295 296 # backFp = fp.tell() - jumpFp
296 297 # fp.seek(backFp)
297 298
298 299 samplingWindow = numpy.fromfile(fp,SAMPLING_STRUCTURE,self.nWindows)
299 300
300 301 self.nHeights = int(numpy.sum(samplingWindow['nsa']))
301 302 self.firstHeight = samplingWindow['h0']
302 303 self.deltaHeight = samplingWindow['dh']
303 304 self.samplesWin = samplingWindow['nsa']
304 305
305 306 self.Taus = numpy.fromfile(fp,'<f4',self.numTaus)
306 307
308 self.code_size = 0
307 309 if self.codeType != 0:
308 310 self.nCode = int(numpy.fromfile(fp,'<u4',1))
309 311 self.nBaud = int(numpy.fromfile(fp,'<u4',1))
310 312 self.code = numpy.empty([self.nCode,self.nBaud],dtype='i1')
311 313
312 314 for ic in range(self.nCode):
313 315 temp = numpy.fromfile(fp,'u4',int(numpy.ceil(self.nBaud/32.)))
314 316 for ib in range(self.nBaud-1,-1,-1):
315 317 self.code[ic,ib] = temp[ib/32]%2
316 318 temp[ib/32] = temp[ib/32]/2
317 319 self.code = 2.0*self.code - 1.0
318 320 self.code_size = int(numpy.ceil(self.nBaud/32.))*self.nCode*4
319 321
320 322 if self.line5Function == RCfunction.FLIP:
321 323 self.flip1 = numpy.fromfile(fp,'<u4',1)
322 324
323 325 if self.line6Function == RCfunction.FLIP:
324 326 self.flip2 = numpy.fromfile(fp,'<u4',1)
325 327
326 328 endFp = self.size + startFp
327 329 jumpFp = endFp - fp.tell()
328 330 if jumpFp > 0:
329 331 fp.seek(jumpFp)
330 332
331 333 except Exception, e:
332 334 print "RadarControllerHeader: " + e
333 335 return 0
334 336
335 337 return 1
336 338
337 339 def write(self, fp):
338 340 headerTuple = (self.size,
339 341 self.expType,
340 342 self.nTx,
341 343 self.ipp,
342 344 self.txA,
343 345 self.txB,
344 346 self.nWindows,
345 347 self.numTaus,
346 348 self.codeType,
347 349 self.line6Function,
348 350 self.line5Function,
349 351 self.fClock,
350 352 self.prePulseBefore,
351 353 self.prePulserAfter,
352 354 self.rangeIpp,
353 355 self.rangeTxA,
354 356 self.rangeTxB)
355 357
356 358 header = numpy.array(headerTuple,RADAR_STRUCTURE)
357 359 header.tofile(fp)
358 360
359 361 #dynamic = self.dynamic
360 362 #dynamic.tofile(fp)
361 363
362 364 sampleWindowTuple = (self.firstHeight,self.deltaHeight,self.samplesWin)
363 365 samplingWindow = numpy.array(sampleWindowTuple,SAMPLING_STRUCTURE)
364 366 samplingWindow.tofile(fp)
365 367
366 368 if self.numTaus > 0:
367 369 self.Taus.tofile(fp)
368 370
369 371 if self.codeType !=0:
370 372 nCode = numpy.array(self.nCode, '<u4')
371 373 nCode.tofile(fp)
372 374 nBaud = numpy.array(self.nBaud, '<u4')
373 375 nBaud.tofile(fp)
374 376 code1 = (self.code + 1.0)/2.
375 377
376 378 for ic in range(self.nCode):
377 379 tempx = numpy.zeros(numpy.ceil(self.nBaud/32.))
378 380 start = 0
379 381 end = 32
380 382 for i in range(len(tempx)):
381 383 code_selected = code1[ic,start:end]
382 384 for j in range(len(code_selected)-1,-1,-1):
383 385 if code_selected[j] == 1:
384 386 tempx[i] = tempx[i] + 2**(len(code_selected)-1-j)
385 387 start = start + 32
386 388 end = end + 32
387 389
388 390 tempx = tempx.astype('u4')
389 391 tempx.tofile(fp)
390 392
391 393 if self.line5Function == RCfunction.FLIP:
392 394 self.flip1.tofile(fp)
393 395
394 396 if self.line6Function == RCfunction.FLIP:
395 397 self.flip2.tofile(fp)
396 398
397 399 return 1
398 400
399 401 def get_ippSeconds(self):
400 402 '''
401 403 '''
402 404 ippSeconds = 2.0 * 1000 * self.ipp / self.__C
403 405
404 406 return ippSeconds
405 407
406 408 def set_ippSeconds(self, ippSeconds):
407 409 '''
408 410 '''
409 411
410 412 self.ipp = ippSeconds * self.__C / (2.0*1000)
411 413
412 414 return
413 415
414 416 ippSeconds = property(get_ippSeconds, set_ippSeconds)
415 417
class ProcessingHeader(Header):
    # Signal-processing header (PROCESSING_STRUCTURE) followed on disk by
    # one SAMPLING_STRUCTURE per window and the spectra-combination table
    # (two channel indices per spectrum).

    size = None
    dtype = None
    blockSize = None
    profilesPerBlock = None
    dataBlocksPerFile = None
    nWindows = None
    processFlags = None
    nCohInt = None
    nIncohInt = None
    totalSpectra = None

    flag_dc = None
    flag_cspc = None

    def __init__(self):

        self.size = 0
        self.dtype = 0
        self.blockSize = 0
        self.profilesPerBlock = 0
        self.dataBlocksPerFile = 0
        self.nWindows = 0
        self.processFlags = 0
        self.nCohInt = 0
        self.nIncohInt = 0
        self.totalSpectra = 0

        self.nHeights = 0
        self.firstHeight = 0
        self.deltaHeight = 0
        self.samplesWin = 0
        self.spectraComb = 0
        # self.nCode = None
        # self.code = None
        # self.nBaud = None
        self.shif_fft = False
        self.flag_dc = False
        self.flag_cspc = False

    def read(self, fp):
        """Read the fixed and variable parts from *fp*; returns 1.

        Also derives shif_fft / flag_dc / flag_cspc from processFlags and
        the spectra-combination table.
        """
        # try:
        header = numpy.fromfile(fp,PROCESSING_STRUCTURE,1)
        self.size = int(header['nSize'][0])
        self.dtype = int(header['nDataType'][0])
        self.blockSize = int(header['nSizeOfDataBlock'][0])
        self.profilesPerBlock = int(header['nProfilesperBlock'][0])
        self.dataBlocksPerFile = int(header['nDataBlocksperFile'][0])
        self.nWindows = int(header['nNumWindows'][0])
        # NOTE(review): unlike the other fields this keeps the raw
        # 1-element array (no [0]/int() conversion) — confirm intended.
        self.processFlags = header['nProcessFlags']
        self.nCohInt = int(header['nCoherentIntegrations'][0])
        self.nIncohInt = int(header['nIncoherentIntegrations'][0])
        self.totalSpectra = int(header['nTotalSpectra'][0])

        samplingWindow = numpy.fromfile(fp,SAMPLING_STRUCTURE,self.nWindows)

        self.nHeights = int(numpy.sum(samplingWindow['nsa']))
        self.firstHeight = float(samplingWindow['h0'][0])
        self.deltaHeight = float(samplingWindow['dh'][0])
        self.samplesWin = samplingWindow['nsa'][0]
        # two channel indices per spectrum
        self.spectraComb = numpy.fromfile(fp,'u1',2*self.totalSpectra)

        # if ((self.processFlags & PROCFLAG.DEFINE_PROCESS_CODE) == PROCFLAG.DEFINE_PROCESS_CODE):
        # self.nCode = int(numpy.fromfile(fp,'<u4',1))
        # self.nBaud = int(numpy.fromfile(fp,'<u4',1))
        # self.code = numpy.fromfile(fp,'<f4',self.nCode*self.nBaud).reshape(self.nCode,self.nBaud)

        if ((self.processFlags & PROCFLAG.SHIFT_FFT_DATA) == PROCFLAG.SHIFT_FFT_DATA):
            self.shif_fft = True
        else:
            self.shif_fft = False

        if ((self.processFlags & PROCFLAG.SAVE_CHANNELS_DC) == PROCFLAG.SAVE_CHANNELS_DC):
            self.flag_dc = True

        nChannels = 0
        nPairs = 0
        pairList = []

        # a (i,i) entry is a self-spectrum channel; (i,j) is a cross pair
        for i in range( 0, self.totalSpectra*2, 2 ):
            if self.spectraComb[i] == self.spectraComb[i+1]:
                nChannels = nChannels + 1 #pair of equal channels
            else:
                nPairs = nPairs + 1 #pair of different channels
                pairList.append( (self.spectraComb[i], self.spectraComb[i+1]) )

        self.flag_cspc = False
        if nPairs > 0:
            self.flag_cspc = True

        # except Exception, e:
        # print "Error ProcessingHeader: "
        # return 0

        return 1

    def write(self, fp):
        """Write the fixed part, sampling windows and spectra table to *fp*."""
        headerTuple = (self.size,
                       self.dtype,
                       self.blockSize,
                       self.profilesPerBlock,
                       self.dataBlocksPerFile,
                       self.nWindows,
                       self.processFlags,
                       self.nCohInt,
                       self.nIncohInt,
                       self.totalSpectra)

        header = numpy.array(headerTuple,PROCESSING_STRUCTURE)
        header.tofile(fp)

        if self.nWindows != 0:
            sampleWindowTuple = (self.firstHeight,self.deltaHeight,self.samplesWin)
            samplingWindow = numpy.array(sampleWindowTuple,SAMPLING_STRUCTURE)
            samplingWindow.tofile(fp)


        if self.totalSpectra != 0:
            spectraComb = numpy.array([],numpy.dtype('u1'))
            spectraComb = self.spectraComb
            spectraComb.tofile(fp)

        # if self.processFlags & PROCFLAG.DEFINE_PROCESS_CODE == PROCFLAG.DEFINE_PROCESS_CODE:
        # nCode = numpy.array([self.nCode], numpy.dtype('u4')) #test with data that stores a code; not verified yet
        # nCode.tofile(fp)
        #
        # nBaud = numpy.array([self.nBaud], numpy.dtype('u4'))
        # nBaud.tofile(fp)
        #
        # code = self.code.reshape(self.nCode*self.nBaud)
        # code = code.astype(numpy.dtype('<f4'))
        # code.tofile(fp)

        return 1
552 554
class RCfunction:
    # Radar-controller line-function codes (values of line5Function /
    # line6Function in RadarControllerHeader).
    NONE=0
    FLIP=1
    CODE=2
    SAMPLING=3
    LIN6DIV256=4
    SYNCHRO=5
560 562
class nCodeType:
    # Pulse-code type identifiers (value of RadarControllerHeader.codeType).
    NONE=0
    USERDEFINE=1
    BARKER2=2
    BARKER3=3
    BARKER4=4
    BARKER5=5
    BARKER7=6
    BARKER11=7
    BARKER13=8
    AC128=9
    COMPLEMENTARYCODE2=10
    COMPLEMENTARYCODE4=11
    COMPLEMENTARYCODE8=12
    COMPLEMENTARYCODE16=13
    COMPLEMENTARYCODE32=14
    COMPLEMENTARYCODE64=15
    COMPLEMENTARYCODE128=16
    CODE_BINARY28=17
580 582
class PROCFLAG:
    # Bit flags stored in ProcessingHeader.processFlags.

    # processing operations applied to the data
    COHERENT_INTEGRATION = numpy.uint32(0x00000001)
    DECODE_DATA = numpy.uint32(0x00000002)
    SPECTRA_CALC = numpy.uint32(0x00000004)
    INCOHERENT_INTEGRATION = numpy.uint32(0x00000008)
    POST_COHERENT_INTEGRATION = numpy.uint32(0x00000010)
    SHIFT_FFT_DATA = numpy.uint32(0x00000020)

    # on-disk sample data type
    DATATYPE_CHAR = numpy.uint32(0x00000040)
    DATATYPE_SHORT = numpy.uint32(0x00000080)
    DATATYPE_LONG = numpy.uint32(0x00000100)
    DATATYPE_INT64 = numpy.uint32(0x00000200)
    DATATYPE_FLOAT = numpy.uint32(0x00000400)
    DATATYPE_DOUBLE = numpy.uint32(0x00000800)

    # memory layout of the data block
    DATAARRANGE_CONTIGUOUS_CH = numpy.uint32(0x00001000)
    DATAARRANGE_CONTIGUOUS_H = numpy.uint32(0x00002000)
    DATAARRANGE_CONTIGUOUS_P = numpy.uint32(0x00004000)

    SAVE_CHANNELS_DC = numpy.uint32(0x00008000)
    DEFLIP_DATA = numpy.uint32(0x00010000)
    DEFINE_PROCESS_CODE = numpy.uint32(0x00020000)

    # acquisition system identifiers (note: these share bits, they are
    # codes within ACQ_SYS_MASK rather than independent flags)
    ACQ_SYS_NATALIA = numpy.uint32(0x00040000)
    ACQ_SYS_ECHOTEK = numpy.uint32(0x00080000)
    ACQ_SYS_ADRXD = numpy.uint32(0x000C0000)
    ACQ_SYS_JULIA = numpy.uint32(0x00100000)
    ACQ_SYS_XXXXXX = numpy.uint32(0x00140000)

    EXP_NAME_ESP = numpy.uint32(0x00200000)
    CHANNEL_NAMES_ESP = numpy.uint32(0x00400000)

    # masks selecting each group of bits above
    OPERATION_MASK = numpy.uint32(0x0000003F)
    DATATYPE_MASK = numpy.uint32(0x00000FC0)
    DATAARRANGE_MASK = numpy.uint32(0x00007000)
    ACQ_SYS_MASK = numpy.uint32(0x001C0000)
@@ -1,1345 +1,1348
1 1 '''
2 2 Created on Jul 2, 2014
3 3
4 4 @author: roj-idl71
5 5 '''
6 6 import os
7 7 import sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import time, datetime
13 13 #import h5py
14 14 import traceback
15 15
16 16 try:
17 17 from gevent import sleep
18 18 except:
19 19 from time import sleep
20 20
21 21 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
22 22
23 23 LOCALTIME = True
24 24
def isNumber(cad):
    """
    Check whether the string *cad* can be converted to a number.

    Input:
        cad : string to analyze

    Return:
        True  if the string is numeric
        False otherwise
    """
    try:
        float( cad )
    except:
        return False
    return True
43 43
def isThisFileinRange(filename, startUTSeconds, endUTSeconds):
    """
    Determine whether a data file contains data inside the given date range.

    Inputs:
        filename       : full path of a Jicamarca-format data file (.r)

        startUTSeconds : start of the selected range, in seconds counted
                         from 01/01/1970.
        endUTSeconds   : end of the selected range, in seconds counted
                         from 01/01/1970.

    Return:
        1 if the file's basic-header timestamp falls inside the range,
        0 otherwise (including an unreadable header).

    Exceptions:
        IOError if the file does not exist or cannot be opened.
    """
    basicHeaderObj = BasicHeader(LOCALTIME)

    try:
        fp = open(filename,'rb')
    except IOError:
        traceback.print_exc()
        raise IOError, "The file %s can't be opened" %(filename)

    sts = basicHeaderObj.read(fp)
    fp.close()

    if not(sts):
        print "Skipping the file %s because it has not a valid header" %(filename)
        return 0

    if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
        return 0

    return 1
84 84
def isFileinThisTime(filename, startTime, endTime):
    """
    Check whether a data file falls inside the given time-of-day range.

    Inputs:
        filename  : full path of a Jicamarca-format data file (.r)

        startTime : start of the selected range (datetime.time)

        endTime   : end of the selected range (datetime.time)

    Return:
        The file's basic-header datetime when it falls inside the range,
        None otherwise (including an unreadable header).

    Exceptions:
        IOError if the file does not exist or cannot be opened.
    """


    try:
        fp = open(filename,'rb')
    except IOError:
        traceback.print_exc()
        raise IOError, "The file %s can't be opened" %(filename)

    basicHeaderObj = BasicHeader(LOCALTIME)
    sts = basicHeaderObj.read(fp)
    fp.close()

    # NOTE(review): the datetime is taken from the header *before* sts is
    # checked; on a failed read these are the constructor defaults — confirm
    # this ordering is intended.
    thisDatetime = basicHeaderObj.datatime
    thisTime = thisDatetime.time()

    if not(sts):
        print "Skipping the file %s because it has not a valid header" %(filename)
        return None

    if not ((startTime <= thisTime) and (endTime > thisTime)):
        return None

    return thisDatetime
128 128
def getFileFromSet(path, ext, set):
    """
    Return the filename in *path* matching the year/doy/set pattern, or
    fall back to the last valid file in the folder (None if there is none).
    """
    validFilelist = []
    fileList = os.listdir(path)

    # Expected name layout:
    # 0 1234 567 89A BCDE
    # H YYYY DDD SSS .ext

    for thisFile in fileList:
        try:
            year = int(thisFile[1:5])
            doy = int(thisFile[5:8])
        except:
            continue

        if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
            continue

        validFilelist.append(thisFile)

    # NOTE(review): year/doy keep the values from the *last* file iterated
    # above; if no file name parsed they are unbound (NameError) — confirm
    # callers guarantee at least one valid file in the folder.
    myfile = fnmatch.filter(validFilelist,'*%4.4d%3.3d%3.3d*'%(year,doy,set))

    if len(myfile)!= 0:
        return myfile[0]
    else:
        filename = '*%4.4d%3.3d%3.3d%s'%(year,doy,set,ext.lower())
        print 'the filename %s does not exist'%filename
        print '...going to the last file: '

        if validFilelist:
            validFilelist = sorted( validFilelist, key=str.lower )
            return validFilelist[-1]

    return None
162 162
def getlastFileFromPath(path, ext):
    """
    Return the last file (case-insensitive order) in *path* whose name
    follows the "PYYYYDDDSSS.ext" convention, or None when none matches.

    Input:
        path : folder containing the data files
        ext  : extension of the files in the folder

    Return:
        The name (without path) of the last valid file, or None.
    """
    # Expected name layout:
    # 0 1234 567 89A BCDE
    # H YYYY DDD SSS .ext
    candidates = []

    for entry in os.listdir(path):

        yearField = entry[1:5]
        doyField = entry[5:8]

        if not isNumber(yearField):
            continue

        if not isNumber(doyField):
            continue

        # conversions kept for parity with historical behaviour: a value
        # like "12.5" passes isNumber but makes int() raise here
        int(yearField)
        int(doyField)

        if os.path.splitext(entry)[-1].lower() != ext.lower():
            continue

        candidates.append(entry)

    if not candidates:
        return None

    return sorted(candidates, key=str.lower)[-1]
204 204
def checkForRealPath(path, foldercounter, year, doy, set, ext):
    """
    Find the actual on-disk spelling of a data file path.

    Linux is case sensitive, so this probes every combination of day-folder
    prefix (no folder, 'd' or 'D') and file prefix ('d'/'D' for .r voltage
    files, 'p'/'P' for .pdata spectra files) until an existing file is found.

    Example:
        for the real file .../D2009307/P2009307367.ext the function tries
        .../y2009307367.ext, .../Y2009307367.ext,
        .../x2009307/y2009307367.ext, ... until it hits the right one.

    Return:
        (fullpath, filename) of the existing file; otherwise
        (None, lastTriedFilename) — or (None, None) for an unknown *ext*.
    """
    fullfilename = None
    find_flag = False
    filename = None

    # None means "files directly under path" (no xYYYYDDD day folder)
    prefixDirList = [None,'d','D']
    if ext.lower() == ".r": #voltage
        prefixFileList = ['d','D']
    elif ext.lower() == ".pdata": #spectra
        prefixFileList = ['p','P']
    else:
        return None, filename

    # sweep all possible combinations
    for prefixDir in prefixDirList:
        thispath = path
        # idiom fix: compare against None with `is not`, not `!=`
        if prefixDir is not None:
            # build the day-folder name xYYYYDDD (x = d or D)
            if foldercounter == 0:
                thispath = os.path.join(path, "%s%04d%03d" % ( prefixDir, year, doy ))
            else:
                thispath = os.path.join(path, "%s%04d%03d_%02d" % ( prefixDir, year, doy , foldercounter))
        for prefixFile in prefixFileList: #sweep both case variants of the file prefix
            filename = "%s%04d%03d%03d%s" % ( prefixFile, year, doy, set, ext ) #candidate file name xYYYYDDDSSS.ext
            fullfilename = os.path.join( thispath, filename ) #full candidate path

            if os.path.exists( fullfilename ): #check that it exists
                find_flag = True
                break
        if find_flag:
            break

    if not find_flag:
        return None, filename

    return fullfilename, filename
263 263
def isDoyFolder(folder):
    """
    Return 1 when *folder* is named like a day folder ("xYYYYDDD..."),
    i.e. characters 1-4 parse as a year and 5-7 as a day of year;
    return 0 otherwise.
    """
    try:
        int(folder[1:5])
        int(folder[5:8])
    except:
        return 0

    return 1
276 276
class JRODataIO:
    # Common state shared by the JRO data readers and writers: the four
    # file headers, the currently-open file and block/profile bookkeeping.

    c = 3E8  # speed of light [m/s]

    isConfig = False

    basicHeaderObj = None

    systemHeaderObj = None

    radarControllerHeaderObj = None

    processingHeaderObj = None

    online = 0

    dtype = None

    pathList = []

    filenameList = []

    filename = None

    ext = None

    flagIsNewFile = 1

    flagDiscontinuousBlock = 0

    flagIsNewBlock = 0

    fp = None  # currently-open file object

    firstHeaderSize = 0

    basicHeaderSize = 24  # matches BasicHeader.size / BASIC_STRUCTURE

    versionFile = 1103

    fileSize = None

    # ippSeconds = None

    fileSizeByHeader = None

    fileIndex = None

    profileIndex = None

    blockIndex = None

    nTotalBlocks = None

    maxTimeStep = 30

    lastUTTime = None

    datablock = None

    dataOut = None

    blocksize = None

    getByBlock = False

    def __init__(self):
        # Abstract base: concrete readers/writers define their own __init__.
        raise ValueError, "Not implemented"

    def run(self):
        # Abstract: implemented by subclasses.
        raise ValueError, "Not implemented"
350 350
351 351 class JRODataReader(JRODataIO):
352 352
353 353 nReadBlocks = 0
354 354
355 355 delay = 10 #number of seconds waiting a new file
356 356
357 357 nTries = 3 #quantity tries
358 358
359 359 nFiles = 3 #number of files for searching
360 360
361 361 path = None
362 362
363 363 foldercounter = 0
364 364
365 365 flagNoMoreFiles = 0
366 366
367 367 datetimeList = []
368 368
369 369 __isFirstTimeOnline = 1
370 370
371 371 __printInfo = True
372 372
373 373 profileIndex = None
374 374
375 375 nTxs = 1
376 376
377 377 txIndex = None
378 378
    def __init__(self):

        """
        Abstract constructor: JRODataReader subclasses must provide their
        own __init__.
        """

        raise ValueError, "This method has not been implemented"
386 386
387 387
    def createObjByDefault(self):
        """
        Create the default output object; must be implemented by each
        concrete reader.
        """
        raise ValueError, "This method has not been implemented"
393 393
    def getBlockDimension(self):
        # Compute the shape of a data block; implemented by subclasses.
        raise ValueError, "No implemented"
397 397
    def __searchFilesOffLine(self,
                            path,
                            startDate,
                            endDate,
                            startTime=datetime.time(0,0,0),
                            endTime=datetime.time(23,59,59),
                            set=None,
                            expLabel='',
                            ext='.r',
                            walk=True):
        """
        Build the list of data files to read in offline mode.

        With walk=True each comma-separated entry of *path* is scanned for
        day folders (isDoyFolder) covering startDate..endDate; with
        walk=False the entries themselves are used as data folders.  Files
        are then filtered by extension and by time-of-day range
        (isFileinThisTime).

        Side effects: stores the results in self.filenameList and
        self.datetimeList.

        Return:
            (pathList, filenameList), or (None, None) when nothing matches.
        """
        pathList = []

        if not walk:
            #pathList.append(path)
            multi_path = path.split(',')
            for single_path in multi_path:
                pathList.append(single_path)

        else:
            #dirList = []
            multi_path = path.split(',')
            for single_path in multi_path:
                dirList = []
                for thisPath in os.listdir(single_path):
                    if not os.path.isdir(os.path.join(single_path,thisPath)):
                        continue
                    if not isDoyFolder(thisPath):
                        continue

                    dirList.append(thisPath)

                if not(dirList):
                    return None, None

                thisDate = startDate

                # collect one folder (or more) per day in the date range
                while(thisDate <= endDate):
                    year = thisDate.timetuple().tm_year
                    doy = thisDate.timetuple().tm_yday

                    matchlist = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy) + '*')
                    if len(matchlist) == 0:
                        thisDate += datetime.timedelta(1)
                        continue
                    for match in matchlist:
                        pathList.append(os.path.join(single_path,match,expLabel))

                    thisDate += datetime.timedelta(1)

        if pathList == []:
            print "Any folder was found for the date range: %s-%s" %(startDate, endDate)
            return None, None

        print "%d folder(s) was(were) found for the date range: %s - %s" %(len(pathList), startDate, endDate)

        filenameList = []
        datetimeList = []
        pathDict = {}
        filenameList_to_sort = []

        # index each folder by its first data file so folders can be
        # visited in chronological order of their content
        for i in range(len(pathList)):

            thisPath = pathList[i]

            fileList = glob.glob1(thisPath, "*%s" %ext)
            if len(fileList) < 1:
                continue
            fileList.sort()
            pathDict.setdefault(fileList[0])
            pathDict[fileList[0]] = i
            filenameList_to_sort.append(fileList[0])

        filenameList_to_sort.sort()

        for file in filenameList_to_sort:
            thisPath = pathList[pathDict[file]]

            fileList = glob.glob1(thisPath, "*%s" %ext)
            fileList.sort()

            # NOTE(review): the inner loop reuses the name "file", shadowing
            # the outer loop variable — works, but confusing.
            for file in fileList:

                filename = os.path.join(thisPath,file)
                thisDatetime = isFileinThisTime(filename, startTime, endTime)

                if not(thisDatetime):
                    continue

                filenameList.append(filename)
                datetimeList.append(thisDatetime)

        if not(filenameList):
            print "Any file was found for the time range %s - %s" %(startTime, endTime)
            return None, None

        print "%d file(s) was(were) found for the time range: %s - %s" %(len(filenameList), startTime, endTime)
        print

        for i in range(len(filenameList)):
            print "%s -> [%s]" %(filenameList[i], datetimeList[i].ctime())

        self.filenameList = filenameList
        self.datetimeList = datetimeList

        return pathList, filenameList
504 504
    def __searchFilesOnLine(self, path, expLabel = "", ext = None, walk=True, set=None):

        """
        Locate the newest data file available under `path` for online reading.

        Looks for the last (lexicographically highest) doy folder, then for the
        last file in it (or the file matching `set` when given), and verifies
        that the file has enough data to be read.

        Input:
            path     : folder that contains the data files
            expLabel : sub-experiment name (subfolder inside the doy folder)
            ext      : file extension to look for
            walk     : when False, `path` itself is used as the data folder;
                       when True, doy subfolders (dYYYYDDD) are scanned
            set      : explicit file set number to pick; None selects the last file

        Return:
            (fullpath, foldercounter, filename, year, doy, set) on success,
            or six Nones when no folder/file was found or the file failed
            verification.
        """
        dirList = []

        if not walk:
            fullpath = path
            foldercounter = 0
        else:
            # Keep only subdirectories that look like doy folders
            for thisPath in os.listdir(path):
                if not os.path.isdir(os.path.join(path,thisPath)):
                    continue
                if not isDoyFolder(thisPath):
                    continue

                dirList.append(thisPath)

            if not(dirList):
                return None, None, None, None, None, None

            dirList = sorted( dirList, key=str.lower )

            # Most recent doy folder; a "_N" suffix encodes the folder counter
            doypath = dirList[-1]
            foldercounter = int(doypath.split('_')[1]) if len(doypath.split('_'))>1 else 0
            fullpath = os.path.join(path, doypath, expLabel)


        print "%s folder was found: " %(fullpath )

        if set == None:
            filename = getlastFileFromPath(fullpath, ext)
        else:
            filename = getFileFromSet(fullpath, ext, set)

        if not(filename):
            return None, None, None, None, None, None

        print "%s file was found" %(filename)

        if not(self.__verifyFile(os.path.join(fullpath, filename))):
            return None, None, None, None, None, None

        # Filename layout: <optchar>YYYYDDDSSS<ext> — parse year/doy/set from it
        year = int( filename[1:5] )
        doy = int( filename[5:8] )
        set = int( filename[8:11] )

        return fullpath, foldercounter, filename, year, doy, set
574 574
    def __setNextFileOffline(self):
        """
        Open the next valid file from self.filenameList.

        Skips files that fail __verifyFile.  On success updates
        fileIndex/filename/fileSize/fp and sets flagIsNewFile; returns 1.
        When the list is exhausted sets flagNoMoreFiles and returns 0.
        """
        idFile = self.fileIndex

        while (True):
            idFile += 1
            if not(idFile < len(self.filenameList)):
                self.flagNoMoreFiles = 1
#                 print "[Reading] No more Files"
                return 0

            filename = self.filenameList[idFile]

            if not(self.__verifyFile(filename)):
                continue

            fileSize = os.path.getsize(filename)
            fp = open(filename,'rb')
            break

        self.flagIsNewFile = 1
        self.fileIndex = idFile
        self.filename = filename
        self.fileSize = fileSize
        self.fp = fp

#         print "[Reading] Setting the file: %s"%self.filename

        return 1
604 604
    def __setNextFileOnline(self):
        """
        Find the next file with enough data to be read, inside a specific
        folder; when no valid file is found, wait a fixed delay and retry
        over the next possible n files.

        Affected:
            self.flagIsNewFile
            self.filename
            self.fileSize
            self.fp
            self.set
            self.flagNoMoreFiles

        Return:
            0 : no valid next file could be found after the search
            1 : the file was opened successfully and is ready to be read

        Exceptions:
            If a given file cannot be opened
        """
        nFiles = 0
        fileOk_flag = False
        firstTime_flag = True

        self.set += 1

        # set numbers roll over at 999; a new folder counter starts
        if self.set > 999:
            self.set = 0
            self.foldercounter += 1

        # Look for the first available file
        fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
        if fullfilename:
            if self.__verifyFile(fullfilename, False):
                fileOk_flag = True

        # If no file was found, wait and search again
        if not(fileOk_flag):
            for nFiles in range(self.nFiles+1): # probe the next self.nFiles+1 candidate files

                if firstTime_flag: # on the first pass, retry self.nTries times
                    tries = self.nTries
                else:
                    tries = 1 # afterwards only try once per candidate

                for nTries in range( tries ):
                    if firstTime_flag:
                        print "\t[Reading] Waiting %0.2f sec for the file \"%s\" , try %03d ..." % ( self.delay, filename, nTries+1 )
                        sleep( self.delay )
                    else:
                        print "\t[Reading] Searching the next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext)

                    fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
                    if fullfilename:
                        if self.__verifyFile(fullfilename):
                            fileOk_flag = True
                            break

                if fileOk_flag:
                    break

                firstTime_flag = False

                print "\t[Reading] Skipping the file \"%s\" due to this file doesn't exist" % filename
                self.set += 1

                if nFiles == (self.nFiles-1): # candidate list exhausted: move to the next day's folder
                    self.set = 0
                    self.doy += 1
                    self.foldercounter = 0

        if fileOk_flag:
            self.fileSize = os.path.getsize( fullfilename )
            self.filename = fullfilename
            self.flagIsNewFile = 1
            if self.fp != None: self.fp.close()
            self.fp = open(fullfilename, 'rb')
            self.flagNoMoreFiles = 0
#             print '[Reading] Setting the file: %s' % fullfilename
        else:
            self.fileSize = 0
            self.filename = None
            self.flagIsNewFile = 0
            self.fp = None
            self.flagNoMoreFiles = 1
#             print '[Reading] No more files to read'

        return fileOk_flag
694 694
695 695 def setNextFile(self):
696 696 if self.fp != None:
697 697 self.fp.close()
698 698
699 699 if self.online:
700 700 newFile = self.__setNextFileOnline()
701 701 else:
702 702 newFile = self.__setNextFileOffline()
703 703
704 704 if not(newFile):
705 print '[Reading] No more files to read'
705 706 return 0
706 707
708 print '[Reading] Setting the file: %s' % self.filename
709
707 710 self.__readFirstHeader()
708 711 self.nReadBlocks = 0
709 712 return 1
710 713
    def __waitNewBlock(self):
        """
        Return 1 if a new data block was found, 0 otherwise.

        In offline mode this always returns 0.  In online mode it re-opens
        the current file up to self.nTries times, waiting self.delay seconds
        between tries, until enough bytes for one more block (plus a basic
        header) have been appended to the file.
        """
        if not self.online:
            return 0

        # All the blocks announced by the header were already read
        if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
            return 0

        currentPointer = self.fp.tell()

        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        for nTries in range( self.nTries ):

            # Re-open to pick up data appended by the acquisition process
            self.fp.close()
            self.fp = open( self.filename, 'rb' )
            self.fp.seek( currentPointer )

            self.fileSize = os.path.getsize( self.filename )
            currentSize = self.fileSize - currentPointer

            if ( currentSize >= neededSize ):
                self.basicHeaderObj.read(self.fp)
                return 1

            # File already reached its final size: nothing more will arrive
            if self.fileSize == self.fileSizeByHeader:
#                 self.flagEoF = True
                return 0

            print "[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
            sleep( self.delay )


        return 0
749 752
    def waitDataBlock(self,pointer_location):
        """
        Wait (up to self.nTries * self.delay seconds) until the current file
        holds a full data block starting at `pointer_location`.

        Re-opens the file on every try so that data appended by the
        acquisition process becomes visible.  Returns 1 when enough data is
        available, 0 on timeout.
        """
        currentPointer = pointer_location

        neededSize = self.processingHeaderObj.blockSize #+ self.basicHeaderSize

        for nTries in range( self.nTries ):
            self.fp.close()
            self.fp = open( self.filename, 'rb' )
            self.fp.seek( currentPointer )

            self.fileSize = os.path.getsize( self.filename )
            currentSize = self.fileSize - currentPointer

            if ( currentSize >= neededSize ):
                return 1

            print "[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
            sleep( self.delay )

        return 0
771 774
    def __jumpToLastBlock(self):
        """
        On the first online read, skip ahead to the last complete block in
        the file so reading starts at the live edge of the acquisition.

        No-op after the first call (guarded by __isFirstTimeOnline).
        """
        if not(self.__isFirstTimeOnline):
            return

        csize = self.fileSize - self.fp.tell()
        blocksize = self.processingHeaderObj.blockSize

        # Skip the first data block (its basic header was already read)
        if csize > self.processingHeaderObj.blockSize:
            self.fp.seek(self.fp.tell() + blocksize)
        else:
            return

        csize = self.fileSize - self.fp.tell()
        neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
        # Step block by block; on overshoot, back up one block and stop
        while True:

            if self.fp.tell()<self.fileSize:
                self.fp.seek(self.fp.tell() + neededsize)
            else:
                self.fp.seek(self.fp.tell() - neededsize)
                break

#         csize = self.fileSize - self.fp.tell()
#         neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
#         factor = int(csize/neededsize)
#         if factor > 0:
#             self.fp.seek(self.fp.tell() + factor*neededsize)

        self.flagIsNewFile = 0
        self.__isFirstTimeOnline = 0
804 807
    def __setNewBlock(self):
        """
        Position the file pointer at the next data block, reading its basic
        header, switching to the next file when the current one is exhausted.

        Also sets flagDiscontinuousBlock when the time gap to the previous
        block exceeds self.maxTimeStep.  Returns 1 on success, 0 when there
        is nothing more to read.
        """
        if self.fp == None:
            return 0

        if self.online:
            self.__jumpToLastBlock()

        # A freshly opened file already has its headers read
        if self.flagIsNewFile:
            return 1

        self.lastUTTime = self.basicHeaderObj.utc
        currentSize = self.fileSize - self.fp.tell()
        neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

        # Enough data left in this file for one more block
        if (currentSize >= neededSize):
            self.basicHeaderObj.read(self.fp)
            return 1

        # Online mode: the block may still be arriving
        if self.__waitNewBlock():
            return 1

        if not(self.setNextFile()):
            return 0

        deltaTime = self.basicHeaderObj.utc - self.lastUTTime #

        self.flagDiscontinuousBlock = 0

        if deltaTime > self.maxTimeStep:
            self.flagDiscontinuousBlock = 1

        return 1
838 841
839 842 def readNextBlock(self):
840 843 if not(self.__setNewBlock()):
841 844 return 0
842 845
843 846 if not(self.readBlock()):
844 847 return 0
845 848
846 849 return 1
847 850
848 851 def __readFirstHeader(self):
849 852
850 853 self.basicHeaderObj.read(self.fp)
851 854 self.systemHeaderObj.read(self.fp)
852 855 self.radarControllerHeaderObj.read(self.fp)
853 856 self.processingHeaderObj.read(self.fp)
854 857
855 858 self.firstHeaderSize = self.basicHeaderObj.size
856 859
857 860 datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
858 861 if datatype == 0:
859 862 datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
860 863 elif datatype == 1:
861 864 datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')])
862 865 elif datatype == 2:
863 866 datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')])
864 867 elif datatype == 3:
865 868 datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')])
866 869 elif datatype == 4:
867 870 datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')])
868 871 elif datatype == 5:
869 872 datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')])
870 873 else:
871 874 raise ValueError, 'Data type was not defined'
872 875
873 876 self.dtype = datatype_str
874 877 #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
875 878 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1)
876 879 # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
877 880 # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
878 881 self.getBlockDimension()
879 882
    def __verifyFile(self, filename, msgFlag=True):
        """
        Check that `filename` can be opened and holds at least one full data
        block.

        When the block size is not known yet (neededSize == 0), the file's
        own headers are parsed to obtain it.  Returns True when the file has
        enough data, False otherwise; messages are printed only when
        msgFlag is True.
        """
        msg = None
        try:
            fp = open(filename, 'rb')
            currentPosition = fp.tell()
        except IOError:
            traceback.print_exc()
            if msgFlag:
                print "[Reading] The file %s can't be opened" % (filename)
            return False

        neededSize = self.processingHeaderObj.blockSize + self.firstHeaderSize

        # Block size unknown: read it from this file's own headers
        if neededSize == 0:
            basicHeaderObj = BasicHeader(LOCALTIME)
            systemHeaderObj = SystemHeader()
            radarControllerHeaderObj = RadarControllerHeader()
            processingHeaderObj = ProcessingHeader()

            try:
                if not( basicHeaderObj.read(fp) ): raise IOError
                if not( systemHeaderObj.read(fp) ): raise IOError
                if not( radarControllerHeaderObj.read(fp) ): raise IOError
                if not( processingHeaderObj.read(fp) ): raise IOError
#                 data_type = int(numpy.log2((processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))

                neededSize = processingHeaderObj.blockSize + basicHeaderObj.size

            except IOError:
                traceback.print_exc()
                if msgFlag:
                    print "[Reading] The file %s is empty or it hasn't enough data" % filename

                fp.close()
                return False
            else:
                msg = "[Reading] Skipping the file %s due to it hasn't enough data" %filename

        fp.close()
        fileSize = os.path.getsize(filename)
        currentSize = fileSize - currentPosition
        if currentSize < neededSize:
            if msgFlag and (msg != None):
                print msg #print"\tSkipping the file %s due to it hasn't enough data" %filename
            return False

        return True
927 930
    def setup(self,
              path=None,
              startDate=None,
              endDate=None,
              startTime=datetime.time(0,0,0),
              endTime=datetime.time(23,59,59),
              set=None,
              expLabel = "",
              ext = None,
              online = False,
              delay = 60,
              walk = True,
              getblock = False,
              nTxs = 1):
        """
        Configure the reader and open the first file.

        Input:
            path      : folder(s) that contain the data (comma-separated list allowed)
            startDate, endDate   : date range (offline mode)
            startTime, endTime   : time-of-day range (offline mode)
            set       : explicit file set number (online mode)
            expLabel  : sub-experiment subfolder name
            ext       : file extension; defaults to self.ext
            online    : True to follow files being written live
            delay     : seconds to wait between online retries
            walk      : scan doy subfolders when True
            getblock  : read data by whole blocks
            nTxs      : number of transmitters (scales ippSeconds/nProfiles)

        Exits the process (sys.exit) when no file matches the requested range.
        """

        if path == None:
            raise ValueError, "[Reading] The path is not valid"

        if ext == None:
            ext = self.ext

        if online:
            print "[Reading] Searching files in online mode..."

            # Keep probing until a valid live file shows up
            for nTries in range( self.nTries ):
                fullpath, foldercounter, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext, walk=walk, set=set)

                if fullpath:
                    break

                print '[Reading] Waiting %0.2f sec for an valid file in %s: try %02d ...' % (self.delay, path, nTries+1)
                sleep( self.delay )

            if not(fullpath):
                print "[Reading] There 'isn't any valid file in %s" % path
                return None

            self.year = year
            self.doy = doy
            self.set = set - 1    # __setNextFileOnline will increment back to `set`
            self.path = path
            self.foldercounter = foldercounter
            last_set = None

        else:
            print "[Reading] Searching files in offline mode ..."
            pathList, filenameList = self.__searchFilesOffLine(path, startDate=startDate, endDate=endDate,
                                                               startTime=startTime, endTime=endTime,
                                                               set=set, expLabel=expLabel, ext=ext,
                                                               walk=walk)

            if not(pathList):
                print "[Reading] No *%s files into the folder %s \nfor the range: %s - %s"%(ext, path,
                                datetime.datetime.combine(startDate,startTime).ctime(),
                                datetime.datetime.combine(endDate,endTime).ctime())

                sys.exit(-1)


            self.fileIndex = -1
            self.pathList = pathList
            self.filenameList = filenameList
            # Last set number found, used below to compute last_block
            file_name = os.path.basename(filenameList[-1])
            basename, ext = os.path.splitext(file_name)
            last_set = int(basename[-3:])

        self.online = online
        self.delay = delay
        ext = ext.lower()
        self.ext = ext
        self.getByBlock = getblock
        self.nTxs = int(nTxs)

        if not(self.setNextFile()):
            if (startDate!=None) and (endDate!=None):
                print "[Reading] No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
            elif startDate != None:
                print "[Reading] No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
            else:
                print "[Reading] No files"

            sys.exit(-1)

#         self.updateDataHeader()
        if last_set != None:
            self.dataOut.last_block = last_set * self.processingHeaderObj.dataBlocksPerFile + self.basicHeaderObj.dataBlock
        return
1015 1018
1016 1019 def getBasicHeader(self):
1017 1020
1018 1021 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000. + self.profileIndex * self.radarControllerHeaderObj.ippSeconds
1019 1022
1020 1023 self.dataOut.flagDiscontinuousBlock = self.flagDiscontinuousBlock
1021 1024
1022 1025 self.dataOut.timeZone = self.basicHeaderObj.timeZone
1023 1026
1024 1027 self.dataOut.dstFlag = self.basicHeaderObj.dstFlag
1025 1028
1026 1029 self.dataOut.errorCount = self.basicHeaderObj.errorCount
1027 1030
1028 1031 self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime
1029 1032
1030 1033 self.dataOut.ippSeconds = self.radarControllerHeaderObj.ippSeconds/self.nTxs
1031 1034
1032 1035 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock*self.nTxs
1033 1036
1034 1037
    def getFirstHeader(self):
        """Abstract: subclasses must copy the first header into dataOut."""

        raise ValueError, "This method has not been implemented"
1038 1041
    def getData(self):
        """Abstract: subclasses must deliver the next data unit in dataOut."""

        raise ValueError, "This method has not been implemented"
1042 1045
    def hasNotDataInBuffer(self):
        """Abstract: subclasses must report whether the buffer is exhausted."""

        raise ValueError, "This method has not been implemented"
1046 1049
    def readBlock(self):
        """Abstract: subclasses must read one data block from the file."""

        raise ValueError, "This method has not been implemented"
1050 1053
    def isEndProcess(self):
        """Return the no-more-files flag (truthy once all input is consumed)."""

        return self.flagNoMoreFiles
1054 1057
    def printReadBlocks(self):
        """Print how many blocks have been read from the current file."""

        print "[Reading] Number of read blocks per file %04d" %self.nReadBlocks
1058 1061
    def printTotalBlocks(self):
        """Print the total number of blocks read across all files."""

        print "[Reading] Number of read blocks %04d" %self.nTotalBlocks
1062 1065
    def printNumberOfBlock(self):
        """Print the current block number/time when a new block was just read."""

        if self.flagIsNewBlock:
            print "[Reading] Block No. %04d, Total blocks %04d -> %s" %(self.basicHeaderObj.dataBlock, self.nTotalBlocks, self.dataOut.datatime.ctime())
            self.dataOut.blocknow = self.basicHeaderObj.dataBlock
1068 1071
1069 1072 def printInfo(self):
1070 1073
1071 1074 if self.__printInfo == False:
1072 1075 return
1073 1076
1074 1077 self.basicHeaderObj.printInfo()
1075 1078 self.systemHeaderObj.printInfo()
1076 1079 self.radarControllerHeaderObj.printInfo()
1077 1080 self.processingHeaderObj.printInfo()
1078 1081
1079 1082 self.__printInfo = False
1080 1083
1081 1084
1082 1085 def run(self, **kwargs):
1083 1086
1084 1087 if not(self.isConfig):
1085 1088
1086 1089 # self.dataOut = dataOut
1087 1090 self.setup(**kwargs)
1088 1091 self.isConfig = True
1089 1092
1090 1093 self.getData()
1091 1094
class JRODataWriter(JRODataIO):

    """
    Writes data to processed files (.r or .pdata).  Data is always written
    one block at a time: a first (long) header when a file is created, then a
    basic header before each subsequent block.
    """

    # Index of the block being written inside the current file
    blockIndex = 0

    # Destination folder for output files
    path = None

    # Current file set number (SSS in xYYYYDDDSSS.ext)
    setFile = None

    profilesPerBlock = None

    blocksPerFile = None

    nWriteBlocks = 0

    def __init__(self, dataOut=None):
        # Abstract base: concrete writers must define their own constructor
        raise ValueError, "Not implemented"


    def hasAllDataInBuffer(self):
        # Abstract: report whether a full block is buffered and ready to write
        raise ValueError, "Not implemented"


    def setBlockDimension(self):
        # Abstract: size the write buffers from the processing header
        raise ValueError, "Not implemented"


    def writeBlock(self):
        # Abstract: serialize one data block to self.fp
        raise ValueError, "No implemented"


    def putData(self):
        # Abstract: accumulate incoming data and trigger block writes
        raise ValueError, "No implemented"


    def setBasicHeader(self):
        """Fill the basic header from dataOut (timestamp, block id, flags)."""

        self.basicHeaderObj.size = self.basicHeaderSize #bytes
        self.basicHeaderObj.version = self.versionFile
        self.basicHeaderObj.dataBlock = self.nTotalBlocks

        # Split the float utctime into whole seconds + milliseconds
        utc = numpy.floor(self.dataOut.utctime)
        milisecond = (self.dataOut.utctime - utc)* 1000.0

        self.basicHeaderObj.utc = utc
        self.basicHeaderObj.miliSecond = milisecond
        self.basicHeaderObj.timeZone = self.dataOut.timeZone
        self.basicHeaderObj.dstFlag = self.dataOut.dstFlag
        self.basicHeaderObj.errorCount = self.dataOut.errorCount

    def setFirstHeader(self):
        """
        Build a copy of the First Header.

        Affected:

            self.basicHeaderObj
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.processingHeaderObj self.

        Return:
            None
        """

        raise ValueError, "No implemented"

    def __writeFirstHeader(self):
        """
        Write the first header of the file, i.e. the Basic header plus the
        Long header (SystemHeader, RadarControllerHeader, ProcessingHeader).

        Affected:
            __dataType

        Return:
            None
        """

        # Compute the total first-header size before writing
        sizeLongHeader = self.systemHeaderObj.size + self.radarControllerHeaderObj.size + self.processingHeaderObj.size
        self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader

        self.basicHeaderObj.write(self.fp)
        self.systemHeaderObj.write(self.fp)
        self.radarControllerHeaderObj.write(self.fp)
        self.processingHeaderObj.write(self.fp)

        self.dtype = self.dataOut.dtype

    def __setNewBlock(self):
        """
        For a brand-new file the First Header is (already) written; otherwise
        write just a Basic Header, rolling to the next file when the current
        one is full.

        Return:
            0 : nothing could be written
            1 : a Basic or First Header was written
        """
        if self.fp == None:
            self.setNextFile()

        if self.flagIsNewFile:
            return 1

        if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
            self.basicHeaderObj.write(self.fp)
            return 1

        if not( self.setNextFile() ):
            return 0

        return 1


    def writeNextBlock(self):
        """
        Prepare the next block position and write the block.

        Return:
            0 : the data block could not be written
            1 : the data block was written
        """
        if not( self.__setNewBlock() ):
            return 0

        self.writeBlock()

        return 1

    def setNextFile(self):
        """
        Create and open the next output file.

        Affected:
            self.filename
            self.subfolder
            self.fp
            self.setFile
            self.flagIsNewFile

        Return:
            0 : the file cannot be written
            1 : the file is ready to be written
        """
        ext = self.ext
        path = self.path

        if self.fp != None:
            self.fp.close()

        # Output goes into a doy subfolder: dYYYYDDD
        timeTuple = time.localtime( self.dataOut.utctime)
        subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)

        fullpath = os.path.join( path, subfolder )
        if not( os.path.exists(fullpath) ):
            os.mkdir(fullpath)
            self.setFile = -1 # initialize the set counter
        else:
            filesList = os.listdir( fullpath )
            if len( filesList ) > 0:
                filesList = sorted( filesList, key=str.lower )
                filen = filesList[-1]
                # The filename must follow this layout:
                # 0 1234 567 89A BCDE (hex)
                # x YYYY DDD SSS .ext
                if isNumber( filen[8:11] ):
                    self.setFile = int( filen[8:11] ) # continue from the last file's set number
                else:
                    self.setFile = -1
            else:
                self.setFile = -1 # initialize the set counter

        setFile = self.setFile
        setFile += 1

        filen = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
                                         timeTuple.tm_year,
                                         timeTuple.tm_yday,
                                         setFile,
                                         ext )

        filename = os.path.join( path, subfolder, filen )

        fp = open( filename,'wb' )

        self.blockIndex = 0

        # store attributes
        self.filename = filename
        self.subfolder = subfolder
        self.fp = fp
        self.setFile = setFile
        self.flagIsNewFile = 1

        self.setFirstHeader()

        print '[Writing] file: %s'%self.filename

        self.__writeFirstHeader()

        return 1

    def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=64, set=0, ext=None):
        """
        Configure the output format and write the First Header.

        Inputs:
            path   : destination path where the files will be created
            format : format in which a file will be saved
            set    : the file set number

        Return:
            0 : setup failed
            1 : setup succeeded
        """

        if ext == None:
            ext = self.ext

        ext = ext.lower()

        self.ext = ext

        self.path = path

        self.setFile = set - 1    # setNextFile will increment back to `set`

        self.blocksPerFile = blocksPerFile

        self.profilesPerBlock = profilesPerBlock

        self.dataOut = dataOut

        if not(self.setNextFile()):
            print "[Writing] There isn't a next file"
            return 0

        self.setBlockDimension()

        return 1

    def run(self, dataOut, **kwargs):
        """Configure the writer on the first call, then push incoming data."""

        if not(self.isConfig):

            self.setup(dataOut, **kwargs)
            self.isConfig = True

        self.putData()
1345 1348
@@ -1,764 +1,764
1 1 '''
2 2 Created on Jul 2, 2014
3 3
4 4 @author: roj-idl71
5 5 '''
6 6 import numpy
7 7
8 8 from jroIO_base import LOCALTIME, JRODataReader, JRODataWriter
9 9 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
10 10 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
11 11 from schainpy.model.data.jrodata import Spectra
12 12
class SpectraReader(JRODataReader, ProcessingUnit):
    """
    Reads spectra data from processed (.pdata) files. Data is always read by
    blocks. The data read (3-D arrays) is kept in three buffers: the Self
    Spectra, the Cross Spectra and the DC channels.

        equalChannelPairs     * heights * profiles (Self Spectra)
        differentChannelPairs * heights * profiles (Cross Spectra)
        channels * heights                         (DC Channels)

    This class holds instances of the BasicHeader, SystemHeader,
    RadarControllerHeader and Spectra classes. The first three store the
    data-header information (metadata) and the last one (Spectra) receives a
    data block from the buffer every time the "getData" method is executed.

    Example:
        dpath = "/home/myuser/data"

        startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)

        endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)

        readerObj = SpectraReader()

        readerObj.setup(dpath, startTime, endTime)

        while(True):

            readerObj.getData()

            print readerObj.data_spc

            print readerObj.data_cspc

            print readerObj.data_dc

            if readerObj.flagNoMoreFiles:
                break

    """

    # points to read per block for each buffer (set by getBlockDimension)
    pts2read_SelfSpectra = 0

    pts2read_CrossSpectra = 0

    pts2read_DCchannels = 0

    ext = ".pdata"

    optchar = "P"

    dataOut = None

    # number of self-spectra channels found in the file header
    nRdChannels = None

    # number of cross-spectra pairs found in the file header
    nRdPairs = None

    rdPairList = []

    def __init__(self):
        """
        SpectraReader initializer.

        The internal "dataOut" object (class Spectra) is created here; it is
        used to store one block of data every time getData() is requested.
        The block is taken from the internal buffer; if the buffer is empty a
        new block is read from the file.

        Affected:
            self.dataOut

        Return : None
        """

        # Legacy note: remove the inheritance from the base class
        ProcessingUnit.__init__(self)

        # self.isConfig = False

        self.pts2read_SelfSpectra = 0

        self.pts2read_CrossSpectra = 0

        self.pts2read_DCchannels = 0

        self.datablock = None

        self.utc = None

        self.ext = ".pdata"

        self.optchar = "P"

        self.basicHeaderObj = BasicHeader(LOCALTIME)

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()

        self.online = 0

        self.fp = None

        self.idFile = None

        self.dtype = None

        self.fileSizeByHeader = None

        self.filenameList = []

        self.filename = None

        self.fileSize = None

        self.firstHeaderSize = 0

        self.basicHeaderSize = 24

        self.pathList = []

        self.lastUTTime = 0

        self.maxTimeStep = 30

        self.flagNoMoreFiles = 0

        self.set = 0

        self.path = None

        self.delay = 60     # seconds

        self.nTries = 3     # quantity tries

        self.nFiles = 3     # number of files for searching

        self.nReadBlocks = 0

        self.flagIsNewFile = 1

        self.__isFirstTimeOnline = 1

        # self.ippSeconds = 0

        self.flagDiscontinuousBlock = 0

        self.flagIsNewBlock = 0

        self.nTotalBlocks = 0

        self.blocksize = 0

        self.dataOut = self.createObjByDefault()

        self.profileIndex = 1  # Always


    def createObjByDefault(self):
        """Return the default output container (Spectra) for the read data."""

        dataObj = Spectra()

        return dataObj

    def __hasNotDataInBuffer(self):
        # Spectra files are consumed one block per getData() call, so the
        # buffer never keeps leftover data between calls
        return 1


    def getBlockDimension(self):
        """
        Computes how many points must be read for each data block.

        Affected:
            self.nRdChannels
            self.nRdPairs
            self.pts2read_SelfSpectra
            self.pts2read_CrossSpectra
            self.pts2read_DCchannels
            self.blocksize
            self.dataOut.nChannels
            self.dataOut.nPairs

        Return:
            None
        """
        self.nRdChannels = 0
        self.nRdPairs = 0
        self.rdPairList = []

        # spectraComb holds channel indices in consecutive pairs: equal
        # indices denote a self-spectrum, different indices a cross-spectrum
        for i in range(0, self.processingHeaderObj.totalSpectra*2, 2):
            if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i+1]:
                self.nRdChannels = self.nRdChannels + 1     # pair of equal channels
            else:
                self.nRdPairs = self.nRdPairs + 1           # pair of different channels
                self.rdPairList.append((self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1]))

        pts2read = self.processingHeaderObj.nHeights * self.processingHeaderObj.profilesPerBlock

        self.pts2read_SelfSpectra = int(self.nRdChannels * pts2read)
        self.blocksize = self.pts2read_SelfSpectra

        if self.processingHeaderObj.flag_cspc:
            self.pts2read_CrossSpectra = int(self.nRdPairs * pts2read)
            self.blocksize += self.pts2read_CrossSpectra

        if self.processingHeaderObj.flag_dc:
            self.pts2read_DCchannels = int(self.systemHeaderObj.nChannels * self.processingHeaderObj.nHeights)
            self.blocksize += self.pts2read_DCchannels

        # self.blocksize = self.pts2read_SelfSpectra + self.pts2read_CrossSpectra + self.pts2read_DCchannels


    def readBlock(self):
        """
        Reads a data block starting at the current file-pointer position
        (self.fp) and updates every block-related attribute
        (metadata + data). The data read is stored in the internal buffers.

        Return: 1

        Affected:

            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
            self.data_spc
            self.data_cspc
            self.data_dc

        Exceptions:
            If the block read is not a valid block
        """
        # Self spectra are stored on disk as plain reals (self.dtype[0]);
        # cross spectra and DC channels as (real, imag) structured pairs
        spc = numpy.fromfile( self.fp, self.dtype[0], self.pts2read_SelfSpectra )
        spc = spc.reshape( (self.nRdChannels, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) # to a 3D array

        if self.processingHeaderObj.flag_cspc:
            cspc = numpy.fromfile( self.fp, self.dtype, self.pts2read_CrossSpectra )
            cspc = cspc.reshape( (self.nRdPairs, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) # to a 3D array

        if self.processingHeaderObj.flag_dc:
            dc = numpy.fromfile( self.fp, self.dtype, self.pts2read_DCchannels ) # int(self.processingHeaderObj.nHeights*self.systemHeaderObj.nChannels) )
            dc = dc.reshape( (self.systemHeaderObj.nChannels, self.processingHeaderObj.nHeights) ) # to a 2D array


        if not(self.processingHeaderObj.shif_fft):
            # roll axis 2 to re-center the FFT (zero frequency in the middle)
            shift = int(self.processingHeaderObj.profilesPerBlock/2)
            spc = numpy.roll( spc, shift , axis=2 )

            if self.processingHeaderObj.flag_cspc:
                cspc = numpy.roll( cspc, shift, axis=2 )

        # self.processingHeaderObj.shif_fft = True

        spc = numpy.transpose( spc, (0,2,1) )
        self.data_spc = spc

        if self.processingHeaderObj.flag_cspc:
            cspc = numpy.transpose( cspc, (0,2,1) )
            self.data_cspc = cspc['real'] + cspc['imag']*1j
        else:
            self.data_cspc = None

        if self.processingHeaderObj.flag_dc:
            self.data_dc = dc['real'] + dc['imag']*1j
        else:
            self.data_dc = None

        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1

        self.nTotalBlocks += 1
        self.nReadBlocks += 1

        return 1

    def getFirstHeader(self):
        """Copy the file headers (metadata) into the output data object."""

        self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()

        self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()

        # self.dataOut.ippSeconds = self.ippSeconds

        # self.dataOut.timeInterval = self.radarControllerHeaderObj.ippSeconds * self.processingHeaderObj.nCohInt * self.processingHeaderObj.nIncohInt * self.processingHeaderObj.profilesPerBlock

        self.dataOut.dtype = self.dtype

        # self.dataOut.nPairs = self.nPairs

        self.dataOut.pairsList = self.rdPairList

        self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock

        self.dataOut.nFFTPoints = self.processingHeaderObj.profilesPerBlock

        self.dataOut.nCohInt = self.processingHeaderObj.nCohInt

        self.dataOut.nIncohInt = self.processingHeaderObj.nIncohInt

        xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight

        self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)

        self.dataOut.channelList = range(self.systemHeaderObj.nChannels)

        self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft

        self.dataOut.flagDecodeData = False  # assume data is not decoded

        self.dataOut.flagDeflipData = False  # assume data is not flipped

        # "code" may be a numpy array: comparing with "!=" would be
        # elementwise, so test identity against None instead
        if self.radarControllerHeaderObj.code is not None:

            # self.dataOut.nCode = self.radarControllerHeaderObj.nCode
            #
            # self.dataOut.nBaud = self.radarControllerHeaderObj.nBaud
            #
            # self.dataOut.code = self.radarControllerHeaderObj.code

            self.dataOut.flagDecodeData = True

    def getData(self):
        """
        First method to execute before "RUN" is called.

        Copies the read buffer into the "Spectra" object, together with all
        its associated metadata. When the buffer holds no data, a new block
        is read from the file using "readNextBlock".

        Return:
            0 : no more files are available
            1 : the buffer was copied successfully

        Affected:
            self.dataOut

            self.flagDiscontinuousBlock
            self.flagIsNewBlock
        """

        if self.flagNoMoreFiles:
            self.dataOut.flagNoData = True
            print('Process finished')
            return 0

        self.flagDiscontinuousBlock = 0
        self.flagIsNewBlock = 0

        if self.__hasNotDataInBuffer():

            if not( self.readNextBlock() ):
                self.dataOut.flagNoData = True
                return 0

        # data is a 3-D numpy array (profiles, heights, channels)

        # data_dc may be a numpy array: use identity comparison with None
        if self.data_dc is None:
            self.dataOut.flagNoData = True
            return 0

        self.getBasicHeader()

        self.getFirstHeader()

        self.dataOut.data_spc = self.data_spc

        self.dataOut.data_cspc = self.data_cspc

        self.dataOut.data_dc = self.data_dc

        self.dataOut.flagNoData = False

        self.dataOut.realtime = self.online

        return self.dataOut.data_spc
class SpectraWriter(JRODataWriter, Operation):

    """
    Writes spectra data to processed (.pdata) files. Data is always written
    by blocks.
    """

    ext = ".pdata"

    optchar = "P"

    # (channels, heights, profiles)
    shape_spc_Buffer = None

    # (pairs, heights, profiles)
    shape_cspc_Buffer = None

    # (channels, heights)
    shape_dc_Buffer = None

    data_spc = None

    data_cspc = None

    data_dc = None

    # dataOut = None

    def __init__(self):
        """
        SpectraWriter initializer.

        Affected:
            self.dataOut
            self.basicHeaderObj
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.processingHeaderObj

        Return: None
        """

        Operation.__init__(self)

        self.isConfig = False

        self.nTotalBlocks = 0

        self.data_spc = None

        self.data_cspc = None

        self.data_dc = None

        self.fp = None

        self.flagIsNewFile = 1

        self.nTotalBlocks = 0

        self.flagIsNewBlock = 0

        self.setFile = None

        self.dtype = None

        self.path = None

        self.noMoreFiles = 0

        self.filename = None

        self.basicHeaderObj = BasicHeader(LOCALTIME)

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()


    def hasAllDataInBuffer(self):
        # One dataOut block is written per putData() call, so the buffer is
        # always complete
        return 1


    def setBlockDimension(self):
        """
        Computes the shapes of the sub-blocks that compose a data block.

        Affected:
            self.shape_spc_Buffer
            self.shape_cspc_Buffer
            self.shape_dc_Buffer

        Return: None
        """
        self.shape_spc_Buffer = (self.dataOut.nChannels,
                                 self.processingHeaderObj.nHeights,
                                 self.processingHeaderObj.profilesPerBlock)

        self.shape_cspc_Buffer = (self.dataOut.nPairs,
                                  self.processingHeaderObj.nHeights,
                                  self.processingHeaderObj.profilesPerBlock)

        self.shape_dc_Buffer = (self.dataOut.nChannels,
                                self.processingHeaderObj.nHeights)


    def writeBlock(self):
        """
        Writes the buffered block to the designated file.

        Affected:
            self.data_spc
            self.data_cspc
            self.data_dc
            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
            self.nWriteBlocks

        Return: None
        """

        spc = numpy.transpose( self.data_spc, (0,2,1) )
        if not( self.processingHeaderObj.shif_fft ):
            # roll axis 2 to undo the FFT re-centering before writing;
            # "//" keeps the shift integral (identical for Python 2 ints)
            spc = numpy.roll( spc, self.processingHeaderObj.profilesPerBlock//2, axis=2 )
        data = spc.reshape((-1))
        data = data.astype(self.dtype[0])
        data.tofile(self.fp)

        # data_cspc is None or a numpy array: "!=" would compare elementwise,
        # so test identity against None
        if self.data_cspc is not None:
            data = numpy.zeros( self.shape_cspc_Buffer, self.dtype )
            cspc = numpy.transpose( self.data_cspc, (0,2,1) )
            if not( self.processingHeaderObj.shif_fft ):
                cspc = numpy.roll( cspc, self.processingHeaderObj.profilesPerBlock//2, axis=2 )
            data['real'] = cspc.real
            data['imag'] = cspc.imag
            data = data.reshape((-1))
            data.tofile(self.fp)

        if self.data_dc is not None:
            data = numpy.zeros( self.shape_dc_Buffer, self.dtype )
            dc = self.data_dc
            data['real'] = dc.real
            data['imag'] = dc.imag
            data = data.reshape((-1))
            data.tofile(self.fp)

        self.data_spc.fill(0)

        if self.data_dc is not None:
            self.data_dc.fill(0)

        if self.data_cspc is not None:
            self.data_cspc.fill(0)

        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1
        self.nTotalBlocks += 1
        self.nWriteBlocks += 1
        self.blockIndex += 1

        # print "[Writing] Block = %d04" %self.blockIndex

    def putData(self):
        """
        Stores a data block in the buffer and then writes it to a file.

        Affected:
            self.data_spc
            self.data_cspc
            self.data_dc

        Return:
            0 : no data available or no more files can be written
            1 : one data block was written to a file
        """

        if self.dataOut.flagNoData:
            return 0

        self.flagIsNewBlock = 0

        if self.dataOut.flagDiscontinuousBlock:
            # discard the buffered block and start a new file; cspc/dc may
            # legitimately be None, so only clear the buffers that exist
            self.data_spc.fill(0)
            if self.data_cspc is not None:
                self.data_cspc.fill(0)
            if self.data_dc is not None:
                self.data_dc.fill(0)
            self.setNextFile()

        if self.flagIsNewFile == 0:
            self.setBasicHeader()

        self.data_spc = self.dataOut.data_spc.copy()
        if self.dataOut.data_cspc is not None:
            self.data_cspc = self.dataOut.data_cspc.copy()
        self.data_dc = self.dataOut.data_dc.copy()

        # #self.processingHeaderObj.dataBlocksPerFile)
        if self.hasAllDataInBuffer():
            # self.setFirstHeader()
            self.writeNextBlock()

        return 1


    def __getProcessFlags(self):
        """Build the PROCFLAG bitmask describing dtype and processing steps."""

        processFlags = 0

        dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
        dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
        dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
        dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
        dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
        dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]



        datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
                             PROCFLAG.DATATYPE_SHORT,
                             PROCFLAG.DATATYPE_LONG,
                             PROCFLAG.DATATYPE_INT64,
                             PROCFLAG.DATATYPE_FLOAT,
                             PROCFLAG.DATATYPE_DOUBLE]


        for index in range(len(dtypeList)):
            if self.dataOut.dtype == dtypeList[index]:
                dtypeValue = datatypeValueList[index]
                break

        processFlags += dtypeValue

        if self.dataOut.flagDecodeData:
            processFlags += PROCFLAG.DECODE_DATA

        if self.dataOut.flagDeflipData:
            processFlags += PROCFLAG.DEFLIP_DATA

        # "code" may be a numpy array: use identity comparison with None
        if self.dataOut.code is not None:
            processFlags += PROCFLAG.DEFINE_PROCESS_CODE

        if self.dataOut.nIncohInt > 1:
            processFlags += PROCFLAG.INCOHERENT_INTEGRATION

        if self.dataOut.data_dc is not None:
            processFlags += PROCFLAG.SAVE_CHANNELS_DC

        return processFlags


    def __getBlockSize(self):
        '''
        Determines the number of bytes of a Spectra-type data block.
        '''

        dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
        dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
        dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
        dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
        dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
        dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
        datatypeValueList = [1,2,4,8,4,8]
        for index in range(len(dtypeList)):
            if self.dataOut.dtype == dtypeList[index]:
                datatypeValue = datatypeValueList[index]
                break


        pts2write = self.dataOut.nHeights * self.dataOut.nFFTPoints

        pts2write_SelfSpectra = int(self.dataOut.nChannels * pts2write)
        blocksize = (pts2write_SelfSpectra*datatypeValue)

        # cross spectra and DC channels are complex, hence the factor 2
        if self.dataOut.data_cspc is not None:
            pts2write_CrossSpectra = int(self.dataOut.nPairs * pts2write)
            blocksize += (pts2write_CrossSpectra*datatypeValue*2)

        if self.dataOut.data_dc is not None:
            pts2write_DCchannels = int(self.dataOut.nChannels * self.dataOut.nHeights)
            blocksize += (pts2write_DCchannels*datatypeValue*2)

        blocksize = blocksize #* datatypeValue * 2 #FIX THIS

        return blocksize

    def setFirstHeader(self):

        """
        Builds a copy of the First Header from the current dataOut.

        Affected:
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.dtype

        Return:
            None
        """

        self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
        self.systemHeaderObj.nChannels = self.dataOut.nChannels
        self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()
        # re-size the radar-controller header for the (possibly re-packed) code
        old_code_size = self.dataOut.radarControllerHeaderObj.code_size
        new_code_size = int(numpy.ceil(self.dataOut.nBaud/32.))*self.dataOut.nCode*4
        self.radarControllerHeaderObj.size = self.radarControllerHeaderObj.size - old_code_size + new_code_size

        self.setBasicHeader()

        processingHeaderSize = 40  # bytes
        self.processingHeaderObj.dtype = 1  # Spectra
        self.processingHeaderObj.blockSize = self.__getBlockSize()
        self.processingHeaderObj.profilesPerBlock = self.dataOut.nFFTPoints
        self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
        self.processingHeaderObj.nWindows = 1  # could be 1 or self.dataOut.processingHeaderObj.nWindows
        self.processingHeaderObj.processFlags = self.__getProcessFlags()
        self.processingHeaderObj.nCohInt = self.dataOut.nCohInt  # required to compute timeInterval
        self.processingHeaderObj.nIncohInt = self.dataOut.nIncohInt
        self.processingHeaderObj.totalSpectra = self.dataOut.nPairs + self.dataOut.nChannels
        self.processingHeaderObj.shif_fft = self.dataOut.flagShiftFFT

        if self.processingHeaderObj.totalSpectra > 0:
            channelList = []
            # each self-spectrum is encoded as a pair of equal channel indices
            for channel in range(self.dataOut.nChannels):
                channelList.append(channel)
                channelList.append(channel)

            pairsList = []
            if self.dataOut.nPairs > 0:
                for pair in self.dataOut.pairsList:
                    pairsList.append(pair[0])
                    pairsList.append(pair[1])

            spectraComb = channelList + pairsList
            spectraComb = numpy.array(spectraComb,dtype="u1")
            self.processingHeaderObj.spectraComb = spectraComb
            sizeOfSpcComb = len(spectraComb)
            processingHeaderSize += sizeOfSpcComb

        # The processing header should not have information about code
        # if self.dataOut.code != None:
        #     self.processingHeaderObj.code = self.dataOut.code
        #     self.processingHeaderObj.nCode = self.dataOut.nCode
        #     self.processingHeaderObj.nBaud = self.dataOut.nBaud
        #     nCodeSize = 4 # bytes
        #     nBaudSize = 4 # bytes
        #     codeSize = 4 # bytes
        #     sizeOfCode = int(nCodeSize + nBaudSize + codeSize * self.dataOut.nCode * self.dataOut.nBaud)
        #     processingHeaderSize += sizeOfCode

        if self.processingHeaderObj.nWindows != 0:
            self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
            self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
            self.processingHeaderObj.nHeights = self.dataOut.nHeights
            self.processingHeaderObj.samplesWin = self.dataOut.nHeights
            sizeOfFirstHeight = 4
            sizeOfdeltaHeight = 4
            sizeOfnHeights = 4
            sizeOfWindows = (sizeOfFirstHeight + sizeOfdeltaHeight + sizeOfnHeights)*self.processingHeaderObj.nWindows
            processingHeaderSize += sizeOfWindows

        self.processingHeaderObj.size = processingHeaderSize
764 764
General Comments 0
You need to be logged in to leave comments. Login now