formatting and raise when digitalrf receives spectra
José Chávez
r1120:4cb1c6729a0e
This diff changes 659 lines.
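This commit has two parts: a whitespace/PEP8-style reformatting of the JRO header module (first hunk), and a new guard in the Digital RF writer that raises when it receives anything other than Voltage data, e.g. Spectra (second hunk). As a rough, hypothetical sketch of that guard (the real check lives inside DigitalRFWriter.setup()/writeData() and reads self.dataOut.type; a ValueError is used here in place of the bare-string raise shown in the diff, and the function name is illustrative only):

    def check_digitalrf_input(dataOut):
        # Digital RF storage only makes sense for raw voltage profiles, so any
        # other data type (e.g. 'Spectra') is rejected before writing.
        if dataOut.type != 'Voltage':
            raise ValueError('Digital RF cannot be used with this data type')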
@@ -1,876 +1,905
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JROHeaderIO.py 151 2012-10-31 19:00:51Z murco $
5 5 '''
6 6 import sys
7 7 import numpy
8 8 import copy
9 9 import datetime
10 10 import inspect
11 11
12 12 SPEED_OF_LIGHT = 299792458
13 13 SPEED_OF_LIGHT = 3e8
14 14
15 15 BASIC_STRUCTURE = numpy.dtype([
16 ('nSize','<u4'),
17 ('nVersion','<u2'),
18 ('nDataBlockId','<u4'),
19 ('nUtime','<u4'),
20 ('nMilsec','<u2'),
21 ('nTimezone','<i2'),
22 ('nDstflag','<i2'),
23 ('nErrorCount','<u4')
24 ])
16 ('nSize', '<u4'),
17 ('nVersion', '<u2'),
18 ('nDataBlockId', '<u4'),
19 ('nUtime', '<u4'),
20 ('nMilsec', '<u2'),
21 ('nTimezone', '<i2'),
22 ('nDstflag', '<i2'),
23 ('nErrorCount', '<u4')
24 ])
25 25
26 26 SYSTEM_STRUCTURE = numpy.dtype([
27 ('nSize','<u4'),
28 ('nNumSamples','<u4'),
29 ('nNumProfiles','<u4'),
30 ('nNumChannels','<u4'),
31 ('nADCResolution','<u4'),
32 ('nPCDIOBusWidth','<u4'),
33 ])
27 ('nSize', '<u4'),
28 ('nNumSamples', '<u4'),
29 ('nNumProfiles', '<u4'),
30 ('nNumChannels', '<u4'),
31 ('nADCResolution', '<u4'),
32 ('nPCDIOBusWidth', '<u4'),
33 ])
34 34
35 35 RADAR_STRUCTURE = numpy.dtype([
36 ('nSize','<u4'),
37 ('nExpType','<u4'),
38 ('nNTx','<u4'),
39 ('fIpp','<f4'),
40 ('fTxA','<f4'),
41 ('fTxB','<f4'),
42 ('nNumWindows','<u4'),
43 ('nNumTaus','<u4'),
44 ('nCodeType','<u4'),
45 ('nLine6Function','<u4'),
46 ('nLine5Function','<u4'),
47 ('fClock','<f4'),
48 ('nPrePulseBefore','<u4'),
49 ('nPrePulseAfter','<u4'),
50 ('sRangeIPP','<a20'),
51 ('sRangeTxA','<a20'),
52 ('sRangeTxB','<a20'),
53 ])
54
55 SAMPLING_STRUCTURE = numpy.dtype([('h0','<f4'),('dh','<f4'),('nsa','<u4')])
36 ('nSize', '<u4'),
37 ('nExpType', '<u4'),
38 ('nNTx', '<u4'),
39 ('fIpp', '<f4'),
40 ('fTxA', '<f4'),
41 ('fTxB', '<f4'),
42 ('nNumWindows', '<u4'),
43 ('nNumTaus', '<u4'),
44 ('nCodeType', '<u4'),
45 ('nLine6Function', '<u4'),
46 ('nLine5Function', '<u4'),
47 ('fClock', '<f4'),
48 ('nPrePulseBefore', '<u4'),
49 ('nPrePulseAfter', '<u4'),
50 ('sRangeIPP', '<a20'),
51 ('sRangeTxA', '<a20'),
52 ('sRangeTxB', '<a20'),
53 ])
54
55 SAMPLING_STRUCTURE = numpy.dtype(
56 [('h0', '<f4'), ('dh', '<f4'), ('nsa', '<u4')])
56 57
57 58
58 59 PROCESSING_STRUCTURE = numpy.dtype([
59 ('nSize','<u4'),
60 ('nDataType','<u4'),
61 ('nSizeOfDataBlock','<u4'),
62 ('nProfilesperBlock','<u4'),
63 ('nDataBlocksperFile','<u4'),
64 ('nNumWindows','<u4'),
65 ('nProcessFlags','<u4'),
66 ('nCoherentIntegrations','<u4'),
67 ('nIncoherentIntegrations','<u4'),
68 ('nTotalSpectra','<u4')
69 ])
60 ('nSize', '<u4'),
61 ('nDataType', '<u4'),
62 ('nSizeOfDataBlock', '<u4'),
63 ('nProfilesperBlock', '<u4'),
64 ('nDataBlocksperFile', '<u4'),
65 ('nNumWindows', '<u4'),
66 ('nProcessFlags', '<u4'),
67 ('nCoherentIntegrations', '<u4'),
68 ('nIncoherentIntegrations', '<u4'),
69 ('nTotalSpectra', '<u4')
70 ])
71
70 72
71 73 class Header(object):
72
74
73 75 def __init__(self):
74 76 raise NotImplementedError
75
77
76 78 def copy(self):
77 79 return copy.deepcopy(self)
78
80
79 81 def read(self):
80
82
81 83 raise NotImplementedError
82
84
83 85 def write(self):
84
86
85 87 raise NotImplementedError
86 88
87 89 def getAllowedArgs(self):
88 90 args = inspect.getargspec(self.__init__).args
89 91 try:
90 92 args.remove('self')
91 93 except:
92 94 pass
93 95 return args
94 96
95 97 def getAsDict(self):
96 98 args = self.getAllowedArgs()
97 99 asDict = {}
98 100 for x in args:
99 101 asDict[x] = self[x]
100 102 return asDict
101
103
102 104 def __getitem__(self, name):
103 105 return getattr(self, name)
104 106
105 107 def printInfo(self):
106
107 message = "#"*50 + "\n"
108
109 message = "#" * 50 + "\n"
108 110 message += self.__class__.__name__.upper() + "\n"
109 message += "#"*50 + "\n"
110
111 message += "#" * 50 + "\n"
112
111 113 keyList = self.__dict__.keys()
112 114 keyList.sort()
113
115
114 116 for key in keyList:
115 message += "%s = %s" %(key, self.__dict__[key]) + "\n"
116
117 message += "%s = %s" % (key, self.__dict__[key]) + "\n"
118
117 119 if "size" not in keyList:
118 120 attr = getattr(self, "size")
119
121
120 122 if attr:
121 message += "%s = %s" %("size", attr) + "\n"
122
123 message += "%s = %s" % ("size", attr) + "\n"
124
123 125 print message
124 126
127
125 128 class BasicHeader(Header):
126
129
127 130 size = None
128 131 version = None
129 132 dataBlock = None
130 133 utc = None
131 134 ltc = None
132 135 miliSecond = None
133 136 timeZone = None
134 137 dstFlag = None
135 138 errorCount = None
136 139 datatime = None
137 140 structure = BASIC_STRUCTURE
138 141 __LOCALTIME = None
139
142
140 143 def __init__(self, useLocalTime=True):
141
144
142 145 self.size = 24
143 146 self.version = 0
144 147 self.dataBlock = 0
145 148 self.utc = 0
146 149 self.miliSecond = 0
147 150 self.timeZone = 0
148 151 self.dstFlag = 0
149 152 self.errorCount = 0
150
153
151 154 self.useLocalTime = useLocalTime
152
155
153 156 def read(self, fp):
154
157
155 158 self.length = 0
156 159 try:
157 160 if hasattr(fp, 'read'):
158 header = numpy.fromfile(fp, BASIC_STRUCTURE,1)
161 header = numpy.fromfile(fp, BASIC_STRUCTURE, 1)
159 162 else:
160 header = numpy.fromstring(fp, BASIC_STRUCTURE,1)
163 header = numpy.fromstring(fp, BASIC_STRUCTURE, 1)
161 164 except Exception, e:
162 165 print "BasicHeader: "
163 166 print e
164 167 return 0
165 168
166 169 self.size = int(header['nSize'][0])
167 170 self.version = int(header['nVersion'][0])
168 171 self.dataBlock = int(header['nDataBlockId'][0])
169 172 self.utc = int(header['nUtime'][0])
170 173 self.miliSecond = int(header['nMilsec'][0])
171 174 self.timeZone = int(header['nTimezone'][0])
172 175 self.dstFlag = int(header['nDstflag'][0])
173 176 self.errorCount = int(header['nErrorCount'][0])
174
177
175 178 if self.size < 24:
176 179 return 0
177 180
178 181 self.length = header.nbytes
179 182 return 1
180
183
181 184 def write(self, fp):
182
183 headerTuple = (self.size,self.version,self.dataBlock,self.utc,self.miliSecond,self.timeZone,self.dstFlag,self.errorCount)
184 header = numpy.array(headerTuple, BASIC_STRUCTURE)
185
186 headerTuple = (self.size, self.version, self.dataBlock, self.utc,
187 self.miliSecond, self.timeZone, self.dstFlag, self.errorCount)
188 header = numpy.array(headerTuple, BASIC_STRUCTURE)
185 189 header.tofile(fp)
186
190
187 191 return 1
188
192
189 193 def get_ltc(self):
190
191 return self.utc - self.timeZone*60
192
194
195 return self.utc - self.timeZone * 60
196
193 197 def set_ltc(self, value):
194
195 self.utc = value + self.timeZone*60
196
198
199 self.utc = value + self.timeZone * 60
200
197 201 def get_datatime(self):
198
202
199 203 return datetime.datetime.utcfromtimestamp(self.ltc)
200
204
201 205 ltc = property(get_ltc, set_ltc)
202 206 datatime = property(get_datatime)
203 207
208
204 209 class SystemHeader(Header):
205
210
206 211 size = None
207 212 nSamples = None
208 213 nProfiles = None
209 214 nChannels = None
210 215 adcResolution = None
211 216 pciDioBusWidth = None
212 217 structure = SYSTEM_STRUCTURE
213 218
214 219 def __init__(self, nSamples=0, nProfiles=0, nChannels=0, adcResolution=14, pciDioBusWidth=0):
215
216 self.size = 24
220
221 self.size = 24
217 222 self.nSamples = nSamples
218 223 self.nProfiles = nProfiles
219 224 self.nChannels = nChannels
220 225 self.adcResolution = adcResolution
221 226 self.pciDioBusWidth = pciDioBusWidth
222 227
223 228 def read(self, fp):
224 229 self.length = 0
225 230 try:
226 231 startFp = fp.tell()
227 232 except Exception, e:
228 233 startFp = None
229 234 pass
230 235
231 236 try:
232 237 if hasattr(fp, 'read'):
233 header = numpy.fromfile(fp, SYSTEM_STRUCTURE,1)
238 header = numpy.fromfile(fp, SYSTEM_STRUCTURE, 1)
234 239 else:
235 header = numpy.fromstring(fp, SYSTEM_STRUCTURE,1)
240 header = numpy.fromstring(fp, SYSTEM_STRUCTURE, 1)
236 241 except Exception, e:
237 242 print "System Header: " + str(e)
238 243 return 0
239
244
240 245 self.size = header['nSize'][0]
241 246 self.nSamples = header['nNumSamples'][0]
242 247 self.nProfiles = header['nNumProfiles'][0]
243 248 self.nChannels = header['nNumChannels'][0]
244 249 self.adcResolution = header['nADCResolution'][0]
245 250 self.pciDioBusWidth = header['nPCDIOBusWidth'][0]
246
247
251
248 252 if startFp is not None:
249 253 endFp = self.size + startFp
250
254
251 255 if fp.tell() > endFp:
252 sys.stderr.write("Warning %s: Size value read from System Header is lower than it has to be\n" %fp.name)
256 sys.stderr.write(
257 "Warning %s: Size value read from System Header is lower than it has to be\n" % fp.name)
253 258 return 0
254
259
255 260 if fp.tell() < endFp:
256 sys.stderr.write("Warning %s: Size value read from System Header size is greater than it has to be\n" %fp.name)
261 sys.stderr.write(
262 "Warning %s: Size value read from System Header size is greater than it has to be\n" % fp.name)
257 263 return 0
258
264
259 265 self.length = header.nbytes
260 266 return 1
261
267
262 268 def write(self, fp):
263
264 headerTuple = (self.size,self.nSamples,self.nProfiles,self.nChannels,self.adcResolution,self.pciDioBusWidth)
265 header = numpy.array(headerTuple,SYSTEM_STRUCTURE)
269
270 headerTuple = (self.size, self.nSamples, self.nProfiles,
271 self.nChannels, self.adcResolution, self.pciDioBusWidth)
272 header = numpy.array(headerTuple, SYSTEM_STRUCTURE)
266 273 header.tofile(fp)
267
274
268 275 return 1
269 276
277
270 278 class RadarControllerHeader(Header):
271
279
272 280 expType = None
273 281 nTx = None
274 282 ipp = None
275 283 txA = None
276 284 txB = None
277 285 nWindows = None
278 286 numTaus = None
279 287 codeType = None
280 288 line6Function = None
281 289 line5Function = None
282 290 fClock = None
283 291 prePulseBefore = None
284 292 prePulseAfter = None
285 293 rangeIpp = None
286 294 rangeTxA = None
287 295 rangeTxB = None
288 296 structure = RADAR_STRUCTURE
289 297 __size = None
290
298
291 299 def __init__(self, expType=2, nTx=1,
292 300 ipp=None, txA=0, txB=0,
293 301 nWindows=None, nHeights=None, firstHeight=None, deltaHeight=None,
294 302 numTaus=0, line6Function=0, line5Function=0, fClock=None,
295 303 prePulseBefore=0, prePulseAfter=0,
296 304 codeType=0, nCode=0, nBaud=0, code=None,
297 305 flip1=0, flip2=0):
298
299 # self.size = 116
306
307 # self.size = 116
300 308 self.expType = expType
301 309 self.nTx = nTx
302 310 self.ipp = ipp
303 311 self.txA = txA
304 312 self.txB = txB
305 313 self.rangeIpp = ipp
306 314 self.rangeTxA = txA
307 315 self.rangeTxB = txB
308
316
309 317 self.nWindows = nWindows
310 318 self.numTaus = numTaus
311 319 self.codeType = codeType
312 320 self.line6Function = line6Function
313 321 self.line5Function = line5Function
314 322 self.fClock = fClock
315 323 self.prePulseBefore = prePulseBefore
316 324 self.prePulseAfter = prePulseAfter
317
325
318 326 self.nHeights = nHeights
319 327 self.firstHeight = firstHeight
320 328 self.deltaHeight = deltaHeight
321 329 self.samplesWin = nHeights
322
330
323 331 self.nCode = nCode
324 332 self.nBaud = nBaud
325 333 self.code = code
326 334 self.flip1 = flip1
327 335 self.flip2 = flip2
328
329 self.code_size = int(numpy.ceil(self.nBaud/32.))*self.nCode*4
336
337 self.code_size = int(numpy.ceil(self.nBaud / 32.)) * self.nCode * 4
330 338 # self.dynamic = numpy.array([],numpy.dtype('byte'))
331
339
332 340 if self.fClock is None and self.deltaHeight is not None:
333 self.fClock = 0.15/(deltaHeight*1e-6) #0.15Km / (height * 1u)
341 self.fClock = 0.15 / (deltaHeight * 1e-6) # 0.15Km / (height * 1u)
334 342
335 343 def read(self, fp):
336 344 self.length = 0
337 345 try:
338 346 startFp = fp.tell()
339 347 except Exception, e:
340 348 startFp = None
341 349 pass
342 350
343 351 try:
344 352 if hasattr(fp, 'read'):
345 header = numpy.fromfile(fp, RADAR_STRUCTURE,1)
353 header = numpy.fromfile(fp, RADAR_STRUCTURE, 1)
346 354 else:
347 header = numpy.fromstring(fp, RADAR_STRUCTURE,1)
355 header = numpy.fromstring(fp, RADAR_STRUCTURE, 1)
348 356 self.length += header.nbytes
349 357 except Exception, e:
350 358 print "RadarControllerHeader: " + str(e)
351 359 return 0
352
360
353 361 size = int(header['nSize'][0])
354 362 self.expType = int(header['nExpType'][0])
355 363 self.nTx = int(header['nNTx'][0])
356 364 self.ipp = float(header['fIpp'][0])
357 365 self.txA = float(header['fTxA'][0])
358 366 self.txB = float(header['fTxB'][0])
359 367 self.nWindows = int(header['nNumWindows'][0])
360 368 self.numTaus = int(header['nNumTaus'][0])
361 369 self.codeType = int(header['nCodeType'][0])
362 370 self.line6Function = int(header['nLine6Function'][0])
363 371 self.line5Function = int(header['nLine5Function'][0])
364 372 self.fClock = float(header['fClock'][0])
365 373 self.prePulseBefore = int(header['nPrePulseBefore'][0])
366 374 self.prePulseAfter = int(header['nPrePulseAfter'][0])
367 375 self.rangeIpp = header['sRangeIPP'][0]
368 376 self.rangeTxA = header['sRangeTxA'][0]
369 377 self.rangeTxB = header['sRangeTxB'][0]
370
378
371 379 try:
372 380 if hasattr(fp, 'read'):
373 samplingWindow = numpy.fromfile(fp, SAMPLING_STRUCTURE, self.nWindows)
381 samplingWindow = numpy.fromfile(
382 fp, SAMPLING_STRUCTURE, self.nWindows)
374 383 else:
375 samplingWindow = numpy.fromstring(fp[self.length:], SAMPLING_STRUCTURE, self.nWindows)
384 samplingWindow = numpy.fromstring(
385 fp[self.length:], SAMPLING_STRUCTURE, self.nWindows)
376 386 self.length += samplingWindow.nbytes
377 387 except Exception, e:
378 388 print "RadarControllerHeader: " + str(e)
379 389 return 0
380 390 self.nHeights = int(numpy.sum(samplingWindow['nsa']))
381 391 self.firstHeight = samplingWindow['h0']
382 392 self.deltaHeight = samplingWindow['dh']
383 393 self.samplesWin = samplingWindow['nsa']
384
385
386 394
387 395 try:
388 396 if hasattr(fp, 'read'):
389 397 self.Taus = numpy.fromfile(fp, '<f4', self.numTaus)
390 398 else:
391 self.Taus = numpy.fromstring(fp[self.length:], '<f4', self.numTaus)
399 self.Taus = numpy.fromstring(
400 fp[self.length:], '<f4', self.numTaus)
392 401 self.length += self.Taus.nbytes
393 402 except Exception, e:
394 403 print "RadarControllerHeader: " + str(e)
395 404 return 0
396
397
398
405
399 406 self.code_size = 0
400 407 if self.codeType != 0:
401
408
402 409 try:
403 410 if hasattr(fp, 'read'):
404 411 self.nCode = numpy.fromfile(fp, '<u4', 1)[0]
405 412 self.length += self.nCode.nbytes
406 413 self.nBaud = numpy.fromfile(fp, '<u4', 1)[0]
407 414 self.length += self.nBaud.nbytes
408 415 else:
409 self.nCode = numpy.fromstring(fp[self.length:], '<u4', 1)[0]
416 self.nCode = numpy.fromstring(
417 fp[self.length:], '<u4', 1)[0]
410 418 self.length += self.nCode.nbytes
411 self.nBaud = numpy.fromstring(fp[self.length:], '<u4', 1)[0]
419 self.nBaud = numpy.fromstring(
420 fp[self.length:], '<u4', 1)[0]
412 421 self.length += self.nBaud.nbytes
413 422 except Exception, e:
414 423 print "RadarControllerHeader: " + str(e)
415 return 0
416 code = numpy.empty([self.nCode,self.nBaud],dtype='i1')
417
424 return 0
425 code = numpy.empty([self.nCode, self.nBaud], dtype='i1')
426
418 427 for ic in range(self.nCode):
419 428 try:
420 429 if hasattr(fp, 'read'):
421 temp = numpy.fromfile(fp,'u4', int(numpy.ceil(self.nBaud/32.)))
430 temp = numpy.fromfile(fp, 'u4', int(
431 numpy.ceil(self.nBaud / 32.)))
422 432 else:
423 temp = numpy.fromstring(fp,'u4', int(numpy.ceil(self.nBaud/32.)))
433 temp = numpy.fromstring(
434 fp, 'u4', int(numpy.ceil(self.nBaud / 32.)))
424 435 self.length += temp.nbytes
425 436 except Exception, e:
426 437 print "RadarControllerHeader: " + str(e)
427 438 return 0
428 439
429 for ib in range(self.nBaud-1,-1,-1):
430 code[ic,ib] = temp[ib/32]%2
431 temp[ib/32] = temp[ib/32]/2
432
433 self.code = 2.0*code - 1.0
434 self.code_size = int(numpy.ceil(self.nBaud/32.))*self.nCode*4
435
440 for ib in range(self.nBaud - 1, -1, -1):
441 code[ic, ib] = temp[ib / 32] % 2
442 temp[ib / 32] = temp[ib / 32] / 2
443
444 self.code = 2.0 * code - 1.0
445 self.code_size = int(numpy.ceil(self.nBaud / 32.)) * self.nCode * 4
446
436 447 # if self.line5Function == RCfunction.FLIP:
437 448 # self.flip1 = numpy.fromfile(fp,'<u4',1)
438 #
449 #
439 450 # if self.line6Function == RCfunction.FLIP:
440 451 # self.flip2 = numpy.fromfile(fp,'<u4',1)
441 452 if startFp is not None:
442 453 endFp = size + startFp
443
454
444 455 if fp.tell() != endFp:
445 # fp.seek(endFp)
446 print "%s: Radar Controller Header size is not consistent: from data [%d] != from header field [%d]" %(fp.name, fp.tell()-startFp, size)
456 # fp.seek(endFp)
457 print "%s: Radar Controller Header size is not consistent: from data [%d] != from header field [%d]" % (fp.name, fp.tell() - startFp, size)
447 458 # return 0
448 459
449 460 if fp.tell() > endFp:
450 sys.stderr.write("Warning %s: Size value read from Radar Controller header is lower than it has to be\n" %fp.name)
461 sys.stderr.write(
462 "Warning %s: Size value read from Radar Controller header is lower than it has to be\n" % fp.name)
451 463 # return 0
452
464
453 465 if fp.tell() < endFp:
454 sys.stderr.write("Warning %s: Size value read from Radar Controller header is greater than it has to be\n" %fp.name)
455
456
466 sys.stderr.write(
467 "Warning %s: Size value read from Radar Controller header is greater than it has to be\n" % fp.name)
468
457 469 return 1
458
470
459 471 def write(self, fp):
460
472
461 473 headerTuple = (self.size,
462 474 self.expType,
463 475 self.nTx,
464 476 self.ipp,
465 477 self.txA,
466 478 self.txB,
467 479 self.nWindows,
468 480 self.numTaus,
469 481 self.codeType,
470 482 self.line6Function,
471 483 self.line5Function,
472 484 self.fClock,
473 485 self.prePulseBefore,
474 486 self.prePulseAfter,
475 487 self.rangeIpp,
476 488 self.rangeTxA,
477 489 self.rangeTxB)
478
479 header = numpy.array(headerTuple,RADAR_STRUCTURE)
490
491 header = numpy.array(headerTuple, RADAR_STRUCTURE)
480 492 header.tofile(fp)
481
482 sampleWindowTuple = (self.firstHeight,self.deltaHeight,self.samplesWin)
483 samplingWindow = numpy.array(sampleWindowTuple,SAMPLING_STRUCTURE)
493
494 sampleWindowTuple = (
495 self.firstHeight, self.deltaHeight, self.samplesWin)
496 samplingWindow = numpy.array(sampleWindowTuple, SAMPLING_STRUCTURE)
484 497 samplingWindow.tofile(fp)
485
498
486 499 if self.numTaus > 0:
487 500 self.Taus.tofile(fp)
488
489 if self.codeType !=0:
501
502 if self.codeType != 0:
490 503 nCode = numpy.array(self.nCode, '<u4')
491 504 nCode.tofile(fp)
492 505 nBaud = numpy.array(self.nBaud, '<u4')
493 506 nBaud.tofile(fp)
494 code1 = (self.code + 1.0)/2.
495
507 code1 = (self.code + 1.0) / 2.
508
496 509 for ic in range(self.nCode):
497 tempx = numpy.zeros(numpy.ceil(self.nBaud/32.))
510 tempx = numpy.zeros(numpy.ceil(self.nBaud / 32.))
498 511 start = 0
499 512 end = 32
500 513 for i in range(len(tempx)):
501 code_selected = code1[ic,start:end]
502 for j in range(len(code_selected)-1,-1,-1):
514 code_selected = code1[ic, start:end]
515 for j in range(len(code_selected) - 1, -1, -1):
503 516 if code_selected[j] == 1:
504 tempx[i] = tempx[i] + 2**(len(code_selected)-1-j)
517 tempx[i] = tempx[i] + \
518 2**(len(code_selected) - 1 - j)
505 519 start = start + 32
506 520 end = end + 32
507
521
508 522 tempx = tempx.astype('u4')
509 523 tempx.tofile(fp)
510
524
511 525 # if self.line5Function == RCfunction.FLIP:
512 526 # self.flip1.tofile(fp)
513 #
527 #
514 528 # if self.line6Function == RCfunction.FLIP:
515 529 # self.flip2.tofile(fp)
516
530
517 531 return 1
518 532
519 533 def get_ippSeconds(self):
520 534 '''
521 535 '''
522 536 ippSeconds = 2.0 * 1000 * self.ipp / SPEED_OF_LIGHT
523
537
524 538 return ippSeconds
525
539
526 540 def set_ippSeconds(self, ippSeconds):
527 541 '''
528 542 '''
529
530 self.ipp = ippSeconds * SPEED_OF_LIGHT / (2.0*1000)
531
543
544 self.ipp = ippSeconds * SPEED_OF_LIGHT / (2.0 * 1000)
545
532 546 return
533
547
534 548 def get_size(self):
535
536 self.__size = 116 + 12*self.nWindows + 4*self.numTaus
537
549
550 self.__size = 116 + 12 * self.nWindows + 4 * self.numTaus
551
538 552 if self.codeType != 0:
539 self.__size += 4 + 4 + 4*self.nCode*numpy.ceil(self.nBaud/32.)
540
553 self.__size += 4 + 4 + 4 * self.nCode * \
554 numpy.ceil(self.nBaud / 32.)
555
541 556 return self.__size
542
557
543 558 def set_size(self, value):
544
559
545 560 raise IOError, "size is a property and it cannot be set, just read"
546
561
547 562 return
548
563
549 564 ippSeconds = property(get_ippSeconds, set_ippSeconds)
550 565 size = property(get_size, set_size)
551 566
567
552 568 class ProcessingHeader(Header):
553
554 # size = None
569
570 # size = None
555 571 dtype = None
556 572 blockSize = None
557 573 profilesPerBlock = None
558 574 dataBlocksPerFile = None
559 575 nWindows = None
560 576 processFlags = None
561 577 nCohInt = None
562 578 nIncohInt = None
563 579 totalSpectra = None
564 580 structure = PROCESSING_STRUCTURE
565 581 flag_dc = None
566 582 flag_cspc = None
567
568 def __init__(self, dtype=0, blockSize=0, profilesPerBlock=0, dataBlocksPerFile=0, nWindows=0,processFlags=0, nCohInt=0,
569 nIncohInt=0, totalSpectra=0, nHeights=0, firstHeight=0, deltaHeight=0, samplesWin=0, spectraComb=0, nCode=0,
570 code=0, nBaud=None, shif_fft=False, flag_dc=False, flag_cspc=False, flag_decode=False, flag_deflip=False
571 ):
572
573 # self.size = 0
583
584 def __init__(self, dtype=0, blockSize=0, profilesPerBlock=0, dataBlocksPerFile=0, nWindows=0, processFlags=0, nCohInt=0,
585 nIncohInt=0, totalSpectra=0, nHeights=0, firstHeight=0, deltaHeight=0, samplesWin=0, spectraComb=0, nCode=0,
586 code=0, nBaud=None, shif_fft=False, flag_dc=False, flag_cspc=False, flag_decode=False, flag_deflip=False
587 ):
588
589 # self.size = 0
574 590 self.dtype = dtype
575 591 self.blockSize = blockSize
576 592 self.profilesPerBlock = 0
577 593 self.dataBlocksPerFile = 0
578 594 self.nWindows = 0
579 595 self.processFlags = 0
580 596 self.nCohInt = 0
581 597 self.nIncohInt = 0
582 598 self.totalSpectra = 0
583
599
584 600 self.nHeights = 0
585 601 self.firstHeight = 0
586 602 self.deltaHeight = 0
587 603 self.samplesWin = 0
588 604 self.spectraComb = 0
589 605 self.nCode = None
590 606 self.code = None
591 607 self.nBaud = None
592
608
593 609 self.shif_fft = False
594 610 self.flag_dc = False
595 611 self.flag_cspc = False
596 612 self.flag_decode = False
597 613 self.flag_deflip = False
598 614 self.length = 0
599 615
600 616 def read(self, fp):
601 617 self.length = 0
602 618 try:
603 619 startFp = fp.tell()
604 620 except Exception, e:
605 621 startFp = None
606 622 pass
607
623
608 624 try:
609 625 if hasattr(fp, 'read'):
610 626 header = numpy.fromfile(fp, PROCESSING_STRUCTURE, 1)
611 627 else:
612 628 header = numpy.fromstring(fp, PROCESSING_STRUCTURE, 1)
613 629 self.length += header.nbytes
614 630 except Exception, e:
615 631 print "ProcessingHeader: " + str(e)
616 632 return 0
617
633
618 634 size = int(header['nSize'][0])
619 635 self.dtype = int(header['nDataType'][0])
620 636 self.blockSize = int(header['nSizeOfDataBlock'][0])
621 637 self.profilesPerBlock = int(header['nProfilesperBlock'][0])
622 638 self.dataBlocksPerFile = int(header['nDataBlocksperFile'][0])
623 639 self.nWindows = int(header['nNumWindows'][0])
624 640 self.processFlags = header['nProcessFlags']
625 641 self.nCohInt = int(header['nCoherentIntegrations'][0])
626 642 self.nIncohInt = int(header['nIncoherentIntegrations'][0])
627 643 self.totalSpectra = int(header['nTotalSpectra'][0])
628
644
629 645 try:
630 646 if hasattr(fp, 'read'):
631 samplingWindow = numpy.fromfile(fp, SAMPLING_STRUCTURE, self.nWindows)
647 samplingWindow = numpy.fromfile(
648 fp, SAMPLING_STRUCTURE, self.nWindows)
632 649 else:
633 samplingWindow = numpy.fromstring(fp[self.length:], SAMPLING_STRUCTURE, self.nWindows)
650 samplingWindow = numpy.fromstring(
651 fp[self.length:], SAMPLING_STRUCTURE, self.nWindows)
634 652 self.length += samplingWindow.nbytes
635 653 except Exception, e:
636 654 print "ProcessingHeader: " + str(e)
637 655 return 0
638
656
639 657 self.nHeights = int(numpy.sum(samplingWindow['nsa']))
640 658 self.firstHeight = float(samplingWindow['h0'][0])
641 659 self.deltaHeight = float(samplingWindow['dh'][0])
642 660 self.samplesWin = samplingWindow['nsa'][0]
643
644 661
645 662 try:
646 663 if hasattr(fp, 'read'):
647 self.spectraComb = numpy.fromfile(fp, 'u1', 2*self.totalSpectra)
664 self.spectraComb = numpy.fromfile(
665 fp, 'u1', 2 * self.totalSpectra)
648 666 else:
649 self.spectraComb = numpy.fromstring(fp[self.length:], 'u1', 2*self.totalSpectra)
667 self.spectraComb = numpy.fromstring(
668 fp[self.length:], 'u1', 2 * self.totalSpectra)
650 669 self.length += self.spectraComb.nbytes
651 670 except Exception, e:
652 671 print "ProcessingHeader: " + str(e)
653 672 return 0
654
673
655 674 if ((self.processFlags & PROCFLAG.DEFINE_PROCESS_CODE) == PROCFLAG.DEFINE_PROCESS_CODE):
656 self.nCode = int(numpy.fromfile(fp,'<u4',1))
657 self.nBaud = int(numpy.fromfile(fp,'<u4',1))
658 self.code = numpy.fromfile(fp,'<f4',self.nCode*self.nBaud).reshape(self.nCode,self.nBaud)
659
675 self.nCode = int(numpy.fromfile(fp, '<u4', 1))
676 self.nBaud = int(numpy.fromfile(fp, '<u4', 1))
677 self.code = numpy.fromfile(
678 fp, '<f4', self.nCode * self.nBaud).reshape(self.nCode, self.nBaud)
679
660 680 if ((self.processFlags & PROCFLAG.EXP_NAME_ESP) == PROCFLAG.EXP_NAME_ESP):
661 exp_name_len = int(numpy.fromfile(fp,'<u4',1))
662 exp_name = numpy.fromfile(fp,'u1',exp_name_len+1)
663
681 exp_name_len = int(numpy.fromfile(fp, '<u4', 1))
682 exp_name = numpy.fromfile(fp, 'u1', exp_name_len + 1)
683
664 684 if ((self.processFlags & PROCFLAG.SHIFT_FFT_DATA) == PROCFLAG.SHIFT_FFT_DATA):
665 685 self.shif_fft = True
666 686 else:
667 687 self.shif_fft = False
668
688
669 689 if ((self.processFlags & PROCFLAG.SAVE_CHANNELS_DC) == PROCFLAG.SAVE_CHANNELS_DC):
670 690 self.flag_dc = True
671 691 else:
672 692 self.flag_dc = False
673
693
674 694 if ((self.processFlags & PROCFLAG.DECODE_DATA) == PROCFLAG.DECODE_DATA):
675 695 self.flag_decode = True
676 696 else:
677 697 self.flag_decode = False
678
698
679 699 if ((self.processFlags & PROCFLAG.DEFLIP_DATA) == PROCFLAG.DEFLIP_DATA):
680 700 self.flag_deflip = True
681 701 else:
682 702 self.flag_deflip = False
683
703
684 704 nChannels = 0
685 705 nPairs = 0
686 706 pairList = []
687
688 for i in range( 0, self.totalSpectra*2, 2 ):
689 if self.spectraComb[i] == self.spectraComb[i+1]:
690 nChannels = nChannels + 1 #par de canales iguales
707
708 for i in range(0, self.totalSpectra * 2, 2):
709 if self.spectraComb[i] == self.spectraComb[i + 1]:
710 nChannels = nChannels + 1 # par de canales iguales
691 711 else:
692 nPairs = nPairs + 1 #par de canales diferentes
693 pairList.append( (self.spectraComb[i], self.spectraComb[i+1]) )
694
712 nPairs = nPairs + 1 # par de canales diferentes
713 pairList.append((self.spectraComb[i], self.spectraComb[i + 1]))
714
695 715 self.flag_cspc = False
696 716 if nPairs > 0:
697 717 self.flag_cspc = True
698
699
700
718
701 719 if startFp is not None:
702 720 endFp = size + startFp
703 721 if fp.tell() > endFp:
704 sys.stderr.write("Warning: Processing header size is lower than it has to be")
722 sys.stderr.write(
723 "Warning: Processing header size is lower than it has to be")
705 724 return 0
706
725
707 726 if fp.tell() < endFp:
708 sys.stderr.write("Warning: Processing header size is greater than it is considered")
709
727 sys.stderr.write(
728 "Warning: Processing header size is greater than it is considered")
729
710 730 return 1
711
731
712 732 def write(self, fp):
713 #Clear DEFINE_PROCESS_CODE
733 # Clear DEFINE_PROCESS_CODE
714 734 self.processFlags = self.processFlags & (~PROCFLAG.DEFINE_PROCESS_CODE)
715
735
716 736 headerTuple = (self.size,
717 737 self.dtype,
718 738 self.blockSize,
719 739 self.profilesPerBlock,
720 740 self.dataBlocksPerFile,
721 741 self.nWindows,
722 742 self.processFlags,
723 743 self.nCohInt,
724 744 self.nIncohInt,
725 745 self.totalSpectra)
726
727 header = numpy.array(headerTuple,PROCESSING_STRUCTURE)
746
747 header = numpy.array(headerTuple, PROCESSING_STRUCTURE)
728 748 header.tofile(fp)
729
749
730 750 if self.nWindows != 0:
731 sampleWindowTuple = (self.firstHeight,self.deltaHeight,self.samplesWin)
732 samplingWindow = numpy.array(sampleWindowTuple,SAMPLING_STRUCTURE)
751 sampleWindowTuple = (
752 self.firstHeight, self.deltaHeight, self.samplesWin)
753 samplingWindow = numpy.array(sampleWindowTuple, SAMPLING_STRUCTURE)
733 754 samplingWindow.tofile(fp)
734
755
735 756 if self.totalSpectra != 0:
736 # spectraComb = numpy.array([],numpy.dtype('u1'))
757 # spectraComb = numpy.array([],numpy.dtype('u1'))
737 758 spectraComb = self.spectraComb
738 759 spectraComb.tofile(fp)
739
760
740 761 # if self.processFlags & PROCFLAG.DEFINE_PROCESS_CODE == PROCFLAG.DEFINE_PROCESS_CODE:
741 762 # nCode = numpy.array([self.nCode], numpy.dtype('u4')) #Probar con un dato que almacene codigo, hasta el momento no se hizo la prueba
742 763 # nCode.tofile(fp)
743 #
764 #
744 765 # nBaud = numpy.array([self.nBaud], numpy.dtype('u4'))
745 766 # nBaud.tofile(fp)
746 #
767 #
747 768 # code = self.code.reshape(self.nCode*self.nBaud)
748 769 # code = code.astype(numpy.dtype('<f4'))
749 770 # code.tofile(fp)
750
771
751 772 return 1
752 773
753 774 def get_size(self):
754
755 self.__size = 40 + 12*self.nWindows + 2*self.totalSpectra
756
775
776 self.__size = 40 + 12 * self.nWindows + 2 * self.totalSpectra
777
757 778 # if self.processFlags & PROCFLAG.DEFINE_PROCESS_CODE == PROCFLAG.DEFINE_PROCESS_CODE:
758 779 # self.__size += 4 + 4 + 4*self.nCode*numpy.ceil(self.nBaud/32.)
759 780 # self.__size += 4 + 4 + 4 * self.nCode * self.nBaud
760
781
761 782 return self.__size
762
783
763 784 def set_size(self, value):
764
785
765 786 raise IOError, "size is a property and it cannot be set, just read"
766
787
767 788 return
768
789
769 790 size = property(get_size, set_size)
770
791
792
771 793 class RCfunction:
772 NONE=0
773 FLIP=1
774 CODE=2
775 SAMPLING=3
776 LIN6DIV256=4
777 SYNCHRO=5
794 NONE = 0
795 FLIP = 1
796 CODE = 2
797 SAMPLING = 3
798 LIN6DIV256 = 4
799 SYNCHRO = 5
800
778 801
779 802 class nCodeType:
780 NONE=0
781 USERDEFINE=1
782 BARKER2=2
783 BARKER3=3
784 BARKER4=4
785 BARKER5=5
786 BARKER7=6
787 BARKER11=7
788 BARKER13=8
789 AC128=9
790 COMPLEMENTARYCODE2=10
791 COMPLEMENTARYCODE4=11
792 COMPLEMENTARYCODE8=12
793 COMPLEMENTARYCODE16=13
794 COMPLEMENTARYCODE32=14
795 COMPLEMENTARYCODE64=15
796 COMPLEMENTARYCODE128=16
797 CODE_BINARY28=17
798
799 class PROCFLAG:
800
803 NONE = 0
804 USERDEFINE = 1
805 BARKER2 = 2
806 BARKER3 = 3
807 BARKER4 = 4
808 BARKER5 = 5
809 BARKER7 = 6
810 BARKER11 = 7
811 BARKER13 = 8
812 AC128 = 9
813 COMPLEMENTARYCODE2 = 10
814 COMPLEMENTARYCODE4 = 11
815 COMPLEMENTARYCODE8 = 12
816 COMPLEMENTARYCODE16 = 13
817 COMPLEMENTARYCODE32 = 14
818 COMPLEMENTARYCODE64 = 15
819 COMPLEMENTARYCODE128 = 16
820 CODE_BINARY28 = 17
821
822
823 class PROCFLAG:
824
801 825 COHERENT_INTEGRATION = numpy.uint32(0x00000001)
802 DECODE_DATA = numpy.uint32(0x00000002)
826 DECODE_DATA = numpy.uint32(0x00000002)
803 827 SPECTRA_CALC = numpy.uint32(0x00000004)
804 INCOHERENT_INTEGRATION = numpy.uint32(0x00000008)
828 INCOHERENT_INTEGRATION = numpy.uint32(0x00000008)
805 829 POST_COHERENT_INTEGRATION = numpy.uint32(0x00000010)
806 830 SHIFT_FFT_DATA = numpy.uint32(0x00000020)
807
831
808 832 DATATYPE_CHAR = numpy.uint32(0x00000040)
809 833 DATATYPE_SHORT = numpy.uint32(0x00000080)
810 834 DATATYPE_LONG = numpy.uint32(0x00000100)
811 835 DATATYPE_INT64 = numpy.uint32(0x00000200)
812 836 DATATYPE_FLOAT = numpy.uint32(0x00000400)
813 837 DATATYPE_DOUBLE = numpy.uint32(0x00000800)
814
815 DATAARRANGE_CONTIGUOUS_CH = numpy.uint32(0x00001000)
816 DATAARRANGE_CONTIGUOUS_H = numpy.uint32(0x00002000)
817 DATAARRANGE_CONTIGUOUS_P = numpy.uint32(0x00004000)
818
838
839 DATAARRANGE_CONTIGUOUS_CH = numpy.uint32(0x00001000)
840 DATAARRANGE_CONTIGUOUS_H = numpy.uint32(0x00002000)
841 DATAARRANGE_CONTIGUOUS_P = numpy.uint32(0x00004000)
842
819 843 SAVE_CHANNELS_DC = numpy.uint32(0x00008000)
820 DEFLIP_DATA = numpy.uint32(0x00010000)
821 DEFINE_PROCESS_CODE = numpy.uint32(0x00020000)
822
844 DEFLIP_DATA = numpy.uint32(0x00010000)
845 DEFINE_PROCESS_CODE = numpy.uint32(0x00020000)
846
823 847 ACQ_SYS_NATALIA = numpy.uint32(0x00040000)
824 848 ACQ_SYS_ECHOTEK = numpy.uint32(0x00080000)
825 849 ACQ_SYS_ADRXD = numpy.uint32(0x000C0000)
826 850 ACQ_SYS_JULIA = numpy.uint32(0x00100000)
827 851 ACQ_SYS_XXXXXX = numpy.uint32(0x00140000)
828
852
829 853 EXP_NAME_ESP = numpy.uint32(0x00200000)
830 854 CHANNEL_NAMES_ESP = numpy.uint32(0x00400000)
831
855
832 856 OPERATION_MASK = numpy.uint32(0x0000003F)
833 857 DATATYPE_MASK = numpy.uint32(0x00000FC0)
834 858 DATAARRANGE_MASK = numpy.uint32(0x00007000)
835 859 ACQ_SYS_MASK = numpy.uint32(0x001C0000)
836
837 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
838 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
839 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
840 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
841 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
842 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
860
861
862 dtype0 = numpy.dtype([('real', '<i1'), ('imag', '<i1')])
863 dtype1 = numpy.dtype([('real', '<i2'), ('imag', '<i2')])
864 dtype2 = numpy.dtype([('real', '<i4'), ('imag', '<i4')])
865 dtype3 = numpy.dtype([('real', '<i8'), ('imag', '<i8')])
866 dtype4 = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
867 dtype5 = numpy.dtype([('real', '<f8'), ('imag', '<f8')])
843 868
844 869 NUMPY_DTYPE_LIST = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
845 870
846 PROCFLAG_DTYPE_LIST = [PROCFLAG.DATATYPE_CHAR,
847 PROCFLAG.DATATYPE_SHORT,
848 PROCFLAG.DATATYPE_LONG,
849 PROCFLAG.DATATYPE_INT64,
850 PROCFLAG.DATATYPE_FLOAT,
871 PROCFLAG_DTYPE_LIST = [PROCFLAG.DATATYPE_CHAR,
872 PROCFLAG.DATATYPE_SHORT,
873 PROCFLAG.DATATYPE_LONG,
874 PROCFLAG.DATATYPE_INT64,
875 PROCFLAG.DATATYPE_FLOAT,
851 876 PROCFLAG.DATATYPE_DOUBLE]
852 877
853 878 DTYPE_WIDTH = [1, 2, 4, 8, 4, 8]
854 879
880
855 881 def get_dtype_index(numpy_dtype):
856
882
857 883 index = None
858
884
859 885 for i in range(len(NUMPY_DTYPE_LIST)):
860 886 if numpy_dtype == NUMPY_DTYPE_LIST[i]:
861 887 index = i
862 888 break
863
889
864 890 return index
865 891
892
866 893 def get_numpy_dtype(index):
867
894
868 895 return NUMPY_DTYPE_LIST[index]
869 896
897
870 898 def get_procflag_dtype(index):
871
899
872 900 return PROCFLAG_DTYPE_LIST[index]
873 901
902
874 903 def get_dtype_width(index):
875
876 			return DTYPE_WIDTH[index]
\ No newline at end of file
904
905 return DTYPE_WIDTH[index]
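The header module above stores the inter-pulse period (ipp) in kilometers; the ippSeconds property of RadarControllerHeader converts it to seconds through the two-way travel time. A minimal worked sketch of that conversion, reusing the module's SPEED_OF_LIGHT = 3e8 constant (the helper names are illustrative, not part of the code):

    SPEED_OF_LIGHT = 3e8  # m/s, as redefined at the top of the first hunk

    def ipp_km_to_seconds(ipp_km):
        # Factor 2 for the two-way (out and back) path, 1000 for km -> m.
        return 2.0 * 1000 * ipp_km / SPEED_OF_LIGHT

    def ipp_seconds_to_km(ipp_seconds):
        # Inverse conversion, matching the set_ippSeconds property setter.
        return ipp_seconds * SPEED_OF_LIGHT / (2.0 * 1000)

    # Example: a 150 km IPP corresponds to a 1 ms inter-pulse period.
    assert abs(ipp_km_to_seconds(150.0) - 1e-3) < 1e-15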
@@ -1,791 +1,800
1 1
2 2 '''
3 3 Created on Jul 3, 2014
4 4
5 5 @author: roj-idl71
6 6 '''
7 7 # SUBCHANNELS EN VEZ DE CHANNELS
8 8 # BENCHMARKS -> PROBLEMAS CON ARCHIVOS GRANDES -> INCONSTANTE EN EL TIEMPO
9 9 # ACTUALIZACION DE VERSION
10 10 # HEADERS
11 11 # MODULO DE ESCRITURA
12 12 # METADATA
13 13
14 14 import os
15 15 import datetime
16 16 import numpy
17 17 import timeit
18 18 from fractions import Fraction
19 19
20 20 try:
21 21 from gevent import sleep
22 22 except:
23 23 from time import sleep
24 24
25 25 from schainpy.model.data.jroheaderIO import RadarControllerHeader, SystemHeader
26 26 from schainpy.model.data.jrodata import Voltage
27 27 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
28 28 from time import time
29 29
30 30 import cPickle
31 31 try:
32 32 import digital_rf
33 33 except:
34 34 print 'You should install "digital_rf" module if you want to read Digital RF data'
35 35
36 36
37 37 class DigitalRFReader(ProcessingUnit):
38 38 '''
39 39 classdocs
40 40 '''
41 41
42 42 def __init__(self, **kwargs):
43 43 '''
44 44 Constructor
45 45 '''
46 46
47 47 ProcessingUnit.__init__(self, **kwargs)
48 48
49 49 self.dataOut = Voltage()
50 50 self.__printInfo = True
51 51 self.__flagDiscontinuousBlock = False
52 52 self.__bufferIndex = 9999999
53 53 self.__ippKm = None
54 54 self.__codeType = 0
55 55 self.__nCode = None
56 56 self.__nBaud = None
57 57 self.__code = None
58 58 self.dtype = None
59 59 self.oldAverage = None
60 60
61 61 def close(self):
62 62 print 'Average of writing to digital rf format is ', self.oldAverage * 1000
63 63 return
64 64
65 65 def __getCurrentSecond(self):
66 66
67 67 return self.__thisUnixSample / self.__sample_rate
68 68
69 69 thisSecond = property(__getCurrentSecond, "I'm the 'thisSecond' property.")
70 70
71 71 def __setFileHeader(self):
72 72 '''
73 73 In this method will be initialized every parameter of dataOut object (header, no data)
74 74 '''
75 75 ippSeconds = 1.0 * self.__nSamples / self.__sample_rate
76 76
77 77 nProfiles = 1.0 / ippSeconds # Number of profiles in one second
78 78
79 79 try:
80 80 self.dataOut.radarControllerHeaderObj = RadarControllerHeader(
81 81 self.__radarControllerHeader)
82 82 except:
83 83 self.dataOut.radarControllerHeaderObj = RadarControllerHeader(
84 84 txA=0,
85 85 txB=0,
86 86 nWindows=1,
87 87 nHeights=self.__nSamples,
88 88 firstHeight=self.__firstHeigth,
89 89 deltaHeight=self.__deltaHeigth,
90 90 codeType=self.__codeType,
91 91 nCode=self.__nCode, nBaud=self.__nBaud,
92 92 code=self.__code)
93 93
94 94 try:
95 95 self.dataOut.systemHeaderObj = SystemHeader(self.__systemHeader)
96 96 except:
97 97 self.dataOut.systemHeaderObj = SystemHeader(nSamples=self.__nSamples,
98 98 nProfiles=nProfiles,
99 99 nChannels=len(
100 100 self.__channelList),
101 101 adcResolution=14)
102 102 self.dataOut.type = "Voltage"
103 103
104 104 self.dataOut.data = None
105 105
106 106 self.dataOut.dtype = self.dtype
107 107
108 108 # self.dataOut.nChannels = 0
109 109
110 110 # self.dataOut.nHeights = 0
111 111
112 112 self.dataOut.nProfiles = int(nProfiles)
113 113
114 114 self.dataOut.heightList = self.__firstHeigth + \
115 115 numpy.arange(self.__nSamples, dtype=numpy.float) * \
116 116 self.__deltaHeigth
117 117
118 118 self.dataOut.channelList = range(self.__num_subchannels)
119 119
120 120 self.dataOut.blocksize = self.dataOut.getNChannels() * self.dataOut.getNHeights()
121 121
122 122 # self.dataOut.channelIndexList = None
123 123
124 124 self.dataOut.flagNoData = True
125 125
126 126 self.dataOut.flagDataAsBlock = False
127 127 # Set to TRUE if the data is discontinuous
128 128 self.dataOut.flagDiscontinuousBlock = False
129 129
130 130 self.dataOut.utctime = None
131 131
132 132 # timezone like jroheader, difference in minutes between UTC and localtime
133 133 self.dataOut.timeZone = self.__timezone / 60
134 134
135 135 self.dataOut.dstFlag = 0
136 136
137 137 self.dataOut.errorCount = 0
138 138
139 139 try:
140 140 self.dataOut.nCohInt = self.fixed_metadata_dict.get(
141 141 'nCohInt', self.nCohInt)
142 142
143 143 # asumo que la data esta decodificada
144 144 self.dataOut.flagDecodeData = self.fixed_metadata_dict.get(
145 145 'flagDecodeData', self.flagDecodeData)
146 146
147 147 # asumo que la data esta sin flip
148 148 self.dataOut.flagDeflipData = self.fixed_metadata_dict['flagDeflipData']
149 149
150 150 self.dataOut.flagShiftFFT = self.fixed_metadata_dict['flagShiftFFT']
151 151
152 152 self.dataOut.useLocalTime = self.fixed_metadata_dict['useLocalTime']
153 153 except:
154 154 pass
155 155
156 156 self.dataOut.ippSeconds = ippSeconds
157 157
158 158 # Time interval between profiles
159 159 # self.dataOut.timeInterval = self.dataOut.ippSeconds * self.dataOut.nCohInt
160 160
161 161 self.dataOut.frequency = self.__frequency
162 162
163 163 self.dataOut.realtime = self.__online
164 164
165 165 def findDatafiles(self, path, startDate=None, endDate=None):
166 166
167 167 if not os.path.isdir(path):
168 168 return []
169 169
170 170 try:
171 171 digitalReadObj = digital_rf.DigitalRFReader(
172 172 path, load_all_metadata=True)
173 173 except:
174 174 digitalReadObj = digital_rf.DigitalRFReader(path)
175 175
176 176 channelNameList = digitalReadObj.get_channels()
177 177
178 178 if not channelNameList:
179 179 return []
180 180
181 181 metadata_dict = digitalReadObj.get_rf_file_metadata(channelNameList[0])
182 182
183 183 sample_rate = metadata_dict['sample_rate'][0]
184 184
185 185 this_metadata_file = digitalReadObj.get_metadata(channelNameList[0])
186 186
187 187 try:
188 188 timezone = this_metadata_file['timezone'].value
189 189 except:
190 190 timezone = 0
191 191
192 192 startUTCSecond, endUTCSecond = digitalReadObj.get_bounds(
193 193 channelNameList[0]) / sample_rate - timezone
194 194
195 195 startDatetime = datetime.datetime.utcfromtimestamp(startUTCSecond)
196 196 endDatatime = datetime.datetime.utcfromtimestamp(endUTCSecond)
197 197
198 198 if not startDate:
199 199 startDate = startDatetime.date()
200 200
201 201 if not endDate:
202 202 endDate = endDatatime.date()
203 203
204 204 dateList = []
205 205
206 206 thisDatetime = startDatetime
207 207
208 208 while(thisDatetime <= endDatatime):
209 209
210 210 thisDate = thisDatetime.date()
211 211
212 212 if thisDate < startDate:
213 213 continue
214 214
215 215 if thisDate > endDate:
216 216 break
217 217
218 218 dateList.append(thisDate)
219 219 thisDatetime += datetime.timedelta(1)
220 220
221 221 return dateList
222 222
223 223 def setup(self, path=None,
224 224 startDate=None,
225 225 endDate=None,
226 226 startTime=datetime.time(0, 0, 0),
227 227 endTime=datetime.time(23, 59, 59),
228 228 channelList=None,
229 229 nSamples=None,
230 230 online=False,
231 231 delay=60,
232 232 buffer_size=1024,
233 233 ippKm=None,
234 234 nCohInt=1,
235 235 nCode=1,
236 236 nBaud=1,
237 237 flagDecodeData=False,
238 238 code=numpy.ones((1, 1), dtype=numpy.int),
239 239 **kwargs):
240 240 '''
241 241 In this method we should set all initial parameters.
242 242
243 243 Inputs:
244 244 path
245 245 startDate
246 246 endDate
247 247 startTime
248 248 endTime
249 249 set
250 250 expLabel
251 251 ext
252 252 online
253 253 delay
254 254 '''
255 255 self.nCohInt = nCohInt
256 256 self.flagDecodeData = flagDecodeData
257 257 self.i = 0
258 258 if not os.path.isdir(path):
259 259 raise ValueError, "[Reading] Directory %s does not exist" % path
260 260
261 261 try:
262 262 self.digitalReadObj = digital_rf.DigitalRFReader(
263 263 path, load_all_metadata=True)
264 264 except:
265 265 self.digitalReadObj = digital_rf.DigitalRFReader(path)
266 266
267 267 channelNameList = self.digitalReadObj.get_channels()
268 268
269 269 if not channelNameList:
270 270 raise ValueError, "[Reading] Directory %s does not have any files" % path
271 271
272 272 if not channelList:
273 273 channelList = range(len(channelNameList))
274 274
275 275 ########## Reading metadata ######################
276 276
277 277 top_properties = self.digitalReadObj.get_properties(
278 278 channelNameList[channelList[0]])
279 279
280 280 self.__num_subchannels = top_properties['num_subchannels']
281 281 self.__sample_rate = 1.0 * \
282 282 top_properties['sample_rate_numerator'] / \
283 283 top_properties['sample_rate_denominator']
284 284 # self.__samples_per_file = top_properties['samples_per_file'][0]
285 285 self.__deltaHeigth = 1e6 * 0.15 / self.__sample_rate # why 0.15?
286 286
287 287 this_metadata_file = self.digitalReadObj.get_digital_metadata(
288 288 channelNameList[channelList[0]])
289 289 metadata_bounds = this_metadata_file.get_bounds()
290 290 self.fixed_metadata_dict = this_metadata_file.read(
291 291 metadata_bounds[0])[metadata_bounds[0]] # GET FIRST HEADER
292 292
293 293 try:
294 294 self.__processingHeader = self.fixed_metadata_dict['processingHeader']
295 295 self.__radarControllerHeader = self.fixed_metadata_dict['radarControllerHeader']
296 296 self.__systemHeader = self.fixed_metadata_dict['systemHeader']
297 297 self.dtype = cPickle.loads(self.fixed_metadata_dict['dtype'])
298 298 except:
299 299 pass
300 300
301 301 self.__frequency = None
302 302
303 303 self.__frequency = self.fixed_metadata_dict.get('frequency', 1)
304 304
305 305 self.__timezone = self.fixed_metadata_dict.get('timezone', 300)
306 306
307 307 try:
308 308 nSamples = self.fixed_metadata_dict['nSamples']
309 309 except:
310 310 nSamples = None
311 311
312 312 self.__firstHeigth = 0
313 313
314 314 try:
315 315 codeType = self.__radarControllerHeader['codeType']
316 316 except:
317 317 codeType = 0
318 318
319 319 try:
320 320 if codeType:
321 321 nCode = self.__radarControllerHeader['nCode']
322 322 nBaud = self.__radarControllerHeader['nBaud']
323 323 code = self.__radarControllerHeader['code']
324 324 except:
325 325 pass
326 326
327 327 if not ippKm:
328 328 try:
329 329 # seconds to km
330 330 ippKm = self.__radarControllerHeader['ipp']
331 331 except:
332 332 ippKm = None
333 333 ####################################################
334 334 self.__ippKm = ippKm
335 335 startUTCSecond = None
336 336 endUTCSecond = None
337 337
338 338 if startDate:
339 339 startDatetime = datetime.datetime.combine(startDate, startTime)
340 340 startUTCSecond = (
341 341 startDatetime - datetime.datetime(1970, 1, 1)).total_seconds() + self.__timezone
342 342
343 343 if endDate:
344 344 endDatetime = datetime.datetime.combine(endDate, endTime)
345 345 endUTCSecond = (endDatetime - datetime.datetime(1970,
346 346 1, 1)).total_seconds() + self.__timezone
347 347
348 348 start_index, end_index = self.digitalReadObj.get_bounds(
349 349 channelNameList[channelList[0]])
350 350
351 351 if not startUTCSecond:
352 352 startUTCSecond = start_index / self.__sample_rate
353 353
354 354 if start_index > startUTCSecond * self.__sample_rate:
355 355 startUTCSecond = start_index / self.__sample_rate
356 356
357 357 if not endUTCSecond:
358 358 endUTCSecond = end_index / self.__sample_rate
359 359
360 360 if end_index < endUTCSecond * self.__sample_rate:
361 361 endUTCSecond = end_index / self.__sample_rate
362 362 if not nSamples:
363 363 if not ippKm:
364 364 raise ValueError, "[Reading] nSamples or ippKm should be defined"
365 365 nSamples = int(ippKm / (1e6 * 0.15 / self.__sample_rate))
366 366 channelBoundList = []
367 367 channelNameListFiltered = []
368 368
369 369 for thisIndexChannel in channelList:
370 370 thisChannelName = channelNameList[thisIndexChannel]
371 371 start_index, end_index = self.digitalReadObj.get_bounds(
372 372 thisChannelName)
373 373 channelBoundList.append((start_index, end_index))
374 374 channelNameListFiltered.append(thisChannelName)
375 375
376 376 self.profileIndex = 0
377 377 self.i = 0
378 378 self.__delay = delay
379 379
380 380 self.__codeType = codeType
381 381 self.__nCode = nCode
382 382 self.__nBaud = nBaud
383 383 self.__code = code
384 384
385 385 self.__datapath = path
386 386 self.__online = online
387 387 self.__channelList = channelList
388 388 self.__channelNameList = channelNameListFiltered
389 389 self.__channelBoundList = channelBoundList
390 390 self.__nSamples = nSamples
391 391 self.__samples_to_read = long(nSamples) # FIJO: AHORA 40
392 392 self.__nChannels = len(self.__channelList)
393 393
394 394 self.__startUTCSecond = startUTCSecond
395 395 self.__endUTCSecond = endUTCSecond
396 396
397 397 self.__timeInterval = 1.0 * self.__samples_to_read / \
398 398 self.__sample_rate # Time interval
399 399
400 400 if online:
401 401 # self.__thisUnixSample = int(endUTCSecond*self.__sample_rate - 4*self.__samples_to_read)
402 402 startUTCSecond = numpy.floor(endUTCSecond)
403 403
404 404 # por que en el otro metodo lo primero q se hace es sumar samplestoread
405 405 self.__thisUnixSample = long(
406 406 startUTCSecond * self.__sample_rate) - self.__samples_to_read
407 407
408 408 self.__data_buffer = numpy.zeros(
409 409 (self.__num_subchannels, self.__samples_to_read), dtype=numpy.complex)
410 410
411 411 self.__setFileHeader()
412 412 self.isConfig = True
413 413
414 414 print "[Reading] Digital RF Data was found from %s to %s " % (
415 415 datetime.datetime.utcfromtimestamp(
416 416 self.__startUTCSecond - self.__timezone),
417 417 datetime.datetime.utcfromtimestamp(
418 418 self.__endUTCSecond - self.__timezone)
419 419 )
420 420
421 421 print "[Reading] Starting process from %s to %s" % (datetime.datetime.utcfromtimestamp(startUTCSecond - self.__timezone),
422 422 datetime.datetime.utcfromtimestamp(
423 423 endUTCSecond - self.__timezone)
424 424 )
425 425 self.oldAverage = None
426 426 self.count = 0
427 427 self.executionTime = 0
428 428
429 429 def __reload(self):
430 430 # print
431 431 # print "%s not in range [%s, %s]" %(
432 432 # datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
433 433 # datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone),
434 434 # datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone)
435 435 # )
436 436 print "[Reading] reloading metadata ..."
437 437
438 438 try:
439 439 self.digitalReadObj.reload(complete_update=True)
440 440 except:
441 441 self.digitalReadObj.reload()
442 442
443 443 start_index, end_index = self.digitalReadObj.get_bounds(
444 444 self.__channelNameList[self.__channelList[0]])
445 445
446 446 if start_index > self.__startUTCSecond * self.__sample_rate:
447 447 self.__startUTCSecond = 1.0 * start_index / self.__sample_rate
448 448
449 449 if end_index > self.__endUTCSecond * self.__sample_rate:
450 450 self.__endUTCSecond = 1.0 * end_index / self.__sample_rate
451 451 print
452 452 print "[Reading] New timerange found [%s, %s] " % (
453 453 datetime.datetime.utcfromtimestamp(
454 454 self.__startUTCSecond - self.__timezone),
455 455 datetime.datetime.utcfromtimestamp(
456 456 self.__endUTCSecond - self.__timezone)
457 457 )
458 458
459 459 return True
460 460
461 461 return False
462 462
463 463 def timeit(self, toExecute):
464 464 t0 = time()
465 465 toExecute()
466 466 self.executionTime = time() - t0
467 467 if self.oldAverage is None:
468 468 self.oldAverage = self.executionTime
469 469 self.oldAverage = (self.executionTime + self.count *
470 470 self.oldAverage) / (self.count + 1.0)
471 471 self.count = self.count + 1.0
472 472 return
473 473
474 474 def __readNextBlock(self, seconds=30, volt_scale=1):
475 475 '''
476 476 '''
477 477
478 478 # Set the next data
479 479 self.__flagDiscontinuousBlock = False
480 480 self.__thisUnixSample += self.__samples_to_read
481 481
482 482 if self.__thisUnixSample + 2 * self.__samples_to_read > self.__endUTCSecond * self.__sample_rate:
483 483 print "[Reading] There are no more data into selected time-range"
484 484 if self.__online:
485 485 self.__reload()
486 486 else:
487 487 return False
488 488
489 489 if self.__thisUnixSample + 2 * self.__samples_to_read > self.__endUTCSecond * self.__sample_rate:
490 490 return False
491 491 self.__thisUnixSample -= self.__samples_to_read
492 492
493 493 indexChannel = 0
494 494
495 495 dataOk = False
496 496 for thisChannelName in self.__channelNameList: # TODO VARIOS CHANNELS?
497 497 for indexSubchannel in range(self.__num_subchannels):
498 498 try:
499 499 t0 = time()
500 500 result = self.digitalReadObj.read_vector_c81d(self.__thisUnixSample,
501 501 self.__samples_to_read,
502 502 thisChannelName, sub_channel=indexSubchannel)
503 503 self.executionTime = time() - t0
504 504 if self.oldAverage is None:
505 505 self.oldAverage = self.executionTime
506 506 self.oldAverage = (
507 507 self.executionTime + self.count * self.oldAverage) / (self.count + 1.0)
508 508 self.count = self.count + 1.0
509 509
510 510 except IOError, e:
511 511 # read next profile
512 512 self.__flagDiscontinuousBlock = True
513 513 print "[Reading] %s" % datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone), e
514 514 break
515 515
516 516 if result.shape[0] != self.__samples_to_read:
517 517 self.__flagDiscontinuousBlock = True
518 518 print "[Reading] %s: Too few samples were found, just %d/%d samples" % (datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
519 519 result.shape[0],
520 520 self.__samples_to_read)
521 521 break
522 522
523 523 self.__data_buffer[indexSubchannel, :] = result * volt_scale
524 524
525 525 indexChannel += 1
526 526
527 527 dataOk = True
528 528
529 529 self.__utctime = self.__thisUnixSample / self.__sample_rate
530 530
531 531 if not dataOk:
532 532 return False
533 533
534 534 print "[Reading] %s: %d samples <> %f sec" % (datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
535 535 self.__samples_to_read,
536 536 self.__timeInterval)
537 537
538 538 self.__bufferIndex = 0
539 539
540 540 return True
541 541
542 542 def __isBufferEmpty(self):
543 543 return self.__bufferIndex > self.__samples_to_read - self.__nSamples # 40960 - 40
544 544
545 545 def getData(self, seconds=30, nTries=5):
546 546 '''
547 547 This method gets the data from files and put the data into the dataOut object
548 548
549 549 In addition, increase el the buffer counter in one.
550 550
551 551 Return:
552 552 data : retorna un perfil de voltages (alturas * canales) copiados desde el
553 553 buffer. Si no hay mas archivos a leer retorna None.
554 554
555 555 Affected:
556 556 self.dataOut
557 557 self.profileIndex
558 558 self.flagDiscontinuousBlock
559 559 self.flagIsNewBlock
560 560 '''
561 561
562 562 err_counter = 0
563 563 self.dataOut.flagNoData = True
564 564
565 565 if self.__isBufferEmpty():
566 566 self.__flagDiscontinuousBlock = False
567 567
568 568 while True:
569 569 if self.__readNextBlock():
570 570 break
571 571 if self.__thisUnixSample > self.__endUTCSecond * self.__sample_rate:
572 572 return False
573 573
574 574 if self.__flagDiscontinuousBlock:
575 575 print '[Reading] discontinuous block found ... continue with the next block'
576 576 continue
577 577
578 578 if not self.__online:
579 579 return False
580 580
581 581 err_counter += 1
582 582 if err_counter > nTries:
583 583 return False
584 584
585 585 print '[Reading] waiting %d seconds to read a new block' % seconds
586 586 sleep(seconds)
587 587
588 588 self.dataOut.data = self.__data_buffer[:,
589 589 self.__bufferIndex:self.__bufferIndex + self.__nSamples]
590 590 self.dataOut.utctime = (
591 591 self.__thisUnixSample + self.__bufferIndex) / self.__sample_rate
592 592 self.dataOut.flagNoData = False
593 593 self.dataOut.flagDiscontinuousBlock = self.__flagDiscontinuousBlock
594 594 self.dataOut.profileIndex = self.profileIndex
595 595
596 596 self.__bufferIndex += self.__nSamples
597 597 self.profileIndex += 1
598 598
599 599 if self.profileIndex == self.dataOut.nProfiles:
600 600 self.profileIndex = 0
601 601
602 602 return True
603 603
604 604 def printInfo(self):
605 605 '''
606 606 '''
607 607 if self.__printInfo == False:
608 608 return
609 609
610 610 # self.systemHeaderObj.printInfo()
611 611 # self.radarControllerHeaderObj.printInfo()
612 612
613 613 self.__printInfo = False
614 614
615 615 def printNumberOfBlock(self):
616 616 '''
617 617 '''
618 618 return
619 619 # print self.profileIndex
620 620
621 621 def run(self, **kwargs):
622 622 '''
623 623 This method will be called many times so here you should put all your code
624 624 '''
625 625
626 626 if not self.isConfig:
627 627 self.setup(**kwargs)
628 628 #self.i = self.i+1
629 629 self.getData(seconds=self.__delay)
630 630
631 631 return
632 632
633 633
634 634 class DigitalRFWriter(Operation):
635 635 '''
636 636 classdocs
637 637 '''
638 638
639 639 def __init__(self, **kwargs):
640 640 '''
641 641 Constructor
642 642 '''
643 643 Operation.__init__(self, **kwargs)
644 644 self.metadata_dict = {}
645 645 self.dataOut = None
646 646 self.dtype = None
647 647 self.oldAverage = 0
648 648
649 649 def setHeader(self):
650 650
651 651 self.metadata_dict['frequency'] = self.dataOut.frequency
652 652 self.metadata_dict['timezone'] = self.dataOut.timeZone
653 653 self.metadata_dict['dtype'] = cPickle.dumps(self.dataOut.dtype)
654 654 self.metadata_dict['nProfiles'] = self.dataOut.nProfiles
655 655 self.metadata_dict['heightList'] = self.dataOut.heightList
656 656 self.metadata_dict['channelList'] = self.dataOut.channelList
657 657 self.metadata_dict['flagDecodeData'] = self.dataOut.flagDecodeData
658 658 self.metadata_dict['flagDeflipData'] = self.dataOut.flagDeflipData
659 659 self.metadata_dict['flagShiftFFT'] = self.dataOut.flagShiftFFT
660 self.metadata_dict['flagDataAsBlock'] = self.dataOut.flagDataAsBlock
661 660 self.metadata_dict['useLocalTime'] = self.dataOut.useLocalTime
662 661 self.metadata_dict['nCohInt'] = self.dataOut.nCohInt
663
664 return
662 self.metadata_dict['type'] = self.dataOut.type
663 self.metadata_dict['flagDataAsBlock'] = getattr(
664 self.dataOut, 'flagDataAsBlock', None) # chequear
665 665
666 666 def setup(self, dataOut, path, frequency, fileCadence, dirCadence, metadataCadence, set=0, metadataFile='metadata', ext='.h5'):
667 667 '''
668 668 In this method we should set all initial parameters.
669 669 Input:
670 670 dataOut: Input data will also be outputa data
671 671 '''
672 672 self.setHeader()
673 673 self.__ippSeconds = dataOut.ippSeconds
674 674 self.__deltaH = dataOut.getDeltaH()
675 675 self.__sample_rate = 1e6 * 0.15 / self.__deltaH
676 676 self.__dtype = dataOut.dtype
677 677 if len(dataOut.dtype) == 2:
678 678 self.__dtype = dataOut.dtype[0]
679 679 self.__nSamples = dataOut.systemHeaderObj.nSamples
680 680 self.__nProfiles = dataOut.nProfiles
681 self.__blocks_per_file = dataOut.processingHeaderObj.dataBlocksPerFile
682 681
683 self.arr_data = arr_data = numpy.ones((self.__nSamples, len(
684 self.dataOut.channelList)), dtype=[('r', self.__dtype), ('i', self.__dtype)])
682 if self.dataOut.type != 'Voltage':
683 raise ValueError, 'Digital RF cannot be used with this data type'
684 self.arr_data = numpy.ones((1, dataOut.nFFTPoints * len(
685 self.dataOut.channelList)), dtype=[('r', self.__dtype), ('i', self.__dtype)])
686 else:
687 self.arr_data = numpy.ones((self.__nSamples, len(
688 self.dataOut.channelList)), dtype=[('r', self.__dtype), ('i', self.__dtype)])
685 689
686 690 file_cadence_millisecs = 1000
687 691
688 692 sample_rate_fraction = Fraction(self.__sample_rate).limit_denominator()
689 693 sample_rate_numerator = long(sample_rate_fraction.numerator)
690 694 sample_rate_denominator = long(sample_rate_fraction.denominator)
691 695 start_global_index = dataOut.utctime * self.__sample_rate
692 696
693 697 uuid = 'prueba'
694 698 compression_level = 0
695 699 checksum = False
696 700 is_complex = True
697 701 num_subchannels = len(dataOut.channelList)
698 702 is_continuous = True
699 703 marching_periods = False
700 704
701 705 self.digitalWriteObj = digital_rf.DigitalRFWriter(path, self.__dtype, dirCadence,
702 706 fileCadence, start_global_index,
703 707 sample_rate_numerator, sample_rate_denominator, uuid, compression_level, checksum,
704 708 is_complex, num_subchannels, is_continuous, marching_periods)
705
706 709 metadata_dir = os.path.join(path, 'metadata')
707 710 os.system('mkdir %s' % (metadata_dir))
708
709 711 self.digitalMetadataWriteObj = digital_rf.DigitalMetadataWriter(metadata_dir, dirCadence, 1, # 236, file_cadence_millisecs / 1000
710 712 sample_rate_numerator, sample_rate_denominator,
711 713 metadataFile)
712
713 714 self.isConfig = True
714 715 self.currentSample = 0
715 716 self.oldAverage = 0
716 717 self.count = 0
717 718 return
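# A minimal sketch (illustrative values only, not part of the class) of the
# sample-rate relation used in setup() above: 0.15 km per microsecond is c/2,
# so the Digital RF sample rate is simply fs = c / (2 * deltaH).
SPEED_OF_LIGHT = 3e8                  # m/s, the approximation used in this project
deltaH_km = 0.15                      # hypothetical height (range) resolution in km
fs = 1e6 * 0.15 / deltaH_km           # formula from setup() -> 1.0 MHz
assert abs(fs - SPEED_OF_LIGHT / (2 * deltaH_km * 1e3)) < 1e-6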
718 719
719 720 def writeMetadata(self):
720 print '[Writing] - Writing metadata'
721 721 start_idx = self.__sample_rate * self.dataOut.utctime
722 722
723 723 self.metadata_dict['processingHeader'] = self.dataOut.processingHeaderObj.getAsDict(
724 724 )
725 725 self.metadata_dict['radarControllerHeader'] = self.dataOut.radarControllerHeaderObj.getAsDict(
726 726 )
727 727 self.metadata_dict['systemHeader'] = self.dataOut.systemHeaderObj.getAsDict(
728 728 )
729 729 self.digitalMetadataWriteObj.write(start_idx, self.metadata_dict)
730 730 return
731 731
732 732 def timeit(self, toExecute):
733 733 t0 = time()
734 734 toExecute()
735 735 self.executionTime = time() - t0
736 736 if self.oldAverage is None:
737 737 self.oldAverage = self.executionTime
738 738 self.oldAverage = (self.executionTime + self.count *
739 739 self.oldAverage) / (self.count + 1.0)
740 740 self.count = self.count + 1.0
741 741 return
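# A small numeric sketch (not part of the class) of the running mean kept by
# timeit(): after k calls, oldAverage equals the mean of the k measured
# execution times, since avg_k = (t_k + (k - 1) * avg_{k-1}) / k.
times = [0.10, 0.20, 0.30]            # hypothetical execution times in seconds
avg, count = 0.0, 0.0
for t in times:
    avg = (t + count * avg) / (count + 1.0)
    count += 1.0
# avg is now ~0.20, the mean of the three samples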
742 742
743 743 def writeData(self):
744 for i in range(self.dataOut.systemHeaderObj.nSamples):
744 if self.dataOut.type != 'Voltage':
745 raise ValueError, 'Digital RF cannot be used with this data type'
745 746 for channel in self.dataOut.channelList:
746 self.arr_data[i][channel]['r'] = self.dataOut.data[channel][i].real
747 self.arr_data[i][channel]['i'] = self.dataOut.data[channel][i].imag
747 for i in range(self.dataOut.nFFTPoints):
748 self.arr_data[0][channel * self.dataOut.nFFTPoints +
749 i]['r'] = self.dataOut.data[channel][i].real
750 self.arr_data[0][channel * self.dataOut.nFFTPoints +
751 i]['i'] = self.dataOut.data[channel][i].imag
752 else:
753 for i in range(self.dataOut.systemHeaderObj.nSamples):
754 for channel in self.dataOut.channelList:
755 self.arr_data[i][channel]['r'] = self.dataOut.data[channel][i].real
756 self.arr_data[i][channel]['i'] = self.dataOut.data[channel][i].imag
748 757
749 758 def f(): return self.digitalWriteObj.rf_write(self.arr_data)
750 759 self.timeit(f)
751 760
752 761 return
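# A vectorized sketch (assumed shapes and dtype, not the method above) of the same
# packing that writeData() does element by element for Voltage data: a
# (nChannels, nSamples) complex block is transposed to (nSamples, nChannels) and
# split into the structured ('r', 'i') fields expected by rf_write().
import numpy
nSamples, nChannels = 100, 2                      # hypothetical sizes
data = (numpy.random.randn(nChannels, nSamples) +
        1j * numpy.random.randn(nChannels, nSamples))
arr = numpy.empty((nSamples, nChannels), dtype=[('r', '<f4'), ('i', '<f4')])  # mirrors self.__dtype
arr['r'] = data.real.T
arr['i'] = data.imag.T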
753 762
754 763 def run(self, dataOut, frequency=49.92e6, path=None, fileCadence=1000, dirCadence=36000, metadataCadence=1, **kwargs):
755 764 '''
756 765 This method will be called many times, so all the per-block processing code should go here
757 766 Inputs:
758 767 dataOut: object with the data
759 768 '''
760 769 # print dataOut.__dict__
761 770 self.dataOut = dataOut
762 771 if not self.isConfig:
763 772 self.setup(dataOut, path, frequency, fileCadence,
764 773 dirCadence, metadataCadence, **kwargs)
765 774 self.writeMetadata()
766 775
767 776 self.writeData()
768 777
769 778 ## self.currentSample += 1
770 779 # if self.dataOut.flagDataAsBlock or self.currentSample == 1:
771 780 # self.writeMetadata()
772 781 ## if self.currentSample == self.__nProfiles: self.currentSample = 0
773 782
774 783 def close(self):
775 784 print '[Writing] - Closing files '
776 785 print 'Average time writing to Digital RF format is ', self.oldAverage * 1000, 'ms'
777 786 try:
778 787 self.digitalWriteObj.close()
779 788 except:
780 789 pass
781 790
782 791
783 792 # raise
784 793 if __name__ == '__main__':
785 794
786 795 readObj = DigitalRFReader()
787 796
788 797 while True:
789 798 readObj.run(path='/home/jchavez/jicamarca/mocked_data/')
790 799 # readObj.printInfo()
791 800 # readObj.printNumberOfBlock()
@@ -1,901 +1,943
1 1 import itertools
2 2
3 3 import numpy
4 4
5 5 from jroproc_base import ProcessingUnit, Operation
6 6 from schainpy.model.data.jrodata import Spectra
7 7 from schainpy.model.data.jrodata import hildebrand_sekhon
8 8
9
9 10 class SpectraProc(ProcessingUnit):
10 11
11 12 def __init__(self, **kwargs):
12 13
13 14 ProcessingUnit.__init__(self, **kwargs)
14 15
15 16 self.buffer = None
16 17 self.firstdatatime = None
17 18 self.profIndex = 0
18 19 self.dataOut = Spectra()
19 20 self.id_min = None
20 21 self.id_max = None
21 22
22 23 def __updateSpecFromVoltage(self):
23 24
24 25 self.dataOut.timeZone = self.dataIn.timeZone
25 26 self.dataOut.dstFlag = self.dataIn.dstFlag
26 27 self.dataOut.errorCount = self.dataIn.errorCount
27 28 self.dataOut.useLocalTime = self.dataIn.useLocalTime
28
29 try:
30 self.dataOut.processingHeaderObj = self.dataIn.processingHeaderObj.copy()
31 except:
32 pass
29 33 self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()
30 34 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()
31 35 self.dataOut.channelList = self.dataIn.channelList
32 36 self.dataOut.heightList = self.dataIn.heightList
33 self.dataOut.dtype = numpy.dtype([('real','<f4'),('imag','<f4')])
37 self.dataOut.dtype = numpy.dtype([('real', '<f4'), ('imag', '<f4')])
34 38
35 39 self.dataOut.nBaud = self.dataIn.nBaud
36 40 self.dataOut.nCode = self.dataIn.nCode
37 41 self.dataOut.code = self.dataIn.code
38 42 self.dataOut.nProfiles = self.dataOut.nFFTPoints
39 43
40 44 self.dataOut.flagDiscontinuousBlock = self.dataIn.flagDiscontinuousBlock
41 45 self.dataOut.utctime = self.firstdatatime
42 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData # assume the data is already decoded
43 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData # assume the data has no flip applied
46 # assume the data is already decoded
47 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData
48 # assume the data has no flip applied
49 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData
44 50 self.dataOut.flagShiftFFT = False
45 51
46 52 self.dataOut.nCohInt = self.dataIn.nCohInt
47 53 self.dataOut.nIncohInt = 1
48 54
49 55 self.dataOut.windowOfFilter = self.dataIn.windowOfFilter
50 56
51 57 self.dataOut.frequency = self.dataIn.frequency
52 58 self.dataOut.realtime = self.dataIn.realtime
53 59
54 60 self.dataOut.azimuth = self.dataIn.azimuth
55 61 self.dataOut.zenith = self.dataIn.zenith
56 62
57 63 self.dataOut.beam.codeList = self.dataIn.beam.codeList
58 64 self.dataOut.beam.azimuthList = self.dataIn.beam.azimuthList
59 65 self.dataOut.beam.zenithList = self.dataIn.beam.zenithList
60 66
61 67 def __getFft(self):
62 68 """
63 69 Converts Voltage values to Spectra
64 70
65 71 Affected:
66 72 self.dataOut.data_spc
67 73 self.dataOut.data_cspc
68 74 self.dataOut.data_dc
69 75 self.dataOut.heightList
70 76 self.profIndex
71 77 self.buffer
72 78 self.dataOut.flagNoData
73 79 """
74 fft_volt = numpy.fft.fft(self.buffer,n=self.dataOut.nFFTPoints,axis=1)
80 fft_volt = numpy.fft.fft(
81 self.buffer, n=self.dataOut.nFFTPoints, axis=1)
75 82 fft_volt = fft_volt.astype(numpy.dtype('complex'))
76 dc = fft_volt[:,0,:]
83 dc = fft_volt[:, 0, :]
77 84
78 # self-spectra computation
79 fft_volt = numpy.fft.fftshift(fft_volt,axes=(1,))
85 # self-spectra computation
86 fft_volt = numpy.fft.fftshift(fft_volt, axes=(1,))
80 87 spc = fft_volt * numpy.conjugate(fft_volt)
81 88 spc = spc.real
82 89
83 90 blocksize = 0
84 91 blocksize += dc.size
85 92 blocksize += spc.size
86 93
87 94 cspc = None
88 95 pairIndex = 0
89 96 if self.dataOut.pairsList != None:
90 # cross-spectra computation
91 cspc = numpy.zeros((self.dataOut.nPairs, self.dataOut.nFFTPoints, self.dataOut.nHeights), dtype='complex')
97 # cross-spectra computation
98 cspc = numpy.zeros(
99 (self.dataOut.nPairs, self.dataOut.nFFTPoints, self.dataOut.nHeights), dtype='complex')
92 100 for pair in self.dataOut.pairsList:
93 101 if pair[0] not in self.dataOut.channelList:
94 raise ValueError, "Error getting CrossSpectra: pair 0 of %s is not in channelList = %s" %(str(pair), str(self.dataOut.channelList))
102 raise ValueError, "Error getting CrossSpectra: pair 0 of %s is not in channelList = %s" % (
103 str(pair), str(self.dataOut.channelList))
95 104 if pair[1] not in self.dataOut.channelList:
96 raise ValueError, "Error getting CrossSpectra: pair 1 of %s is not in channelList = %s" %(str(pair), str(self.dataOut.channelList))
105 raise ValueError, "Error getting CrossSpectra: pair 1 of %s is not in channelList = %s" % (
106 str(pair), str(self.dataOut.channelList))
97 107
98 cspc[pairIndex,:,:] = fft_volt[pair[0],:,:] * numpy.conjugate(fft_volt[pair[1],:,:])
108 cspc[pairIndex, :, :] = fft_volt[pair[0], :, :] * \
109 numpy.conjugate(fft_volt[pair[1], :, :])
99 110 pairIndex += 1
100 111 blocksize += cspc.size
101 112
102 113 self.dataOut.data_spc = spc
103 114 self.dataOut.data_cspc = cspc
104 115 self.dataOut.data_dc = dc
105 116 self.dataOut.blockSize = blocksize
106 117 self.dataOut.flagShiftFFT = True
107 118
108 119 def run(self, nProfiles=None, nFFTPoints=None, pairsList=[], ippFactor=None):
109 120
110 121 self.dataOut.flagNoData = True
111 122
112 123 if self.dataIn.type == "Spectra":
113 124 self.dataOut.copy(self.dataIn)
114 125 if not pairsList:
115 pairsList = itertools.combinations(self.dataOut.channelList, 2)
126 pairsList = itertools.combinations(self.dataOut.channelList, 2)
116 127 if self.dataOut.data_cspc is not None:
117 128 self.__selectPairs(pairsList)
118 129 return True
119 130
120 131 if self.dataIn.type == "Voltage":
121 132
122 133 if nFFTPoints == None:
123 134 raise ValueError, "This SpectraProc.run() need nFFTPoints input variable"
124 135
125 136 if nProfiles == None:
126 137 nProfiles = nFFTPoints
127 138
128 139 if ippFactor == None:
129 140 ippFactor = 1
130 141
131 142 self.dataOut.ippFactor = ippFactor
132 143
133 144 self.dataOut.nFFTPoints = nFFTPoints
134 145 self.dataOut.pairsList = pairsList
135 146
136 147 if self.buffer is None:
137 self.buffer = numpy.zeros( (self.dataIn.nChannels,
138 nProfiles,
139 self.dataIn.nHeights),
148 self.buffer = numpy.zeros((self.dataIn.nChannels,
149 nProfiles,
150 self.dataIn.nHeights),
140 151 dtype='complex')
141 152
142 153 if self.dataIn.flagDataAsBlock:
143 #data dimension: [nChannels, nProfiles, nSamples]
154 # data dimension: [nChannels, nProfiles, nSamples]
144 155 nVoltProfiles = self.dataIn.data.shape[1]
145 156 # nVoltProfiles = self.dataIn.nProfiles
146 157
147 158 if nVoltProfiles == nProfiles:
148 159 self.buffer = self.dataIn.data.copy()
149 160 self.profIndex = nVoltProfiles
150 161
151 162 elif nVoltProfiles < nProfiles:
152 163
153 164 if self.profIndex == 0:
154 165 self.id_min = 0
155 166 self.id_max = nVoltProfiles
156 167
157 self.buffer[:,self.id_min:self.id_max,:] = self.dataIn.data
168 self.buffer[:, self.id_min:self.id_max,
169 :] = self.dataIn.data
158 170 self.profIndex += nVoltProfiles
159 171 self.id_min += nVoltProfiles
160 172 self.id_max += nVoltProfiles
161 173 else:
162 raise ValueError, "The type object %s has %d profiles, it should just has %d profiles"%(self.dataIn.type,self.dataIn.data.shape[1],nProfiles)
174 raise ValueError, "The object of type %s has %d profiles; it should have exactly %d profiles" % (
175 self.dataIn.type, self.dataIn.data.shape[1], nProfiles)
163 176 self.dataOut.flagNoData = True
164 177 return 0
165 178 else:
166 self.buffer[:,self.profIndex,:] = self.dataIn.data.copy()
179 self.buffer[:, self.profIndex, :] = self.dataIn.data.copy()
167 180 self.profIndex += 1
168 181
169 182 if self.firstdatatime == None:
170 183 self.firstdatatime = self.dataIn.utctime
171 184
172 185 if self.profIndex == nProfiles:
173 186 self.__updateSpecFromVoltage()
174 187 self.__getFft()
175 188
176 189 self.dataOut.flagNoData = False
177 190 self.firstdatatime = None
178 191 self.profIndex = 0
179 192
180 193 return True
181 194
182 raise ValueError, "The type of input object '%s' is not valid"%(self.dataIn.type)
195 raise ValueError, "The type of input object '%s' is not valid" % (
196 self.dataIn.type)
183 197
184 198 def __selectPairs(self, pairsList):
185 199
186 if not pairsList:
200 if not pairsList:
187 201 return
188 202
189 203 pairs = []
190 204 pairsIndex = []
191 205
192 206 for pair in pairsList:
193 207 if pair[0] not in self.dataOut.channelList or pair[1] not in self.dataOut.channelList:
194 208 continue
195 209 pairs.append(pair)
196 210 pairsIndex.append(pairs.index(pair))
197
211
198 212 self.dataOut.data_cspc = self.dataOut.data_cspc[pairsIndex]
199 213 self.dataOut.pairsList = pairs
200 214
201 215 return
202 216
203 217 def __selectPairsByChannel(self, channelList=None):
204 218
205 219 if channelList == None:
206 220 return
207 221
208 222 pairsIndexListSelected = []
209 223 for pairIndex in self.dataOut.pairsIndexList:
210 #First pair
224 # First pair
211 225 if self.dataOut.pairsList[pairIndex][0] not in channelList:
212 226 continue
213 #Second pair
227 # Second pair
214 228 if self.dataOut.pairsList[pairIndex][1] not in channelList:
215 229 continue
216 230
217 231 pairsIndexListSelected.append(pairIndex)
218 232
219 233 if not pairsIndexListSelected:
220 234 self.dataOut.data_cspc = None
221 235 self.dataOut.pairsList = []
222 236 return
223 237
224 238 self.dataOut.data_cspc = self.dataOut.data_cspc[pairsIndexListSelected]
225 self.dataOut.pairsList = [self.dataOut.pairsList[i] for i in pairsIndexListSelected]
239 self.dataOut.pairsList = [self.dataOut.pairsList[i]
240 for i in pairsIndexListSelected]
226 241
227 242 return
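# A small self-contained sketch (hypothetical values) of the pair filtering done by
# the pair-selection helpers above: keep only the pairs whose two channels survive
# a channel selection.
channelList = [0, 2, 3]
pairsList = [(0, 1), (0, 2), (2, 3)]
kept = [p for p in pairsList if p[0] in channelList and p[1] in channelList]
# kept == [(0, 2), (2, 3)]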
228 243
229 244 def selectChannels(self, channelList):
230 245
231 246 channelIndexList = []
232 247
233 248 for channel in channelList:
234 249 if channel not in self.dataOut.channelList:
235 raise ValueError, "Error selecting channels, Channel %d is not valid.\nAvailable channels = %s" %(channel, str(self.dataOut.channelList))
250 raise ValueError, "Error selecting channels, Channel %d is not valid.\nAvailable channels = %s" % (
251 channel, str(self.dataOut.channelList))
236 252
237 253 index = self.dataOut.channelList.index(channel)
238 254 channelIndexList.append(index)
239 255
240 256 self.selectChannelsByIndex(channelIndexList)
241 257
242 258 def selectChannelsByIndex(self, channelIndexList):
243 259 """
244 260 Selects a block of data by channels according to channelIndexList
245 261
246 262 Input:
247 263 channelIndexList : simple list of channel indexes to select, e.g. [2,3,7]
248 264
249 265 Affected:
250 266 self.dataOut.data_spc
251 267 self.dataOut.channelIndexList
252 268 self.dataOut.nChannels
253 269
254 270 Return:
255 271 None
256 272 """
257 273
258 274 for channelIndex in channelIndexList:
259 275 if channelIndex not in self.dataOut.channelIndexList:
260 raise ValueError, "Error selecting channels: The value %d in channelIndexList is not valid.\nAvailable channel indexes = " %(channelIndex, self.dataOut.channelIndexList)
276 raise ValueError, "Error selecting channels: The value %d in channelIndexList is not valid.\nAvailable channel indexes = %s" % (
277 channelIndex, self.dataOut.channelIndexList)
261 278
262 279 # nChannels = len(channelIndexList)
263 280
264 data_spc = self.dataOut.data_spc[channelIndexList,:]
265 data_dc = self.dataOut.data_dc[channelIndexList,:]
281 data_spc = self.dataOut.data_spc[channelIndexList, :]
282 data_dc = self.dataOut.data_dc[channelIndexList, :]
266 283
267 284 self.dataOut.data_spc = data_spc
268 285 self.dataOut.data_dc = data_dc
269 286
270 self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
287 self.dataOut.channelList = [
288 self.dataOut.channelList[i] for i in channelIndexList]
271 289 # self.dataOut.nChannels = nChannels
272 290
273 291 self.__selectPairsByChannel(self.dataOut.channelList)
274 292
275 293 return 1
276 294
277 295 def selectHeights(self, minHei, maxHei):
278 296 """
279 297 Selects a block of data by a set of height values within the range
280 298 minHei <= height <= maxHei
281 299
282 300 Input:
283 301 minHei : minimum height value to consider
284 302 maxHei : maximum height value to consider
285 303
286 304 Affected:
287 305 Several values are changed indirectly through the selectHeightsByIndex method
288 306
289 307 Return:
290 308 1 if the method executed successfully, otherwise returns 0
291 309 """
292 310
293 311 if (minHei > maxHei):
294 raise ValueError, "Error selecting heights: Height range (%d,%d) is not valid" % (minHei, maxHei)
312 raise ValueError, "Error selecting heights: Height range (%d,%d) is not valid" % (
313 minHei, maxHei)
295 314
296 315 if (minHei < self.dataOut.heightList[0]):
297 316 minHei = self.dataOut.heightList[0]
298 317
299 318 if (maxHei > self.dataOut.heightList[-1]):
300 319 maxHei = self.dataOut.heightList[-1]
301 320
302 321 minIndex = 0
303 322 maxIndex = 0
304 323 heights = self.dataOut.heightList
305 324
306 325 inda = numpy.where(heights >= minHei)
307 326 indb = numpy.where(heights <= maxHei)
308 327
309 328 try:
310 329 minIndex = inda[0][0]
311 330 except:
312 331 minIndex = 0
313 332
314 333 try:
315 334 maxIndex = indb[0][-1]
316 335 except:
317 336 maxIndex = len(heights)
318 337
319 338 self.selectHeightsByIndex(minIndex, maxIndex)
320 339
321 340 return 1
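# A self-contained sketch (hypothetical height grid) of the index lookup used by
# selectHeights(): clamp the requested range to the available heightList and turn
# it into the (minIndex, maxIndex) pair passed to selectHeightsByIndex().
import numpy
heightList = numpy.arange(80.0, 580.0, 5.0)     # km, illustrative
minHei, maxHei = 100.0, 400.0
minHei = max(minHei, heightList[0])
maxHei = min(maxHei, heightList[-1])
minIndex = numpy.where(heightList >= minHei)[0][0]
maxIndex = numpy.where(heightList <= maxHei)[0][-1]
# data would then be sliced with [..., minIndex:maxIndex + 1]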
322 341
323 def getBeaconSignal(self, tauindex = 0, channelindex = 0, hei_ref=None):
324 newheis = numpy.where(self.dataOut.heightList>self.dataOut.radarControllerHeaderObj.Taus[tauindex])
342 def getBeaconSignal(self, tauindex=0, channelindex=0, hei_ref=None):
343 newheis = numpy.where(
344 self.dataOut.heightList > self.dataOut.radarControllerHeaderObj.Taus[tauindex])
325 345
326 346 if hei_ref != None:
327 newheis = numpy.where(self.dataOut.heightList>hei_ref)
347 newheis = numpy.where(self.dataOut.heightList > hei_ref)
328 348
329 349 minIndex = min(newheis[0])
330 350 maxIndex = max(newheis[0])
331 data_spc = self.dataOut.data_spc[:,:,minIndex:maxIndex+1]
332 heightList = self.dataOut.heightList[minIndex:maxIndex+1]
351 data_spc = self.dataOut.data_spc[:, :, minIndex:maxIndex + 1]
352 heightList = self.dataOut.heightList[minIndex:maxIndex + 1]
333 353
334 354 # determine indices
335 nheis = int(self.dataOut.radarControllerHeaderObj.txB/(self.dataOut.heightList[1]-self.dataOut.heightList[0]))
336 avg_dB = 10*numpy.log10(numpy.sum(data_spc[channelindex,:,:],axis=0))
355 nheis = int(self.dataOut.radarControllerHeaderObj.txB /
356 (self.dataOut.heightList[1] - self.dataOut.heightList[0]))
357 avg_dB = 10 * \
358 numpy.log10(numpy.sum(data_spc[channelindex, :, :], axis=0))
337 359 beacon_dB = numpy.sort(avg_dB)[-nheis:]
338 360 beacon_heiIndexList = []
339 361 for val in avg_dB.tolist():
340 362 if val >= beacon_dB[0]:
341 363 beacon_heiIndexList.append(avg_dB.tolist().index(val))
342 364
343 365 #data_spc = data_spc[:,:,beacon_heiIndexList]
344 366 data_cspc = None
345 367 if self.dataOut.data_cspc is not None:
346 data_cspc = self.dataOut.data_cspc[:,:,minIndex:maxIndex+1]
368 data_cspc = self.dataOut.data_cspc[:, :, minIndex:maxIndex + 1]
347 369 #data_cspc = data_cspc[:,:,beacon_heiIndexList]
348 370
349 371 data_dc = None
350 372 if self.dataOut.data_dc is not None:
351 data_dc = self.dataOut.data_dc[:,minIndex:maxIndex+1]
373 data_dc = self.dataOut.data_dc[:, minIndex:maxIndex + 1]
352 374 #data_dc = data_dc[:,beacon_heiIndexList]
353 375
354 376 self.dataOut.data_spc = data_spc
355 377 self.dataOut.data_cspc = data_cspc
356 378 self.dataOut.data_dc = data_dc
357 379 self.dataOut.heightList = heightList
358 380 self.dataOut.beacon_heiIndexList = beacon_heiIndexList
359 381
360 382 return 1
361 383
362
363 384 def selectHeightsByIndex(self, minIndex, maxIndex):
364 385 """
365 386 Selects a block of data by a set of height indexes within the range
366 387 minIndex <= index <= maxIndex
367 388
368 389 Input:
369 390 minIndex : minimum height index to consider
370 391 maxIndex : maximum height index to consider
371 392
372 393 Affected:
373 394 self.dataOut.data_spc
374 395 self.dataOut.data_cspc
375 396 self.dataOut.data_dc
376 397 self.dataOut.heightList
377 398
378 399 Return:
379 400 1 if the method executed successfully, otherwise returns 0
380 401 """
381 402
382 403 if (minIndex < 0) or (minIndex > maxIndex):
383 raise ValueError, "Error selecting heights: Index range (%d,%d) is not valid" % (minIndex, maxIndex)
404 raise ValueError, "Error selecting heights: Index range (%d,%d) is not valid" % (
405 minIndex, maxIndex)
384 406
385 407 if (maxIndex >= self.dataOut.nHeights):
386 maxIndex = self.dataOut.nHeights-1
408 maxIndex = self.dataOut.nHeights - 1
387 409
388 #Spectra
389 data_spc = self.dataOut.data_spc[:,:,minIndex:maxIndex+1]
410 # Spectra
411 data_spc = self.dataOut.data_spc[:, :, minIndex:maxIndex + 1]
390 412
391 413 data_cspc = None
392 414 if self.dataOut.data_cspc is not None:
393 data_cspc = self.dataOut.data_cspc[:,:,minIndex:maxIndex+1]
415 data_cspc = self.dataOut.data_cspc[:, :, minIndex:maxIndex + 1]
394 416
395 417 data_dc = None
396 418 if self.dataOut.data_dc is not None:
397 data_dc = self.dataOut.data_dc[:,minIndex:maxIndex+1]
419 data_dc = self.dataOut.data_dc[:, minIndex:maxIndex + 1]
398 420
399 421 self.dataOut.data_spc = data_spc
400 422 self.dataOut.data_cspc = data_cspc
401 423 self.dataOut.data_dc = data_dc
402 424
403 self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex+1]
425 self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex + 1]
404 426
405 427 return 1
406 428
407 def removeDC(self, mode = 2):
429 def removeDC(self, mode=2):
408 430 jspectra = self.dataOut.data_spc
409 431 jcspectra = self.dataOut.data_cspc
410 432
411
412 433 num_chan = jspectra.shape[0]
413 434 num_hei = jspectra.shape[2]
414 435
415 436 if jcspectra is not None:
416 437 jcspectraExist = True
417 438 num_pairs = jcspectra.shape[0]
418 else: jcspectraExist = False
439 else:
440 jcspectraExist = False
419 441
420 freq_dc = jspectra.shape[1]/2
421 ind_vel = numpy.array([-2,-1,1,2]) + freq_dc
442 freq_dc = jspectra.shape[1] / 2
443 ind_vel = numpy.array([-2, -1, 1, 2]) + freq_dc
422 444
423 if ind_vel[0]<0:
424 ind_vel[range(0,1)] = ind_vel[range(0,1)] + self.num_prof
445 if ind_vel[0] < 0:
446 ind_vel[range(0, 1)] = ind_vel[range(0, 1)] + self.num_prof
425 447
426 448 if mode == 1:
427 jspectra[:,freq_dc,:] = (jspectra[:,ind_vel[1],:] + jspectra[:,ind_vel[2],:])/2 # CORRECTION
449 jspectra[:, freq_dc, :] = (
450 jspectra[:, ind_vel[1], :] + jspectra[:, ind_vel[2], :]) / 2 # CORRECTION
428 451
429 452 if jcspectraExist:
430 jcspectra[:,freq_dc,:] = (jcspectra[:,ind_vel[1],:] + jcspectra[:,ind_vel[2],:])/2
453 jcspectra[:, freq_dc, :] = (
454 jcspectra[:, ind_vel[1], :] + jcspectra[:, ind_vel[2], :]) / 2
431 455
432 456 if mode == 2:
433 457
434 vel = numpy.array([-2,-1,1,2])
435 xx = numpy.zeros([4,4])
458 vel = numpy.array([-2, -1, 1, 2])
459 xx = numpy.zeros([4, 4])
436 460
437 461 for fil in range(4):
438 xx[fil,:] = vel[fil]**numpy.asarray(range(4))
462 xx[fil, :] = vel[fil]**numpy.asarray(range(4))
439 463
440 464 xx_inv = numpy.linalg.inv(xx)
441 xx_aux = xx_inv[0,:]
465 xx_aux = xx_inv[0, :]
442 466
443 467 for ich in range(num_chan):
444 yy = jspectra[ich,ind_vel,:]
445 jspectra[ich,freq_dc,:] = numpy.dot(xx_aux,yy)
468 yy = jspectra[ich, ind_vel, :]
469 jspectra[ich, freq_dc, :] = numpy.dot(xx_aux, yy)
446 470
447 junkid = jspectra[ich,freq_dc,:]<=0
471 junkid = jspectra[ich, freq_dc, :] <= 0
448 472 cjunkid = sum(junkid)
449 473
450 474 if cjunkid.any():
451 jspectra[ich,freq_dc,junkid.nonzero()] = (jspectra[ich,ind_vel[1],junkid] + jspectra[ich,ind_vel[2],junkid])/2
475 jspectra[ich, freq_dc, junkid.nonzero()] = (
476 jspectra[ich, ind_vel[1], junkid] + jspectra[ich, ind_vel[2], junkid]) / 2
452 477
453 478 if jcspectraExist:
454 479 for ip in range(num_pairs):
455 yy = jcspectra[ip,ind_vel,:]
456 jcspectra[ip,freq_dc,:] = numpy.dot(xx_aux,yy)
457
480 yy = jcspectra[ip, ind_vel, :]
481 jcspectra[ip, freq_dc, :] = numpy.dot(xx_aux, yy)
458 482
459 483 self.dataOut.data_spc = jspectra
460 484 self.dataOut.data_cspc = jcspectra
461 485
462 486 return 1
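# A worked example (synthetic numbers) of the mode-2 DC removal above: the four
# bins at offsets [-2, -1, 1, 2] around the DC bin define a cubic polynomial, and
# the DC bin is replaced by that polynomial evaluated at offset 0, i.e. the first
# row of the inverse Vandermonde matrix dotted with the four neighbouring values.
import numpy
vel = numpy.array([-2, -1, 1, 2])
xx = numpy.array([v ** numpy.arange(4) for v in vel], dtype=float)   # 4x4 Vandermonde matrix
xx_aux = numpy.linalg.inv(xx)[0, :]                                  # row giving the constant term
y = numpy.array([3.0, 2.5, 2.6, 3.2])       # spectrum at bins dc-2, dc-1, dc+1, dc+2
dc_estimate = numpy.dot(xx_aux, y)
# cross-check with an explicit cubic fit evaluated at offset 0
assert numpy.isclose(dc_estimate, numpy.polyval(numpy.polyfit(vel, y, 3), 0.0))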
463 487
464 def removeInterference(self, interf = 2,hei_interf = None, nhei_interf = None, offhei_interf = None):
488 def removeInterference(self, interf=2, hei_interf=None, nhei_interf=None, offhei_interf=None):
465 489
466 490 jspectra = self.dataOut.data_spc
467 491 jcspectra = self.dataOut.data_cspc
468 492 jnoise = self.dataOut.getNoise()
469 493 num_incoh = self.dataOut.nIncohInt
470 494
471 num_channel = jspectra.shape[0]
472 num_prof = jspectra.shape[1]
473 num_hei = jspectra.shape[2]
495 num_channel = jspectra.shape[0]
496 num_prof = jspectra.shape[1]
497 num_hei = jspectra.shape[2]
474 498
475 #hei_interf
499 # hei_interf
476 500 if hei_interf is None:
477 count_hei = num_hei/2 # integer division, the remainder does not matter
501 count_hei = num_hei / 2 # integer division, the remainder does not matter
478 502 hei_interf = numpy.asmatrix(range(count_hei)) + num_hei - count_hei
479 503 hei_interf = numpy.asarray(hei_interf)[0]
480 #nhei_interf
504 # nhei_interf
481 505 if (nhei_interf == None):
482 506 nhei_interf = 5
483 507 if (nhei_interf < 1):
484 508 nhei_interf = 1
485 509 if (nhei_interf > count_hei):
486 510 nhei_interf = count_hei
487 511 if (offhei_interf == None):
488 512 offhei_interf = 0
489 513
490 514 ind_hei = range(num_hei)
491 515 # mask_prof = numpy.asarray(range(num_prof - 2)) + 1
492 516 # mask_prof[range(num_prof/2 - 1,len(mask_prof))] += 1
493 517 mask_prof = numpy.asarray(range(num_prof))
494 518 num_mask_prof = mask_prof.size
495 comp_mask_prof = [0, num_prof/2]
496
519 comp_mask_prof = [0, num_prof / 2]
497 520
498 # noise_exist: determines whether jnoise has been defined and holds the noise information for each channel
521 # noise_exist: determines whether jnoise has been defined and holds the noise information for each channel
499 522 if (jnoise.size < num_channel or numpy.isnan(jnoise).any()):
500 523 jnoise = numpy.nan
501 524 noise_exist = jnoise[0] < numpy.Inf
502 525
503 # Interference removal subroutine
526 # Interference removal subroutine
504 527 for ich in range(num_channel):
505 # Sort the spectra by power (lowest to highest)
506 power = jspectra[ich,mask_prof,:]
507 power = power[:,hei_interf]
508 power = power.sum(axis = 0)
528 # Sort the spectra by power (lowest to highest)
529 power = jspectra[ich, mask_prof, :]
530 power = power[:, hei_interf]
531 power = power.sum(axis=0)
509 532 psort = power.ravel().argsort()
510 533
511 # Estimate the average interference in the power spectra using the lowest-power heights
512 junkspc_interf = jspectra[ich,:,hei_interf[psort[range(offhei_interf, nhei_interf + offhei_interf)]]]
534 # Estimate the average interference in the power spectra using the lowest-power heights
535 junkspc_interf = jspectra[ich, :, hei_interf[psort[range(
536 offhei_interf, nhei_interf + offhei_interf)]]]
513 537
514 538 if noise_exist:
515 # tmp_noise = jnoise[ich] / num_prof
539 # tmp_noise = jnoise[ich] / num_prof
516 540 tmp_noise = jnoise[ich]
517 541 junkspc_interf = junkspc_interf - tmp_noise
518 542 #junkspc_interf[:,comp_mask_prof] = 0
519 543
520 jspc_interf = junkspc_interf.sum(axis = 0) / nhei_interf
544 jspc_interf = junkspc_interf.sum(axis=0) / nhei_interf
521 545 jspc_interf = jspc_interf.transpose()
522 # Computing the average interference spectrum
523 noiseid = numpy.where(jspc_interf <= tmp_noise/ numpy.sqrt(num_incoh))
546 # Computing the average interference spectrum
547 noiseid = numpy.where(
548 jspc_interf <= tmp_noise / numpy.sqrt(num_incoh))
524 549 noiseid = noiseid[0]
525 550 cnoiseid = noiseid.size
526 interfid = numpy.where(jspc_interf > tmp_noise/ numpy.sqrt(num_incoh))
551 interfid = numpy.where(
552 jspc_interf > tmp_noise / numpy.sqrt(num_incoh))
527 553 interfid = interfid[0]
528 554 cinterfid = interfid.size
529 555
530 if (cnoiseid > 0): jspc_interf[noiseid] = 0
556 if (cnoiseid > 0):
557 jspc_interf[noiseid] = 0
531 558
532 # Expanding the profiles to be cleaned
559 # Expanding the profiles to be cleaned
533 560 if (cinterfid > 0):
534 new_interfid = (numpy.r_[interfid - 1, interfid, interfid + 1] + num_prof)%num_prof
561 new_interfid = (
562 numpy.r_[interfid - 1, interfid, interfid + 1] + num_prof) % num_prof
535 563 new_interfid = numpy.asarray(new_interfid)
536 564 new_interfid = {x for x in new_interfid}
537 565 new_interfid = numpy.array(list(new_interfid))
538 566 new_cinterfid = new_interfid.size
539 else: new_cinterfid = 0
567 else:
568 new_cinterfid = 0
540 569
541 570 for ip in range(new_cinterfid):
542 ind = junkspc_interf[:,new_interfid[ip]].ravel().argsort()
543 jspc_interf[new_interfid[ip]] = junkspc_interf[ind[nhei_interf/2],new_interfid[ip]]
544
571 ind = junkspc_interf[:, new_interfid[ip]].ravel().argsort()
572 jspc_interf[new_interfid[ip]
573 ] = junkspc_interf[ind[nhei_interf / 2], new_interfid[ip]]
545 574
546 jspectra[ich,:,ind_hei] = jspectra[ich,:,ind_hei] - jspc_interf # TODO: correct indices
575 jspectra[ich, :, ind_hei] = jspectra[ich, :,
576 ind_hei] - jspc_interf # TODO: correct indices
547 577
548 # Removing the interference at the point of strongest interference
578 # Removing the interference at the point of strongest interference
549 579 ListAux = jspc_interf[mask_prof].tolist()
550 580 maxid = ListAux.index(max(ListAux))
551 581
552
553 582 if cinterfid > 0:
554 for ip in range(cinterfid*(interf == 2) - 1):
555 ind = (jspectra[ich,interfid[ip],:] < tmp_noise*(1 + 1/numpy.sqrt(num_incoh))).nonzero()
583 for ip in range(cinterfid * (interf == 2) - 1):
584 ind = (jspectra[ich, interfid[ip], :] < tmp_noise *
585 (1 + 1 / numpy.sqrt(num_incoh))).nonzero()
556 586 cind = len(ind)
557 587
558 588 if (cind > 0):
559 jspectra[ich,interfid[ip],ind] = tmp_noise*(1 + (numpy.random.uniform(cind) - 0.5)/numpy.sqrt(num_incoh))
589 jspectra[ich, interfid[ip], ind] = tmp_noise * \
590 (1 + (numpy.random.uniform(cind) - 0.5) /
591 numpy.sqrt(num_incoh))
560 592
561 ind = numpy.array([-2,-1,1,2])
562 xx = numpy.zeros([4,4])
593 ind = numpy.array([-2, -1, 1, 2])
594 xx = numpy.zeros([4, 4])
563 595
564 596 for id1 in range(4):
565 xx[:,id1] = ind[id1]**numpy.asarray(range(4))
597 xx[:, id1] = ind[id1]**numpy.asarray(range(4))
566 598
567 599 xx_inv = numpy.linalg.inv(xx)
568 xx = xx_inv[:,0]
569 ind = (ind + maxid + num_mask_prof)%num_mask_prof
570 yy = jspectra[ich,mask_prof[ind],:]
571 jspectra[ich,mask_prof[maxid],:] = numpy.dot(yy.transpose(),xx)
572
573
574 indAux = (jspectra[ich,:,:] < tmp_noise*(1-1/numpy.sqrt(num_incoh))).nonzero()
575 jspectra[ich,indAux[0],indAux[1]] = tmp_noise * (1 - 1/numpy.sqrt(num_incoh))
576
577 # Interference removal in the cross-spectra
578 if jcspectra is None: return jspectra, jcspectra
579 num_pairs = jcspectra.size/(num_prof*num_hei)
600 xx = xx_inv[:, 0]
601 ind = (ind + maxid + num_mask_prof) % num_mask_prof
602 yy = jspectra[ich, mask_prof[ind], :]
603 jspectra[ich, mask_prof[maxid], :] = numpy.dot(
604 yy.transpose(), xx)
605
606 indAux = (jspectra[ich, :, :] < tmp_noise *
607 (1 - 1 / numpy.sqrt(num_incoh))).nonzero()
608 jspectra[ich, indAux[0], indAux[1]] = tmp_noise * \
609 (1 - 1 / numpy.sqrt(num_incoh))
610
611 # Interference removal in the cross-spectra
612 if jcspectra is None:
613 return jspectra, jcspectra
614 num_pairs = jcspectra.size / (num_prof * num_hei)
580 615 jcspectra = jcspectra.reshape(num_pairs, num_prof, num_hei)
581 616
582 617 for ip in range(num_pairs):
583 618
584 619 #-------------------------------------------
585 620
586 cspower = numpy.abs(jcspectra[ip,mask_prof,:])
587 cspower = cspower[:,hei_interf]
588 cspower = cspower.sum(axis = 0)
621 cspower = numpy.abs(jcspectra[ip, mask_prof, :])
622 cspower = cspower[:, hei_interf]
623 cspower = cspower.sum(axis=0)
589 624
590 625 cspsort = cspower.ravel().argsort()
591 junkcspc_interf = jcspectra[ip,:,hei_interf[cspsort[range(offhei_interf, nhei_interf + offhei_interf)]]]
626 junkcspc_interf = jcspectra[ip, :, hei_interf[cspsort[range(
627 offhei_interf, nhei_interf + offhei_interf)]]]
592 628 junkcspc_interf = junkcspc_interf.transpose()
593 jcspc_interf = junkcspc_interf.sum(axis = 1)/nhei_interf
629 jcspc_interf = junkcspc_interf.sum(axis=1) / nhei_interf
594 630
595 631 ind = numpy.abs(jcspc_interf[mask_prof]).ravel().argsort()
596 632
597 median_real = numpy.median(numpy.real(junkcspc_interf[mask_prof[ind[range(3*num_prof/4)]],:]))
598 median_imag = numpy.median(numpy.imag(junkcspc_interf[mask_prof[ind[range(3*num_prof/4)]],:]))
599 junkcspc_interf[comp_mask_prof,:] = numpy.complex(median_real, median_imag)
633 median_real = numpy.median(numpy.real(
634 junkcspc_interf[mask_prof[ind[range(3 * num_prof / 4)]], :]))
635 median_imag = numpy.median(numpy.imag(
636 junkcspc_interf[mask_prof[ind[range(3 * num_prof / 4)]], :]))
637 junkcspc_interf[comp_mask_prof, :] = numpy.complex(
638 median_real, median_imag)
600 639
601 640 for iprof in range(num_prof):
602 ind = numpy.abs(junkcspc_interf[iprof,:]).ravel().argsort()
603 jcspc_interf[iprof] = junkcspc_interf[iprof, ind[nhei_interf/2]]
641 ind = numpy.abs(junkcspc_interf[iprof, :]).ravel().argsort()
642 jcspc_interf[iprof] = junkcspc_interf[iprof,
643 ind[nhei_interf / 2]]
604 644
605 # Removing the interference
606 jcspectra[ip,:,ind_hei] = jcspectra[ip,:,ind_hei] - jcspc_interf
645 # Removing the interference
646 jcspectra[ip, :, ind_hei] = jcspectra[ip,
647 :, ind_hei] - jcspc_interf
607 648
608 649 ListAux = numpy.abs(jcspc_interf[mask_prof]).tolist()
609 650 maxid = ListAux.index(max(ListAux))
610 651
611 ind = numpy.array([-2,-1,1,2])
612 xx = numpy.zeros([4,4])
652 ind = numpy.array([-2, -1, 1, 2])
653 xx = numpy.zeros([4, 4])
613 654
614 655 for id1 in range(4):
615 xx[:,id1] = ind[id1]**numpy.asarray(range(4))
656 xx[:, id1] = ind[id1]**numpy.asarray(range(4))
616 657
617 658 xx_inv = numpy.linalg.inv(xx)
618 xx = xx_inv[:,0]
659 xx = xx_inv[:, 0]
619 660
620 ind = (ind + maxid + num_mask_prof)%num_mask_prof
621 yy = jcspectra[ip,mask_prof[ind],:]
622 jcspectra[ip,mask_prof[maxid],:] = numpy.dot(yy.transpose(),xx)
661 ind = (ind + maxid + num_mask_prof) % num_mask_prof
662 yy = jcspectra[ip, mask_prof[ind], :]
663 jcspectra[ip, mask_prof[maxid], :] = numpy.dot(yy.transpose(), xx)
623 664
624 # Save results
665 # Save results
625 666 self.dataOut.data_spc = jspectra
626 667 self.dataOut.data_cspc = jcspectra
627 668
628 669 return 1
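# A heavily simplified, self-contained sketch of the idea implemented above (this is
# NOT the full algorithm): estimate an average interference spectrum from the
# lowest-power heights of one channel and subtract it from every height.
import numpy
num_prof, num_hei, nhei_interf = 64, 100, 5
spc = numpy.random.rand(num_prof, num_hei)            # one channel, power spectra
power = spc.sum(axis=0)                               # total power per height
quietest = power.argsort()[:nhei_interf]              # lowest-power heights (little or no signal)
interf_profile = spc[:, quietest].mean(axis=1)        # average interference spectrum
cleaned = spc - interf_profile[:, None]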
629 670
630 671 def setRadarFrequency(self, frequency=None):
631 672
632 673 if frequency != None:
633 674 self.dataOut.frequency = frequency
634 675
635 676 return 1
636 677
637 678 def getNoise(self, minHei=None, maxHei=None, minVel=None, maxVel=None):
638 #validacion de rango
638 # height range validation
679 # height range validation
640 681 minHei = self.dataOut.heightList[0]
641 682
642 683 if maxHei == None:
643 684 maxHei = self.dataOut.heightList[-1]
644 685
645 686 if (minHei < self.dataOut.heightList[0]) or (minHei > maxHei):
646 print 'minHei: %.2f is out of the heights range'%(minHei)
647 print 'minHei is setting to %.2f'%(self.dataOut.heightList[0])
687 print 'minHei: %.2f is out of the height range' % (minHei)
688 print 'minHei is being set to %.2f' % (self.dataOut.heightList[0])
648 689 minHei = self.dataOut.heightList[0]
649 690
650 691 if (maxHei > self.dataOut.heightList[-1]) or (maxHei < minHei):
651 print 'maxHei: %.2f is out of the heights range'%(maxHei)
652 print 'maxHei is setting to %.2f'%(self.dataOut.heightList[-1])
692 print 'maxHei: %.2f is out of the height range' % (maxHei)
693 print 'maxHei is being set to %.2f' % (self.dataOut.heightList[-1])
653 694 maxHei = self.dataOut.heightList[-1]
654 695
655 696 # velocity range validation
656 697 velrange = self.dataOut.getVelRange(1)
657 698
658 699 if minVel == None:
659 700 minVel = velrange[0]
660 701
661 702 if maxVel == None:
662 703 maxVel = velrange[-1]
663 704
664 705 if (minVel < velrange[0]) or (minVel > maxVel):
665 print 'minVel: %.2f is out of the velocity range'%(minVel)
666 print 'minVel is setting to %.2f'%(velrange[0])
706 print 'minVel: %.2f is out of the velocity range' % (minVel)
707 print 'minVel is being set to %.2f' % (velrange[0])
667 708 minVel = velrange[0]
668 709
669 710 if (maxVel > velrange[-1]) or (maxVel < minVel):
670 print 'maxVel: %.2f is out of the velocity range'%(maxVel)
671 print 'maxVel is setting to %.2f'%(velrange[-1])
711 print 'maxVel: %.2f is out of the velocity range' % (maxVel)
712 print 'maxVel is being set to %.2f' % (velrange[-1])
672 713 maxVel = velrange[-1]
673 714
674 715 # index selection for the height range
675 716 minIndex = 0
676 717 maxIndex = 0
677 718 heights = self.dataOut.heightList
678 719
679 720 inda = numpy.where(heights >= minHei)
680 721 indb = numpy.where(heights <= maxHei)
681 722
682 723 try:
683 724 minIndex = inda[0][0]
684 725 except:
685 726 minIndex = 0
686 727
687 728 try:
688 729 maxIndex = indb[0][-1]
689 730 except:
690 731 maxIndex = len(heights)
691 732
692 733 if (minIndex < 0) or (minIndex > maxIndex):
693 raise ValueError, "some value in (%d,%d) is not valid" % (minIndex, maxIndex)
734 raise ValueError, "some value in (%d,%d) is not valid" % (
735 minIndex, maxIndex)
694 736
695 737 if (maxIndex >= self.dataOut.nHeights):
696 maxIndex = self.dataOut.nHeights-1
738 maxIndex = self.dataOut.nHeights - 1
697 739
698 740 # index selection for velocities
699 741 indminvel = numpy.where(velrange >= minVel)
700 742 indmaxvel = numpy.where(velrange <= maxVel)
701 743 try:
702 744 minIndexVel = indminvel[0][0]
703 745 except:
704 746 minIndexVel = 0
705 747
706 748 try:
707 749 maxIndexVel = indmaxvel[0][-1]
708 750 except:
709 751 maxIndexVel = len(velrange)
710 752
711 # spectrum selection
712 data_spc = self.dataOut.data_spc[:,minIndexVel:maxIndexVel+1,minIndex:maxIndex+1]
713 # noise estimation
753 # spectrum selection
754 data_spc = self.dataOut.data_spc[:,
755 minIndexVel:maxIndexVel + 1, minIndex:maxIndex + 1]
756 # noise estimation
714 757 noise = numpy.zeros(self.dataOut.nChannels)
715 758
716 759 for channel in range(self.dataOut.nChannels):
717 daux = data_spc[channel,:,:]
760 daux = data_spc[channel, :, :]
718 761 noise[channel] = hildebrand_sekhon(daux, self.dataOut.nIncohInt)
719 762
720 763 self.dataOut.noise_estimation = noise.copy()
721 764
722 765 return 1
723 766
724 class IncohInt(Operation):
725 767
768 class IncohInt(Operation):
726 769
727 770 __profIndex = 0
728 __withOverapping = False
771 __withOverapping = False
729 772
730 773 __byTime = False
731 774 __initime = None
732 775 __lastdatatime = None
733 776 __integrationtime = None
734 777
735 778 __buffer_spc = None
736 779 __buffer_cspc = None
737 780 __buffer_dc = None
738 781
739 782 __dataReady = False
740 783
741 784 __timeInterval = None
742 785
743 786 n = None
744 787
745
746
747 788 def __init__(self, **kwargs):
748 789
749 790 Operation.__init__(self, **kwargs)
750 791 # self.isConfig = False
751 792
752 793 def setup(self, n=None, timeInterval=None, overlapping=False):
753 794 """
754 795 Set the parameters of the integration class.
755 796
756 797 Inputs:
757 798
758 799 n : Number of incoherent integrations
759 800 timeInterval : Integration time in seconds. Ignored when the parameter "n" is given
760 801 overlapping :
761 802
762 803 """
763 804
764 805 self.__initime = None
765 806 self.__lastdatatime = 0
766 807
767 808 self.__buffer_spc = 0
768 809 self.__buffer_cspc = 0
769 810 self.__buffer_dc = 0
770 811
771 812 self.__profIndex = 0
772 813 self.__dataReady = False
773 814 self.__byTime = False
774 815
775 816 if n is None and timeInterval is None:
776 817 raise ValueError, "n or timeInterval should be specified ..."
777 818
778 819 if n is not None:
779 820 self.n = int(n)
780 821 else:
781 self.__integrationtime = int(timeInterval) #if (type(timeInterval)!=integer) -> change this line
822 # if (type(timeInterval)!=integer) -> change this line
823 self.__integrationtime = int(timeInterval)
782 824 self.n = None
783 825 self.__byTime = True
784 826
785 827 def putData(self, data_spc, data_cspc, data_dc):
786
787 828 """
788 829 Add a profile to __buffer_spc and increase __profIndex by one
789 830
790 831 """
791 832
792 833 self.__buffer_spc += data_spc
793 834
794 835 if data_cspc is None:
795 836 self.__buffer_cspc = None
796 837 else:
797 838 self.__buffer_cspc += data_cspc
798 839
799 840 if data_dc is None:
800 841 self.__buffer_dc = None
801 842 else:
802 843 self.__buffer_dc += data_dc
803 844
804 845 self.__profIndex += 1
805 846
806 847 return
807 848
808 849 def pushData(self):
809 850 """
810 851 Return the sum of the last profiles and the profiles used in the sum.
811 852
812 853 Affected:
813 854
814 855 self.__profileIndex
815 856
816 857 """
817 858
818 859 data_spc = self.__buffer_spc
819 860 data_cspc = self.__buffer_cspc
820 861 data_dc = self.__buffer_dc
821 862 n = self.__profIndex
822 863
823 864 self.__buffer_spc = 0
824 865 self.__buffer_cspc = 0
825 866 self.__buffer_dc = 0
826 867 self.__profIndex = 0
827 868
828 869 return data_spc, data_cspc, data_dc, n
829 870
830 871 def byProfiles(self, *args):
831 872
832 873 self.__dataReady = False
833 874 avgdata_spc = None
834 875 avgdata_cspc = None
835 876 avgdata_dc = None
836 877
837 878 self.putData(*args)
838 879
839 880 if self.__profIndex == self.n:
840 881
841 882 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
842 883 self.n = n
843 884 self.__dataReady = True
844 885
845 886 return avgdata_spc, avgdata_cspc, avgdata_dc
846 887
847 888 def byTime(self, datatime, *args):
848 889
849 890 self.__dataReady = False
850 891 avgdata_spc = None
851 892 avgdata_cspc = None
852 893 avgdata_dc = None
853 894
854 895 self.putData(*args)
855 896
856 897 if (datatime - self.__initime) >= self.__integrationtime:
857 898 avgdata_spc, avgdata_cspc, avgdata_dc, n = self.pushData()
858 899 self.n = n
859 900 self.__dataReady = True
860 901
861 902 return avgdata_spc, avgdata_cspc, avgdata_dc
862 903
863 904 def integrate(self, datatime, *args):
864 905
865 906 if self.__profIndex == 0:
866 907 self.__initime = datatime
867 908
868 909 if self.__byTime:
869 avgdata_spc, avgdata_cspc, avgdata_dc = self.byTime(datatime, *args)
910 avgdata_spc, avgdata_cspc, avgdata_dc = self.byTime(
911 datatime, *args)
870 912 else:
871 913 avgdata_spc, avgdata_cspc, avgdata_dc = self.byProfiles(*args)
872 914
873 915 if not self.__dataReady:
874 916 return None, None, None, None
875 917
876 918 return self.__initime, avgdata_spc, avgdata_cspc, avgdata_dc
877 919
878 920 def run(self, dataOut, n=None, timeInterval=None, overlapping=False):
879 if n==1:
921 if n == 1:
880 922 return
881 923
882 924 dataOut.flagNoData = True
883 925
884 926 if not self.isConfig:
885 927 self.setup(n, timeInterval, overlapping)
886 928 self.isConfig = True
887 929
888 930 avgdatatime, avgdata_spc, avgdata_cspc, avgdata_dc = self.integrate(dataOut.utctime,
889 931 dataOut.data_spc,
890 932 dataOut.data_cspc,
891 933 dataOut.data_dc)
892 934
893 935 if self.__dataReady:
894 936
895 937 dataOut.data_spc = avgdata_spc
896 938 dataOut.data_cspc = avgdata_cspc
897 939 dataOut.data_dc = avgdata_dc
898 940
899 941 dataOut.nIncohInt *= self.n
900 942 dataOut.utctime = avgdatatime
901 943 dataOut.flagNoData = False
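# A minimal numeric sketch (synthetic arrays) of what IncohInt does with n=4:
# four consecutive power-spectra blocks are summed into one output block, and the
# delivered dataOut has nIncohInt multiplied by the number of blocks integrated.
import numpy
blocks = [numpy.random.rand(2, 8, 16) for _ in range(4)]   # (channels, profiles, heights)
integrated = sum(blocks)                                   # what pushData() hands back as data_spc
n_incoh_out = 1 * len(blocks)                              # dataOut.nIncohInt *= self.n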