test zmq for rawdata
José Chávez
r975:a07abe3cf821
@@ -1,104 +1,108 @@
1 1 # Byte-compiled / optimized / DLL files
2 2 __pycache__/
3 3 *.py[cod]
4 4 *$py.class
5 5
6 6 # C extensions
7 7 *.so
8 8
9 9 # Distribution / packaging
10 10 .Python
11 11 env/
12 12 build/
13 13 develop-eggs/
14 14 dist/
15 15 downloads/
16 16 eggs/
17 17 .eggs/
18 18 lib/
19 19 lib64/
20 20 parts/
21 21 sdist/
22 22 var/
23 23 wheels/
24 24 *.egg-info/
25 25 .installed.cfg
26 26 *.egg
27 27
28 28 # PyInstaller
29 29 # Usually these files are written by a python script from a template
30 30 # before PyInstaller builds the exe, so as to inject date/other infos into it.
31 31 *.manifest
32 32 *.spec
33 33
34 34 # Installer logs
35 35 pip-log.txt
36 36 pip-delete-this-directory.txt
37 37
38 38 # Unit test / coverage reports
39 39 htmlcov/
40 40 .tox/
41 41 .coverage
42 42 .coverage.*
43 43 .cache
44 44 nosetests.xml
45 45 coverage.xml
46 46 *,cover
47 47 .hypothesis/
48 48
49 49 # Translations
50 50 *.mo
51 51 *.pot
52 52
53 53 # Django stuff:
54 54 *.log
55 55 local_settings.py
56 56
57 57 # Flask stuff:
58 58 instance/
59 59 .webassets-cache
60 60
61 61 # Scrapy stuff:
62 62 .scrapy
63 63
64 64 # Sphinx documentation
65 65 docs/_build/
66 66
67 67 # PyBuilder
68 68 target/
69 69
70 70 # Jupyter Notebook
71 71 .ipynb_checkpoints
72 72
73 73 # pyenv
74 74 .python-version
75 75
76 76 # celery beat schedule file
77 77 celerybeat-schedule
78 78
79 79 # SageMath parsed files
80 80 *.sage.py
81 81
82 82 # dotenv
83 83 .env
84 84
85 85 # virtualenv
86 86 .venv
87 87 venv/
88 88 ENV/
89 89
90 90 # Spyder project settings
91 91 .spyderproject
92 92 .spyproject
93 93
94 94 # Rope project settings
95 95 .ropeproject
96 96
97 97 # mkdocs documentation
98 98 /site
99 99
100 100 # eclipse
101 101 .project
102 102 .pydevproject
103 103
104 # vscode
105
106 .vscode
107
104 108 schainpy/scripts/ No newline at end of file
@@ -1,3 +1,4 @@
1 1 {
2 "python.linting.pylintEnabled": true
2 "python.linting.pylintEnabled": true,
3 "git.ignoreLimitWarning": true
3 4 } No newline at end of file
@@ -1,765 +1,851 @@
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JROHeaderIO.py 151 2012-10-31 19:00:51Z murco $
5 5 '''
6 6 import sys
7 7 import numpy
8 8 import copy
9 9 import datetime
10 10
11 11 SPEED_OF_LIGHT = 299792458
12 12 SPEED_OF_LIGHT = 3e8
13 13
14 14 BASIC_STRUCTURE = numpy.dtype([
15 15 ('nSize','<u4'),
16 16 ('nVersion','<u2'),
17 17 ('nDataBlockId','<u4'),
18 18 ('nUtime','<u4'),
19 19 ('nMilsec','<u2'),
20 20 ('nTimezone','<i2'),
21 21 ('nDstflag','<i2'),
22 22 ('nErrorCount','<u4')
23 23 ])
24 24
25 25 SYSTEM_STRUCTURE = numpy.dtype([
26 26 ('nSize','<u4'),
27 27 ('nNumSamples','<u4'),
28 28 ('nNumProfiles','<u4'),
29 29 ('nNumChannels','<u4'),
30 30 ('nADCResolution','<u4'),
31 31 ('nPCDIOBusWidth','<u4'),
32 32 ])
33 33
34 34 RADAR_STRUCTURE = numpy.dtype([
35 35 ('nSize','<u4'),
36 36 ('nExpType','<u4'),
37 37 ('nNTx','<u4'),
38 38 ('fIpp','<f4'),
39 39 ('fTxA','<f4'),
40 40 ('fTxB','<f4'),
41 41 ('nNumWindows','<u4'),
42 42 ('nNumTaus','<u4'),
43 43 ('nCodeType','<u4'),
44 44 ('nLine6Function','<u4'),
45 45 ('nLine5Function','<u4'),
46 46 ('fClock','<f4'),
47 47 ('nPrePulseBefore','<u4'),
48 48 ('nPrePulseAfter','<u4'),
49 49 ('sRangeIPP','<a20'),
50 50 ('sRangeTxA','<a20'),
51 51 ('sRangeTxB','<a20'),
52 52 ])
53 53
54 54 SAMPLING_STRUCTURE = numpy.dtype([('h0','<f4'),('dh','<f4'),('nsa','<u4')])
55 55
56 56
57 57 PROCESSING_STRUCTURE = numpy.dtype([
58 58 ('nSize','<u4'),
59 59 ('nDataType','<u4'),
60 60 ('nSizeOfDataBlock','<u4'),
61 61 ('nProfilesperBlock','<u4'),
62 62 ('nDataBlocksperFile','<u4'),
63 63 ('nNumWindows','<u4'),
64 64 ('nProcessFlags','<u4'),
65 65 ('nCoherentIntegrations','<u4'),
66 66 ('nIncoherentIntegrations','<u4'),
67 67 ('nTotalSpectra','<u4')
68 68 ])
69 69
70 70 class Header(object):
71 71
72 72 def __init__(self):
73 73 raise NotImplementedError
74 74
75 75 def copy(self):
76 76 return copy.deepcopy(self)
77 77
78 78 def read(self):
79 79
80 80 raise NotImplementedError
81 81
82 82 def write(self):
83 83
84 84 raise NotImplementedError
85 85
86 86 def printInfo(self):
87 87
88 88 message = "#"*50 + "\n"
89 89 message += self.__class__.__name__.upper() + "\n"
90 90 message += "#"*50 + "\n"
91 91
92 92 keyList = self.__dict__.keys()
93 93 keyList.sort()
94 94
95 95 for key in keyList:
96 96 message += "%s = %s" %(key, self.__dict__[key]) + "\n"
97 97
98 98 if "size" not in keyList:
99 99 attr = getattr(self, "size")
100 100
101 101 if attr:
102 102 message += "%s = %s" %("size", attr) + "\n"
103 103
104 104 print message
105 105
106 106 class BasicHeader(Header):
107 107
108 108 size = None
109 109 version = None
110 110 dataBlock = None
111 111 utc = None
112 112 ltc = None
113 113 miliSecond = None
114 114 timeZone = None
115 115 dstFlag = None
116 116 errorCount = None
117 117 datatime = None
118 118 __LOCALTIME = None
119 119
120 120 def __init__(self, useLocalTime=True):
121 121
122 122 self.size = 24
123 123 self.version = 0
124 124 self.dataBlock = 0
125 125 self.utc = 0
126 126 self.miliSecond = 0
127 127 self.timeZone = 0
128 128 self.dstFlag = 0
129 129 self.errorCount = 0
130 130
131 131 self.useLocalTime = useLocalTime
132 132
133 133 def read(self, fp):
134 134
135 self.length = 0
135 136 try:
136 137 if hasattr(fp, 'read'):
137 print 'fromfile'
138 138 header = numpy.fromfile(fp, BASIC_STRUCTURE,1)
139 139 else:
140 print 'fromstring'
141 140 header = numpy.fromstring(fp, BASIC_STRUCTURE,1)
142 141 except Exception, e:
143 142 print "BasicHeader: "
144 143 print e
145 144 return 0
146
145
147 146 self.size = int(header['nSize'][0])
148 147 self.version = int(header['nVersion'][0])
149 148 self.dataBlock = int(header['nDataBlockId'][0])
150 149 self.utc = int(header['nUtime'][0])
151 150 self.miliSecond = int(header['nMilsec'][0])
152 151 self.timeZone = int(header['nTimezone'][0])
153 152 self.dstFlag = int(header['nDstflag'][0])
154 153 self.errorCount = int(header['nErrorCount'][0])
155 154
156 155 if self.size < 24:
157 156 return 0
158
157
158 self.length = header.nbytes
159 159 return 1
160 160
161 161 def write(self, fp):
162 162
163 163 headerTuple = (self.size,self.version,self.dataBlock,self.utc,self.miliSecond,self.timeZone,self.dstFlag,self.errorCount)
164 164 header = numpy.array(headerTuple, BASIC_STRUCTURE)
165 165 header.tofile(fp)
166 166
167 167 return 1
168 168
169 169 def get_ltc(self):
170 170
171 171 return self.utc - self.timeZone*60
172 172
173 173 def set_ltc(self, value):
174 174
175 175 self.utc = value + self.timeZone*60
176 176
177 177 def get_datatime(self):
178 178
179 179 return datetime.datetime.utcfromtimestamp(self.ltc)
180 180
181 181 ltc = property(get_ltc, set_ltc)
182 182 datatime = property(get_datatime)
183 183
184 184 class SystemHeader(Header):
185 185
186 186 size = None
187 187 nSamples = None
188 188 nProfiles = None
189 189 nChannels = None
190 190 adcResolution = None
191 191 pciDioBusWidth = None
192 192
193 193 def __init__(self, nSamples=0, nProfiles=0, nChannels=0, adcResolution=14, pciDioBusWith=0):
194 194
195 195 self.size = 24
196 196 self.nSamples = nSamples
197 197 self.nProfiles = nProfiles
198 198 self.nChannels = nChannels
199 199 self.adcResolution = adcResolution
200 200 self.pciDioBusWidth = pciDioBusWith
201 201
202 202 def read(self, fp):
203
204 startFp = fp.tell()
205
203 self.length = 0
206 204 try:
207 header = numpy.fromfile(fp,SYSTEM_STRUCTURE,1)
205 startFp = fp.tell()
208 206 except Exception, e:
209 print "System Header: " + e
207 startFp = None
208 pass
209
210 try:
211 if hasattr(fp, 'read'):
212 header = numpy.fromfile(fp, SYSTEM_STRUCTURE,1)
213 else:
214 header = numpy.fromstring(fp, SYSTEM_STRUCTURE,1)
215 except Exception, e:
216 print "System Header: " + str(e)
210 217 return 0
211 218
212 219 self.size = header['nSize'][0]
213 220 self.nSamples = header['nNumSamples'][0]
214 221 self.nProfiles = header['nNumProfiles'][0]
215 222 self.nChannels = header['nNumChannels'][0]
216 223 self.adcResolution = header['nADCResolution'][0]
217 224 self.pciDioBusWidth = header['nPCDIOBusWidth'][0]
218 225
219 endFp = self.size + startFp
220 226
221 if fp.tell() > endFp:
222 sys.stderr.write("Warning %s: Size value read from System Header is lower than it has to be\n" %fp.name)
223 return 0
227 if startFp is not None:
228 endFp = self.size + startFp
224 229
225 if fp.tell() < endFp:
226 sys.stderr.write("Warning %s: Size value read from System Header size is greater than it has to be\n" %fp.name)
227 return 0
230 if fp.tell() > endFp:
231 sys.stderr.write("Warning %s: Size value read from System Header is lower than it has to be\n" %fp.name)
232 return 0
233
234 if fp.tell() < endFp:
235 sys.stderr.write("Warning %s: Size value read from System Header size is greater than it has to be\n" %fp.name)
236 return 0
228 237
238 self.length = header.nbytes
229 239 return 1
230 240
231 241 def write(self, fp):
232 242
233 243 headerTuple = (self.size,self.nSamples,self.nProfiles,self.nChannels,self.adcResolution,self.pciDioBusWidth)
234 244 header = numpy.array(headerTuple,SYSTEM_STRUCTURE)
235 245 header.tofile(fp)
236 246
237 247 return 1
238 248
239 249 class RadarControllerHeader(Header):
240 250
241 251 expType = None
242 252 nTx = None
243 253 ipp = None
244 254 txA = None
245 255 txB = None
246 256 nWindows = None
247 257 numTaus = None
248 258 codeType = None
249 259 line6Function = None
250 260 line5Function = None
251 261 fClock = None
252 262 prePulseBefore = None
253 263 prePulserAfter = None
254 264 rangeIpp = None
255 265 rangeTxA = None
256 266 rangeTxB = None
257 267
258 268 __size = None
259 269
260 270 def __init__(self, expType=2, nTx=1,
261 271 ippKm=None, txA=0, txB=0,
262 272 nWindows=None, nHeights=None, firstHeight=None, deltaHeight=None,
263 273 numTaus=0, line6Function=0, line5Function=0, fClock=None,
264 274 prePulseBefore=0, prePulseAfter=0,
265 275 codeType=0, nCode=0, nBaud=0, code=None,
266 276 flip1=0, flip2=0):
267 277
268 278 # self.size = 116
269 279 self.expType = expType
270 280 self.nTx = nTx
271 281 self.ipp = ippKm
272 282 self.txA = txA
273 283 self.txB = txB
274 284 self.rangeIpp = ippKm
275 285 self.rangeTxA = txA
276 286 self.rangeTxB = txB
277 287
278 288 self.nWindows = nWindows
279 289 self.numTaus = numTaus
280 290 self.codeType = codeType
281 291 self.line6Function = line6Function
282 292 self.line5Function = line5Function
283 293 self.fClock = fClock
284 294 self.prePulseBefore = prePulseBefore
285 295 self.prePulserAfter = prePulseAfter
286 296
287 297 self.nHeights = nHeights
288 298 self.firstHeight = firstHeight
289 299 self.deltaHeight = deltaHeight
290 300 self.samplesWin = nHeights
291 301
292 302 self.nCode = nCode
293 303 self.nBaud = nBaud
294 304 self.code = code
295 305 self.flip1 = flip1
296 306 self.flip2 = flip2
297 307
298 308 self.code_size = int(numpy.ceil(self.nBaud/32.))*self.nCode*4
299 309 # self.dynamic = numpy.array([],numpy.dtype('byte'))
300 310
301 311 if self.fClock is None and self.deltaHeight is not None:
302 312 self.fClock = 0.15/(deltaHeight*1e-6) #0.15Km / (height * 1u)
303 313
304 314 def read(self, fp):
305
306
307 startFp = fp.tell()
315 self.length = 0
308 316 try:
309 header = numpy.fromfile(fp,RADAR_STRUCTURE,1)
317 startFp = fp.tell()
310 318 except Exception, e:
311 print "RadarControllerHeader: " + e
319 startFp = None
320 pass
321
322 try:
323 if hasattr(fp, 'read'):
324 header = numpy.fromfile(fp, RADAR_STRUCTURE,1)
325 else:
326 header = numpy.fromstring(fp, RADAR_STRUCTURE,1)
327 self.length += header.nbytes
328 except Exception, e:
329 print "RadarControllerHeader: " + str(e)
312 330 return 0
313 331
314 332 size = int(header['nSize'][0])
315 333 self.expType = int(header['nExpType'][0])
316 334 self.nTx = int(header['nNTx'][0])
317 335 self.ipp = float(header['fIpp'][0])
318 336 self.txA = float(header['fTxA'][0])
319 337 self.txB = float(header['fTxB'][0])
320 338 self.nWindows = int(header['nNumWindows'][0])
321 339 self.numTaus = int(header['nNumTaus'][0])
322 340 self.codeType = int(header['nCodeType'][0])
323 341 self.line6Function = int(header['nLine6Function'][0])
324 342 self.line5Function = int(header['nLine5Function'][0])
325 343 self.fClock = float(header['fClock'][0])
326 344 self.prePulseBefore = int(header['nPrePulseBefore'][0])
327 345 self.prePulserAfter = int(header['nPrePulseAfter'][0])
328 346 self.rangeIpp = header['sRangeIPP'][0]
329 347 self.rangeTxA = header['sRangeTxA'][0]
330 348 self.rangeTxB = header['sRangeTxB'][0]
331 349
332 samplingWindow = numpy.fromfile(fp,SAMPLING_STRUCTURE,self.nWindows)
333
350 try:
351 if hasattr(fp, 'read'):
352 samplingWindow = numpy.fromfile(fp, SAMPLING_STRUCTURE, self.nWindows)
353 else:
354 samplingWindow = numpy.fromstring(fp[self.length:], SAMPLING_STRUCTURE, self.nWindows)
355 self.length += samplingWindow.nbytes
356 except Exception, e:
357 print "RadarControllerHeader: " + str(e)
358 return 0
334 359 self.nHeights = int(numpy.sum(samplingWindow['nsa']))
335 360 self.firstHeight = samplingWindow['h0']
336 361 self.deltaHeight = samplingWindow['dh']
337 362 self.samplesWin = samplingWindow['nsa']
363
364
365
366 try:
367 if hasattr(fp, 'read'):
368 self.Taus = numpy.fromfile(fp, '<f4', self.numTaus)
369 else:
370 self.Taus = numpy.fromstring(fp[self.length:], '<f4', self.numTaus)
371 self.length += self.Taus.nbytes
372 except Exception, e:
373 print "RadarControllerHeader: " + str(e)
374 return 0
375
338 376
339 self.Taus = numpy.fromfile(fp,'<f4',self.numTaus)
340 377
341 378 self.code_size = 0
342 379 if self.codeType != 0:
343 self.nCode = int(numpy.fromfile(fp,'<u4',1))
344 self.nBaud = int(numpy.fromfile(fp,'<u4',1))
345 380
381 try:
382 if hasattr(fp, 'read'):
383 self.nCode = numpy.fromfile(fp, '<u4', 1)
384 self.length += self.nCode.nbytes
385 self.nBaud = numpy.fromfile(fp, '<u4', 1)
386 self.length += self.nBaud.nbytes
387 else:
388 self.nCode = numpy.fromstring(fp[self.length:], '<u4', 1)[0]
389 self.length += self.nCode.nbytes
390 self.nBaud = numpy.fromstring(fp[self.length:], '<u4', 1)[0]
391 self.length += self.nBaud.nbytes
392 except Exception, e:
393 print "RadarControllerHeader: " + str(e)
394 return 0
346 395 code = numpy.empty([self.nCode,self.nBaud],dtype='i1')
396
347 397 for ic in range(self.nCode):
348 temp = numpy.fromfile(fp,'u4',int(numpy.ceil(self.nBaud/32.)))
398 try:
399 if hasattr(fp, 'read'):
400 temp = numpy.fromfile(fp,'u4', int(numpy.ceil(self.nBaud/32.)))
401 else:
402 temp = numpy.fromstring(fp,'u4', int(numpy.ceil(self.nBaud/32.)))
403 self.length += temp.nbytes
404 except Exception, e:
405 print "RadarControllerHeader: " + str(e)
406 return 0
407
349 408 for ib in range(self.nBaud-1,-1,-1):
350 409 code[ic,ib] = temp[ib/32]%2
351 410 temp[ib/32] = temp[ib/32]/2
352 411
353 412 self.code = 2.0*code - 1.0
354 413 self.code_size = int(numpy.ceil(self.nBaud/32.))*self.nCode*4
355 414
356 415 # if self.line5Function == RCfunction.FLIP:
357 416 # self.flip1 = numpy.fromfile(fp,'<u4',1)
358 417 #
359 418 # if self.line6Function == RCfunction.FLIP:
360 419 # self.flip2 = numpy.fromfile(fp,'<u4',1)
361
362 endFp = size + startFp
363
364 if fp.tell() != endFp:
365 # fp.seek(endFp)
366 print "%s: Radar Controller Header size is not consistent: from data [%d] != from header field [%d]" %(fp.name, fp.tell()-startFp, size)
367 # return 0
368
369 if fp.tell() > endFp:
370 sys.stderr.write("Warning %s: Size value read from Radar Controller header is lower than it has to be\n" %fp.name)
371 # return 0
372
373 if fp.tell() < endFp:
374 sys.stderr.write("Warning %s: Size value read from Radar Controller header is greater than it has to be\n" %fp.name)
420 if startFp is not None:
421 endFp = size + startFp
375 422
423 if fp.tell() != endFp:
424 # fp.seek(endFp)
425 print "%s: Radar Controller Header size is not consistent: from data [%d] != from header field [%d]" %(fp.name, fp.tell()-startFp, size)
426 # return 0
427
428 if fp.tell() > endFp:
429 sys.stderr.write("Warning %s: Size value read from Radar Controller header is lower than it has to be\n" %fp.name)
430 # return 0
431
432 if fp.tell() < endFp:
433 sys.stderr.write("Warning %s: Size value read from Radar Controller header is greater than it has to be\n" %fp.name)
376 434
435
377 436 return 1
378 437
379 438 def write(self, fp):
380 439
381 440 headerTuple = (self.size,
382 441 self.expType,
383 442 self.nTx,
384 443 self.ipp,
385 444 self.txA,
386 445 self.txB,
387 446 self.nWindows,
388 447 self.numTaus,
389 448 self.codeType,
390 449 self.line6Function,
391 450 self.line5Function,
392 451 self.fClock,
393 452 self.prePulseBefore,
394 453 self.prePulserAfter,
395 454 self.rangeIpp,
396 455 self.rangeTxA,
397 456 self.rangeTxB)
398 457
399 458 header = numpy.array(headerTuple,RADAR_STRUCTURE)
400 459 header.tofile(fp)
401 460
402 461 sampleWindowTuple = (self.firstHeight,self.deltaHeight,self.samplesWin)
403 462 samplingWindow = numpy.array(sampleWindowTuple,SAMPLING_STRUCTURE)
404 463 samplingWindow.tofile(fp)
405 464
406 465 if self.numTaus > 0:
407 466 self.Taus.tofile(fp)
408 467
409 468 if self.codeType !=0:
410 469 nCode = numpy.array(self.nCode, '<u4')
411 470 nCode.tofile(fp)
412 471 nBaud = numpy.array(self.nBaud, '<u4')
413 472 nBaud.tofile(fp)
414 473 code1 = (self.code + 1.0)/2.
415 474
416 475 for ic in range(self.nCode):
417 476 tempx = numpy.zeros(numpy.ceil(self.nBaud/32.))
418 477 start = 0
419 478 end = 32
420 479 for i in range(len(tempx)):
421 480 code_selected = code1[ic,start:end]
422 481 for j in range(len(code_selected)-1,-1,-1):
423 482 if code_selected[j] == 1:
424 483 tempx[i] = tempx[i] + 2**(len(code_selected)-1-j)
425 484 start = start + 32
426 485 end = end + 32
427 486
428 487 tempx = tempx.astype('u4')
429 488 tempx.tofile(fp)
430 489
431 490 # if self.line5Function == RCfunction.FLIP:
432 491 # self.flip1.tofile(fp)
433 492 #
434 493 # if self.line6Function == RCfunction.FLIP:
435 494 # self.flip2.tofile(fp)
436 495
437 496 return 1
438 497
439 498 def get_ippSeconds(self):
440 499 '''
441 500 '''
442 501 ippSeconds = 2.0 * 1000 * self.ipp / SPEED_OF_LIGHT
443 502
444 503 return ippSeconds
445 504
446 505 def set_ippSeconds(self, ippSeconds):
447 506 '''
448 507 '''
449 508
450 509 self.ipp = ippSeconds * SPEED_OF_LIGHT / (2.0*1000)
451 510
452 511 return
453 512
454 513 def get_size(self):
455 514
456 515 self.__size = 116 + 12*self.nWindows + 4*self.numTaus
457 516
458 517 if self.codeType != 0:
459 518 self.__size += 4 + 4 + 4*self.nCode*numpy.ceil(self.nBaud/32.)
460 519
461 520 return self.__size
462 521
463 522 def set_size(self, value):
464 523
465 524 raise IOError, "size is a property and it cannot be set, just read"
466 525
467 526 return
468 527
469 528 ippSeconds = property(get_ippSeconds, set_ippSeconds)
470 529 size = property(get_size, set_size)
471 530
472 531 class ProcessingHeader(Header):
473 532
474 533 # size = None
475 534 dtype = None
476 535 blockSize = None
477 536 profilesPerBlock = None
478 537 dataBlocksPerFile = None
479 538 nWindows = None
480 539 processFlags = None
481 540 nCohInt = None
482 541 nIncohInt = None
483 542 totalSpectra = None
484 543
485 544 flag_dc = None
486 545 flag_cspc = None
487 546
488 547 def __init__(self):
489 548
490 549 # self.size = 0
491 550 self.dtype = 0
492 551 self.blockSize = 0
493 552 self.profilesPerBlock = 0
494 553 self.dataBlocksPerFile = 0
495 554 self.nWindows = 0
496 555 self.processFlags = 0
497 556 self.nCohInt = 0
498 557 self.nIncohInt = 0
499 558 self.totalSpectra = 0
500 559
501 560 self.nHeights = 0
502 561 self.firstHeight = 0
503 562 self.deltaHeight = 0
504 563 self.samplesWin = 0
505 564 self.spectraComb = 0
506 565 self.nCode = None
507 566 self.code = None
508 567 self.nBaud = None
509 568
510 569 self.shif_fft = False
511 570 self.flag_dc = False
512 571 self.flag_cspc = False
513 572 self.flag_decode = False
514 573 self.flag_deflip = False
515
574 self.length = 0
516 575 def read(self, fp):
517
518 startFp = fp.tell()
576 self.length = 0
577 try:
578 startFp = fp.tell()
579 except Exception, e:
580 startFp = None
581 pass
519 582
520 583 try:
521 header = numpy.fromfile(fp,PROCESSING_STRUCTURE,1)
584 if hasattr(fp, 'read'):
585 header = numpy.fromfile(fp, PROCESSING_STRUCTURE, 1)
586 else:
587 header = numpy.fromstring(fp, PROCESSING_STRUCTURE, 1)
588 self.length += header.nbytes
522 589 except Exception, e:
523 print "ProcessingHeader: " + e
590 print "ProcessingHeader: " + str(e)
524 591 return 0
525 592
526 593 size = int(header['nSize'][0])
527 594 self.dtype = int(header['nDataType'][0])
528 595 self.blockSize = int(header['nSizeOfDataBlock'][0])
529 596 self.profilesPerBlock = int(header['nProfilesperBlock'][0])
530 597 self.dataBlocksPerFile = int(header['nDataBlocksperFile'][0])
531 598 self.nWindows = int(header['nNumWindows'][0])
532 599 self.processFlags = header['nProcessFlags']
533 600 self.nCohInt = int(header['nCoherentIntegrations'][0])
534 601 self.nIncohInt = int(header['nIncoherentIntegrations'][0])
535 602 self.totalSpectra = int(header['nTotalSpectra'][0])
536 603
537 samplingWindow = numpy.fromfile(fp,SAMPLING_STRUCTURE,self.nWindows)
604 try:
605 if hasattr(fp, 'read'):
606 samplingWindow = numpy.fromfile(fp, SAMPLING_STRUCTURE, self.nWindows)
607 else:
608 samplingWindow = numpy.fromstring(fp[self.length:], SAMPLING_STRUCTURE, self.nWindows)
609 self.length += samplingWindow.nbytes
610 except Exception, e:
611 print "ProcessingHeader: " + str(e)
612 return 0
538 613
539 614 self.nHeights = int(numpy.sum(samplingWindow['nsa']))
540 615 self.firstHeight = float(samplingWindow['h0'][0])
541 616 self.deltaHeight = float(samplingWindow['dh'][0])
542 617 self.samplesWin = samplingWindow['nsa'][0]
543 618
544 self.spectraComb = numpy.fromfile(fp,'u1',2*self.totalSpectra)
619
620 try:
621 if hasattr(fp, 'read'):
622 self.spectraComb = numpy.fromfile(fp, 'u1', 2*self.totalSpectra)
623 else:
624 self.spectraComb = numpy.fromstring(fp[self.length:], 'u1', 2*self.totalSpectra)
625 self.length += self.spectraComb.nbytes
626 except Exception, e:
627 print "ProcessingHeader: " + str(e)
628 return 0
545 629
546 630 if ((self.processFlags & PROCFLAG.DEFINE_PROCESS_CODE) == PROCFLAG.DEFINE_PROCESS_CODE):
547 631 self.nCode = int(numpy.fromfile(fp,'<u4',1))
548 632 self.nBaud = int(numpy.fromfile(fp,'<u4',1))
549 633 self.code = numpy.fromfile(fp,'<f4',self.nCode*self.nBaud).reshape(self.nCode,self.nBaud)
550 634
551 635 if ((self.processFlags & PROCFLAG.EXP_NAME_ESP) == PROCFLAG.EXP_NAME_ESP):
552 636 exp_name_len = int(numpy.fromfile(fp,'<u4',1))
553 637 exp_name = numpy.fromfile(fp,'u1',exp_name_len+1)
554 638
555 639 if ((self.processFlags & PROCFLAG.SHIFT_FFT_DATA) == PROCFLAG.SHIFT_FFT_DATA):
556 640 self.shif_fft = True
557 641 else:
558 642 self.shif_fft = False
559 643
560 644 if ((self.processFlags & PROCFLAG.SAVE_CHANNELS_DC) == PROCFLAG.SAVE_CHANNELS_DC):
561 645 self.flag_dc = True
562 646 else:
563 647 self.flag_dc = False
564 648
565 649 if ((self.processFlags & PROCFLAG.DECODE_DATA) == PROCFLAG.DECODE_DATA):
566 650 self.flag_decode = True
567 651 else:
568 652 self.flag_decode = False
569 653
570 654 if ((self.processFlags & PROCFLAG.DEFLIP_DATA) == PROCFLAG.DEFLIP_DATA):
571 655 self.flag_deflip = True
572 656 else:
573 657 self.flag_deflip = False
574 658
575 659 nChannels = 0
576 660 nPairs = 0
577 661 pairList = []
578 662
579 663 for i in range( 0, self.totalSpectra*2, 2 ):
580 664 if self.spectraComb[i] == self.spectraComb[i+1]:
581 665 nChannels = nChannels + 1 #par de canales iguales
582 666 else:
583 667 nPairs = nPairs + 1 #par de canales diferentes
584 668 pairList.append( (self.spectraComb[i], self.spectraComb[i+1]) )
585 669
586 670 self.flag_cspc = False
587 671 if nPairs > 0:
588 672 self.flag_cspc = True
589 673
590 endFp = size + startFp
591 674
592 if fp.tell() > endFp:
593 sys.stderr.write("Warning: Processing header size is lower than it has to be")
594 return 0
595
596 if fp.tell() < endFp:
597 sys.stderr.write("Warning: Processing header size is greater than it is considered")
675
676 if startFp is not None:
677 endFp = size + startFp
678 if fp.tell() > endFp:
679 sys.stderr.write("Warning: Processing header size is lower than it has to be")
680 return 0
681
682 if fp.tell() < endFp:
683 sys.stderr.write("Warning: Processing header size is greater than it is considered")
598 684
599 685 return 1
600 686
601 687 def write(self, fp):
602 688 #Clear DEFINE_PROCESS_CODE
603 689 self.processFlags = self.processFlags & (~PROCFLAG.DEFINE_PROCESS_CODE)
604 690
605 691 headerTuple = (self.size,
606 692 self.dtype,
607 693 self.blockSize,
608 694 self.profilesPerBlock,
609 695 self.dataBlocksPerFile,
610 696 self.nWindows,
611 697 self.processFlags,
612 698 self.nCohInt,
613 699 self.nIncohInt,
614 700 self.totalSpectra)
615 701
616 702 header = numpy.array(headerTuple,PROCESSING_STRUCTURE)
617 703 header.tofile(fp)
618 704
619 705 if self.nWindows != 0:
620 706 sampleWindowTuple = (self.firstHeight,self.deltaHeight,self.samplesWin)
621 707 samplingWindow = numpy.array(sampleWindowTuple,SAMPLING_STRUCTURE)
622 708 samplingWindow.tofile(fp)
623 709
624 710 if self.totalSpectra != 0:
625 711 # spectraComb = numpy.array([],numpy.dtype('u1'))
626 712 spectraComb = self.spectraComb
627 713 spectraComb.tofile(fp)
628 714
629 715 # if self.processFlags & PROCFLAG.DEFINE_PROCESS_CODE == PROCFLAG.DEFINE_PROCESS_CODE:
630 716 # nCode = numpy.array([self.nCode], numpy.dtype('u4')) #Probar con un dato que almacene codigo, hasta el momento no se hizo la prueba
631 717 # nCode.tofile(fp)
632 718 #
633 719 # nBaud = numpy.array([self.nBaud], numpy.dtype('u4'))
634 720 # nBaud.tofile(fp)
635 721 #
636 722 # code = self.code.reshape(self.nCode*self.nBaud)
637 723 # code = code.astype(numpy.dtype('<f4'))
638 724 # code.tofile(fp)
639 725
640 726 return 1
641 727
642 728 def get_size(self):
643 729
644 730 self.__size = 40 + 12*self.nWindows + 2*self.totalSpectra
645 731
646 732 # if self.processFlags & PROCFLAG.DEFINE_PROCESS_CODE == PROCFLAG.DEFINE_PROCESS_CODE:
647 733 # self.__size += 4 + 4 + 4*self.nCode*numpy.ceil(self.nBaud/32.)
648 734 # self.__size += 4 + 4 + 4 * self.nCode * self.nBaud
649 735
650 736 return self.__size
651 737
652 738 def set_size(self, value):
653 739
654 740 raise IOError, "size is a property and it cannot be set, just read"
655 741
656 742 return
657 743
658 744 size = property(get_size, set_size)
659 745
660 746 class RCfunction:
661 747 NONE=0
662 748 FLIP=1
663 749 CODE=2
664 750 SAMPLING=3
665 751 LIN6DIV256=4
666 752 SYNCHRO=5
667 753
668 754 class nCodeType:
669 755 NONE=0
670 756 USERDEFINE=1
671 757 BARKER2=2
672 758 BARKER3=3
673 759 BARKER4=4
674 760 BARKER5=5
675 761 BARKER7=6
676 762 BARKER11=7
677 763 BARKER13=8
678 764 AC128=9
679 765 COMPLEMENTARYCODE2=10
680 766 COMPLEMENTARYCODE4=11
681 767 COMPLEMENTARYCODE8=12
682 768 COMPLEMENTARYCODE16=13
683 769 COMPLEMENTARYCODE32=14
684 770 COMPLEMENTARYCODE64=15
685 771 COMPLEMENTARYCODE128=16
686 772 CODE_BINARY28=17
687 773
688 774 class PROCFLAG:
689 775
690 776 COHERENT_INTEGRATION = numpy.uint32(0x00000001)
691 777 DECODE_DATA = numpy.uint32(0x00000002)
692 778 SPECTRA_CALC = numpy.uint32(0x00000004)
693 779 INCOHERENT_INTEGRATION = numpy.uint32(0x00000008)
694 780 POST_COHERENT_INTEGRATION = numpy.uint32(0x00000010)
695 781 SHIFT_FFT_DATA = numpy.uint32(0x00000020)
696 782
697 783 DATATYPE_CHAR = numpy.uint32(0x00000040)
698 784 DATATYPE_SHORT = numpy.uint32(0x00000080)
699 785 DATATYPE_LONG = numpy.uint32(0x00000100)
700 786 DATATYPE_INT64 = numpy.uint32(0x00000200)
701 787 DATATYPE_FLOAT = numpy.uint32(0x00000400)
702 788 DATATYPE_DOUBLE = numpy.uint32(0x00000800)
703 789
704 790 DATAARRANGE_CONTIGUOUS_CH = numpy.uint32(0x00001000)
705 791 DATAARRANGE_CONTIGUOUS_H = numpy.uint32(0x00002000)
706 792 DATAARRANGE_CONTIGUOUS_P = numpy.uint32(0x00004000)
707 793
708 794 SAVE_CHANNELS_DC = numpy.uint32(0x00008000)
709 795 DEFLIP_DATA = numpy.uint32(0x00010000)
710 796 DEFINE_PROCESS_CODE = numpy.uint32(0x00020000)
711 797
712 798 ACQ_SYS_NATALIA = numpy.uint32(0x00040000)
713 799 ACQ_SYS_ECHOTEK = numpy.uint32(0x00080000)
714 800 ACQ_SYS_ADRXD = numpy.uint32(0x000C0000)
715 801 ACQ_SYS_JULIA = numpy.uint32(0x00100000)
716 802 ACQ_SYS_XXXXXX = numpy.uint32(0x00140000)
717 803
718 804 EXP_NAME_ESP = numpy.uint32(0x00200000)
719 805 CHANNEL_NAMES_ESP = numpy.uint32(0x00400000)
720 806
721 807 OPERATION_MASK = numpy.uint32(0x0000003F)
722 808 DATATYPE_MASK = numpy.uint32(0x00000FC0)
723 809 DATAARRANGE_MASK = numpy.uint32(0x00007000)
724 810 ACQ_SYS_MASK = numpy.uint32(0x001C0000)
725 811
726 812 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
727 813 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
728 814 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
729 815 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
730 816 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
731 817 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
732 818
733 819 NUMPY_DTYPE_LIST = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
734 820
735 821 PROCFLAG_DTYPE_LIST = [PROCFLAG.DATATYPE_CHAR,
736 822 PROCFLAG.DATATYPE_SHORT,
737 823 PROCFLAG.DATATYPE_LONG,
738 824 PROCFLAG.DATATYPE_INT64,
739 825 PROCFLAG.DATATYPE_FLOAT,
740 826 PROCFLAG.DATATYPE_DOUBLE]
741 827
742 828 DTYPE_WIDTH = [1, 2, 4, 8, 4, 8]
743 829
744 830 def get_dtype_index(numpy_dtype):
745 831
746 832 index = None
747 833
748 834 for i in range(len(NUMPY_DTYPE_LIST)):
749 835 if numpy_dtype == NUMPY_DTYPE_LIST[i]:
750 836 index = i
751 837 break
752 838
753 839 return index
754 840
755 841 def get_numpy_dtype(index):
756 842
757 843 return NUMPY_DTYPE_LIST[index]
758 844
759 845 def get_procflag_dtype(index):
760 846
761 847 return PROCFLAG_DTYPE_LIST[index]
762 848
763 849 def get_dtype_width(index):
764 850
765 851 return DTYPE_WIDTH[index] No newline at end of file
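
The recurring change in the header classes above is that read() no longer assumes an open file: when the input has no read attribute it is treated as a raw byte string (the rawdata frame received over ZMQ) and parsed with numpy.fromstring, while self.length accumulates the bytes consumed so later fields can be sliced from fp[self.length:]. A minimal stand-alone sketch of that dispatch follows (not part of the commit; the helper name read_basic_header is illustrative only, and the dtype is copied from the module above):

import numpy

BASIC_STRUCTURE = numpy.dtype([
    ('nSize', '<u4'), ('nVersion', '<u2'), ('nDataBlockId', '<u4'),
    ('nUtime', '<u4'), ('nMilsec', '<u2'), ('nTimezone', '<i2'),
    ('nDstflag', '<i2'), ('nErrorCount', '<u4')
])

def read_basic_header(source):
    # Accept either a file-like object or a raw byte buffer.
    if hasattr(source, 'read'):
        # File input: numpy advances the file pointer by itself.
        header = numpy.fromfile(source, BASIC_STRUCTURE, 1)
    else:
        # Buffer input (e.g. a ZMQ frame): parse the string in place.
        header = numpy.fromstring(source, BASIC_STRUCTURE, 1)
    length = header.nbytes  # bytes consumed; the role self.length plays above
    return header, length
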
@@ -1,225 +1,225 @@
1 1 '''
2 2 Created on Jul 9, 2014
3 3
4 4 @author: roj-idl71
5 5 '''
6 6 import os
7 7 import datetime
8 8 import numpy
9 9
10 10 from figure import Figure
11 11
12 12 class Scope(Figure):
13 13
14 14 isConfig = None
15 15
16 def __init__(self):
17
16 def __init__(self, **kwargs):
17 Figure.__init__(self, **kwargs)
18 18 self.isConfig = False
19 19 self.WIDTH = 300
20 20 self.HEIGHT = 200
21 21 self.counter_imagwr = 0
22 22
23 23 def getSubplots(self):
24 24
25 25 nrow = self.nplots
26 26 ncol = 3
27 27 return nrow, ncol
28 28
29 29 def setup(self, id, nplots, wintitle, show):
30 30
31 31 self.nplots = nplots
32 32
33 33 self.createFigure(id=id,
34 34 wintitle=wintitle,
35 35 show=show)
36 36
37 37 nrow,ncol = self.getSubplots()
38 38 colspan = 3
39 39 rowspan = 1
40 40
41 41 for i in range(nplots):
42 42 self.addAxes(nrow, ncol, i, 0, colspan, rowspan)
43 43
44 44 def plot_iq(self, x, y, id, channelIndexList, thisDatetime, wintitle, show, xmin, xmax, ymin, ymax):
45 45 yreal = y[channelIndexList,:].real
46 46 yimag = y[channelIndexList,:].imag
47 47
48 48 title = wintitle + " Scope: %s" %(thisDatetime.strftime("%d-%b-%Y %H:%M:%S"))
49 49 xlabel = "Range (Km)"
50 50 ylabel = "Intensity - IQ"
51 51
52 52 if not self.isConfig:
53 53 nplots = len(channelIndexList)
54 54
55 55 self.setup(id=id,
56 56 nplots=nplots,
57 57 wintitle='',
58 58 show=show)
59 59
60 60 if xmin == None: xmin = numpy.nanmin(x)
61 61 if xmax == None: xmax = numpy.nanmax(x)
62 62 if ymin == None: ymin = min(numpy.nanmin(yreal),numpy.nanmin(yimag))
63 63 if ymax == None: ymax = max(numpy.nanmax(yreal),numpy.nanmax(yimag))
64 64
65 65 self.isConfig = True
66 66
67 67 self.setWinTitle(title)
68 68
69 69 for i in range(len(self.axesList)):
70 70 title = "Channel %d" %(i)
71 71 axes = self.axesList[i]
72 72
73 73 axes.pline(x, yreal[i,:],
74 74 xmin=xmin, xmax=xmax, ymin=ymin, ymax=ymax,
75 75 xlabel=xlabel, ylabel=ylabel, title=title)
76 76
77 77 axes.addpline(x, yimag[i,:], idline=1, color="red", linestyle="solid", lw=2)
78 78
79 79 def plot_power(self, x, y, id, channelIndexList, thisDatetime, wintitle, show, xmin, xmax, ymin, ymax):
80 80 y = y[channelIndexList,:] * numpy.conjugate(y[channelIndexList,:])
81 81 yreal = y.real
82 82
83 83 title = wintitle + " Scope: %s" %(thisDatetime.strftime("%d-%b-%Y %H:%M:%S"))
84 84 xlabel = "Range (Km)"
85 85 ylabel = "Intensity"
86 86
87 87 if not self.isConfig:
88 88 nplots = len(channelIndexList)
89 89
90 90 self.setup(id=id,
91 91 nplots=nplots,
92 92 wintitle='',
93 93 show=show)
94 94
95 95 if xmin == None: xmin = numpy.nanmin(x)
96 96 if xmax == None: xmax = numpy.nanmax(x)
97 97 if ymin == None: ymin = numpy.nanmin(yreal)
98 98 if ymax == None: ymax = numpy.nanmax(yreal)
99 99
100 100 self.isConfig = True
101 101
102 102 self.setWinTitle(title)
103 103
104 104 for i in range(len(self.axesList)):
105 105 title = "Channel %d" %(i)
106 106 axes = self.axesList[i]
107 107 ychannel = yreal[i,:]
108 108 axes.pline(x, ychannel,
109 109 xmin=xmin, xmax=xmax, ymin=ymin, ymax=ymax,
110 110 xlabel=xlabel, ylabel=ylabel, title=title)
111 111
112 112
113 113 def run(self, dataOut, id, wintitle="", channelList=None,
114 114 xmin=None, xmax=None, ymin=None, ymax=None, save=False,
115 115 figpath='./', figfile=None, show=True, wr_period=1,
116 116 ftp=False, server=None, folder=None, username=None, password=None, type='power'):
117 117
118 118 """
119 119
120 120 Input:
121 121 dataOut :
122 122 id :
123 123 wintitle :
124 124 channelList :
125 125 xmin : None,
126 126 xmax : None,
127 127 ymin : None,
128 128 ymax : None,
129 129 """
130 130
131 131 if channelList == None:
132 132 channelIndexList = dataOut.channelIndexList
133 133 else:
134 134 channelIndexList = []
135 135 for channel in channelList:
136 136 if channel not in dataOut.channelList:
137 137 raise ValueError, "Channel %d is not in dataOut.channelList"
138 138 channelIndexList.append(dataOut.channelList.index(channel))
139 139
140 140 thisDatetime = datetime.datetime.utcfromtimestamp(dataOut.getTimeRange()[0])
141 141
142 142 if dataOut.flagDataAsBlock:
143 143
144 144 for i in range(dataOut.nProfiles):
145 145
146 146 wintitle1 = wintitle + " [Profile = %d] " %i
147 147
148 148 if type == "power":
149 149 self.plot_power(dataOut.heightList,
150 150 dataOut.data[:,i,:],
151 151 id,
152 152 channelIndexList,
153 153 thisDatetime,
154 154 wintitle1,
155 155 show,
156 156 xmin,
157 157 xmax,
158 158 ymin,
159 159 ymax)
160 160
161 161 if type == "iq":
162 162 self.plot_iq(dataOut.heightList,
163 163 dataOut.data[:,i,:],
164 164 id,
165 165 channelIndexList,
166 166 thisDatetime,
167 167 wintitle1,
168 168 show,
169 169 xmin,
170 170 xmax,
171 171 ymin,
172 172 ymax)
173 173
174 174 self.draw()
175 175
176 176 str_datetime = thisDatetime.strftime("%Y%m%d_%H%M%S")
177 177 figfile = self.getFilename(name = str_datetime) + "_" + str(i)
178 178
179 179 self.save(figpath=figpath,
180 180 figfile=figfile,
181 181 save=save,
182 182 ftp=ftp,
183 183 wr_period=wr_period,
184 184 thisDatetime=thisDatetime)
185 185
186 186 else:
187 187 wintitle += " [Profile = %d] " %dataOut.profileIndex
188 188
189 189 if type == "power":
190 190 self.plot_power(dataOut.heightList,
191 191 dataOut.data,
192 192 id,
193 193 channelIndexList,
194 194 thisDatetime,
195 195 wintitle,
196 196 show,
197 197 xmin,
198 198 xmax,
199 199 ymin,
200 200 ymax)
201 201
202 202 if type == "iq":
203 203 self.plot_iq(dataOut.heightList,
204 204 dataOut.data,
205 205 id,
206 206 channelIndexList,
207 207 thisDatetime,
208 208 wintitle,
209 209 show,
210 210 xmin,
211 211 xmax,
212 212 ymin,
213 213 ymax)
214 214
215 215 self.draw()
216 216
217 217 str_datetime = thisDatetime.strftime("%Y%m%d_%H%M%S") + "_" + str(dataOut.profileIndex)
218 218 figfile = self.getFilename(name = str_datetime)
219 219
220 220 self.save(figpath=figpath,
221 221 figfile=figfile,
222 222 save=save,
223 223 ftp=ftp,
224 224 wr_period=wr_period,
225 225 thisDatetime=thisDatetime)
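
The only functional change in the Scope plot above is the constructor: it now accepts **kwargs and forwards them to Figure.__init__ instead of discarding them, so options given when the plotting unit is created reach the base class. A minimal sketch of the forwarding pattern (illustrative class names, not the real schainpy Figure API):

class Base(object):
    def __init__(self, **kwargs):
        # The base class keeps whatever shared options the caller provided.
        self.options = kwargs

class Child(Base):
    def __init__(self, **kwargs):
        Base.__init__(self, **kwargs)  # forward instead of dropping
        self.isConfig = False

unit = Child(show=False, wintitle='scope')
assert unit.options == {'show': False, 'wintitle': 'scope'}
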
@@ -1,1814 +1,1816 @@
1 1 '''
2 2 Created on Jul 2, 2014
3 3
4 4 @author: roj-idl71
5 5 '''
6 6 import os
7 7 import sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import inspect
13 13 import time, datetime
14 14 import traceback
15 15 import zmq
16 16
17 17 try:
18 18 from gevent import sleep
19 19 except:
20 20 from time import sleep
21 21
22 22 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
23 23 from schainpy.model.data.jroheaderIO import get_dtype_index, get_numpy_dtype, get_procflag_dtype, get_dtype_width
24 24
25 25 LOCALTIME = True
26 26
27 27 def isNumber(cad):
28 28 """
29 29 Chequea si el conjunto de caracteres que componen un string puede ser convertidos a un numero.
30 30
31 31 Excepciones:
32 32 Si un determinado string no puede ser convertido a numero
33 33 Input:
34 34 str, string al cual se le analiza para determinar si convertible a un numero o no
35 35
36 36 Return:
37 37 True : si el string es uno numerico
38 38 False : no es un string numerico
39 39 """
40 40 try:
41 41 float( cad )
42 42 return True
43 43 except:
44 44 return False
45 45
46 46 def isFileInEpoch(filename, startUTSeconds, endUTSeconds):
47 47 """
48 48 Esta funcion determina si un archivo de datos se encuentra o no dentro del rango de fecha especificado.
49 49
50 50 Inputs:
51 51 filename : nombre completo del archivo de datos en formato Jicamarca (.r)
52 52
53 53 startUTSeconds : fecha inicial del rango seleccionado. La fecha esta dada en
54 54 segundos contados desde 01/01/1970.
55 55 endUTSeconds : fecha final del rango seleccionado. La fecha esta dada en
56 56 segundos contados desde 01/01/1970.
57 57
58 58 Return:
59 59 Boolean : Retorna True si el archivo de datos contiene datos en el rango de
60 60 fecha especificado, de lo contrario retorna False.
61 61
62 62 Excepciones:
63 63 Si el archivo no existe o no puede ser abierto
64 64 Si la cabecera no puede ser leida.
65 65
66 66 """
67 67 basicHeaderObj = BasicHeader(LOCALTIME)
68 68
69 69 try:
70 70 fp = open(filename,'rb')
71 71 except IOError:
72 72 print "The file %s can't be opened" %(filename)
73 73 return 0
74 74
75 75 sts = basicHeaderObj.read(fp)
76 76 fp.close()
77 77
78 78 if not(sts):
79 79 print "Skipping the file %s because it has not a valid header" %(filename)
80 80 return 0
81 81
82 82 if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
83 83 return 0
84 84
85 85 return 1
86 86
87 87 def isTimeInRange(thisTime, startTime, endTime):
88 88
89 89 if endTime >= startTime:
90 90 if (thisTime < startTime) or (thisTime > endTime):
91 91 return 0
92 92
93 93 return 1
94 94 else:
95 95 if (thisTime < startTime) and (thisTime > endTime):
96 96 return 0
97 97
98 98 return 1
99 99
100 100 def isFileInTimeRange(filename, startDate, endDate, startTime, endTime):
101 101 """
102 102 Retorna 1 si el archivo de datos se encuentra dentro del rango de horas especificado.
103 103
104 104 Inputs:
105 105 filename : nombre completo del archivo de datos en formato Jicamarca (.r)
106 106
107 107 startDate : fecha inicial del rango seleccionado en formato datetime.date
108 108
109 109 endDate : fecha final del rango seleccionado en formato datetime.date
110 110
111 111 startTime : tiempo inicial del rango seleccionado en formato datetime.time
112 112
113 113 endTime : tiempo final del rango seleccionado en formato datetime.time
114 114
115 115 Return:
116 116 Boolean : Retorna True si el archivo de datos contiene datos en el rango de
117 117 fecha especificado, de lo contrario retorna False.
118 118
119 119 Excepciones:
120 120 Si el archivo no existe o no puede ser abierto
121 121 Si la cabecera no puede ser leida.
122 122
123 123 """
124 124
125 125
126 126 try:
127 127 fp = open(filename,'rb')
128 128 except IOError:
129 129 print "The file %s can't be opened" %(filename)
130 130 return None
131 131
132 132 firstBasicHeaderObj = BasicHeader(LOCALTIME)
133 133 systemHeaderObj = SystemHeader()
134 134 radarControllerHeaderObj = RadarControllerHeader()
135 135 processingHeaderObj = ProcessingHeader()
136 136
137 137 lastBasicHeaderObj = BasicHeader(LOCALTIME)
138 138
139 139 sts = firstBasicHeaderObj.read(fp)
140 140
141 141 if not(sts):
142 142 print "[Reading] Skipping the file %s because it has not a valid header" %(filename)
143 143 return None
144 144
145 145 if not systemHeaderObj.read(fp):
146 146 return None
147 147
148 148 if not radarControllerHeaderObj.read(fp):
149 149 return None
150 150
151 151 if not processingHeaderObj.read(fp):
152 152 return None
153 153
154 154 filesize = os.path.getsize(filename)
155 155
156 156 offset = processingHeaderObj.blockSize + 24 #header size
157 157
158 158 if filesize <= offset:
159 159 print "[Reading] %s: This file has not enough data" %filename
160 160 return None
161 161
162 162 fp.seek(-offset, 2)
163 163
164 164 sts = lastBasicHeaderObj.read(fp)
165 165
166 166 fp.close()
167 167
168 168 thisDatetime = lastBasicHeaderObj.datatime
169 169 thisTime_last_block = thisDatetime.time()
170 170
171 171 thisDatetime = firstBasicHeaderObj.datatime
172 172 thisDate = thisDatetime.date()
173 173 thisTime_first_block = thisDatetime.time()
174 174
175 175 #General case
176 176 # o>>>>>>>>>>>>>><<<<<<<<<<<<<<o
177 177 #-----------o----------------------------o-----------
178 178 # startTime endTime
179 179
180 180 if endTime >= startTime:
181 181 if (thisTime_last_block < startTime) or (thisTime_first_block > endTime):
182 182 return None
183 183
184 184 return thisDatetime
185 185
186 186 #If endTime < startTime then endTime belongs to the next day
187 187
188 188
189 189 #<<<<<<<<<<<o o>>>>>>>>>>>
190 190 #-----------o----------------------------o-----------
191 191 # endTime startTime
192 192
193 193 if (thisDate == startDate) and (thisTime_last_block < startTime):
194 194 return None
195 195
196 196 if (thisDate == endDate) and (thisTime_first_block > endTime):
197 197 return None
198 198
199 199 if (thisTime_last_block < startTime) and (thisTime_first_block > endTime):
200 200 return None
201 201
202 202 return thisDatetime
203 203
204 204 def isFolderInDateRange(folder, startDate=None, endDate=None):
205 205 """
206 206 Retorna 1 si el archivo de datos se encuentra dentro del rango de horas especificado.
207 207
208 208 Inputs:
209 209 folder : nombre completo del directorio.
210 210 Su formato deberia ser "/path_root/?YYYYDDD"
211 211
212 212 siendo:
213 213 YYYY : Anio (ejemplo 2015)
214 214 DDD : Dia del anio (ejemplo 305)
215 215
216 216 startDate : fecha inicial del rango seleccionado en formato datetime.date
217 217
218 218 endDate : fecha final del rango seleccionado en formato datetime.date
219 219
220 220 Return:
221 221 Boolean : Retorna True si el archivo de datos contiene datos en el rango de
222 222 fecha especificado, de lo contrario retorna False.
223 223 Excepciones:
224 224 Si el directorio no tiene el formato adecuado
225 225 """
226 226
227 227 basename = os.path.basename(folder)
228 228
229 229 if not isRadarFolder(basename):
230 230 print "The folder %s has not the rigth format" %folder
231 231 return 0
232 232
233 233 if startDate and endDate:
234 234 thisDate = getDateFromRadarFolder(basename)
235 235
236 236 if thisDate < startDate:
237 237 return 0
238 238
239 239 if thisDate > endDate:
240 240 return 0
241 241
242 242 return 1
243 243
244 244 def isFileInDateRange(filename, startDate=None, endDate=None):
245 245 """
246 246 Retorna 1 si el archivo de datos se encuentra dentro del rango de horas especificado.
247 247
248 248 Inputs:
249 249 filename : nombre completo del archivo de datos en formato Jicamarca (.r)
250 250
251 251 Su formato deberia ser "?YYYYDDDsss"
252 252
253 253 siendo:
254 254 YYYY : Anio (ejemplo 2015)
255 255 DDD : Dia del anio (ejemplo 305)
256 256 sss : set
257 257
258 258 startDate : fecha inicial del rango seleccionado en formato datetime.date
259 259
260 260 endDate : fecha final del rango seleccionado en formato datetime.date
261 261
262 262 Return:
263 263 Boolean : Retorna True si el archivo de datos contiene datos en el rango de
264 264 fecha especificado, de lo contrario retorna False.
265 265 Excepciones:
266 266 Si el archivo no tiene el formato adecuado
267 267 """
268 268
269 269 basename = os.path.basename(filename)
270 270
271 271 if not isRadarFile(basename):
272 272 print "The filename %s has not the rigth format" %filename
273 273 return 0
274 274
275 275 if startDate and endDate:
276 276 thisDate = getDateFromRadarFile(basename)
277 277
278 278 if thisDate < startDate:
279 279 return 0
280 280
281 281 if thisDate > endDate:
282 282 return 0
283 283
284 284 return 1
285 285
286 286 def getFileFromSet(path, ext, set):
287 287 validFilelist = []
288 288 fileList = os.listdir(path)
289 289
290 290 # 0 1234 567 89A BCDE
291 291 # H YYYY DDD SSS .ext
292 292
293 293 for thisFile in fileList:
294 294 try:
295 295 year = int(thisFile[1:5])
296 296 doy = int(thisFile[5:8])
297 297 except:
298 298 continue
299 299
300 300 if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
301 301 continue
302 302
303 303 validFilelist.append(thisFile)
304 304
305 305 myfile = fnmatch.filter(validFilelist,'*%4.4d%3.3d%3.3d*'%(year,doy,set))
306 306
307 307 if len(myfile)!= 0:
308 308 return myfile[0]
309 309 else:
310 310 filename = '*%4.4d%3.3d%3.3d%s'%(year,doy,set,ext.lower())
311 311 print 'the filename %s does not exist'%filename
312 312 print '...going to the last file: '
313 313
314 314 if validFilelist:
315 315 validFilelist = sorted( validFilelist, key=str.lower )
316 316 return validFilelist[-1]
317 317
318 318 return None
319 319
320 320 def getlastFileFromPath(path, ext):
321 321 """
322 322 Depura el fileList dejando solo los que cumplan el formato de "PYYYYDDDSSS.ext"
323 323 al final de la depuracion devuelve el ultimo file de la lista que quedo.
324 324
325 325 Input:
326 326 fileList : lista conteniendo todos los files (sin path) que componen una determinada carpeta
327 327 ext : extension de los files contenidos en una carpeta
328 328
329 329 Return:
330 330 El ultimo file de una determinada carpeta, no se considera el path.
331 331 """
332 332 validFilelist = []
333 333 fileList = os.listdir(path)
334 334
335 335 # 0 1234 567 89A BCDE
336 336 # H YYYY DDD SSS .ext
337 337
338 338 for thisFile in fileList:
339 339
340 340 year = thisFile[1:5]
341 341 if not isNumber(year):
342 342 continue
343 343
344 344 doy = thisFile[5:8]
345 345 if not isNumber(doy):
346 346 continue
347 347
348 348 year = int(year)
349 349 doy = int(doy)
350 350
351 351 if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
352 352 continue
353 353
354 354 validFilelist.append(thisFile)
355 355
356 356 if validFilelist:
357 357 validFilelist = sorted( validFilelist, key=str.lower )
358 358 return validFilelist[-1]
359 359
360 360 return None
361 361
362 362 def checkForRealPath(path, foldercounter, year, doy, set, ext):
363 363 """
364 364 Por ser Linux Case Sensitive entonces checkForRealPath encuentra el nombre correcto de un path,
365 365 Prueba por varias combinaciones de nombres entre mayusculas y minusculas para determinar
366 366 el path exacto de un determinado file.
367 367
368 368 Example :
369 369 nombre correcto del file es .../.../D2009307/P2009307367.ext
370 370
371 371 Entonces la funcion prueba con las siguientes combinaciones
372 372 .../.../y2009307367.ext
373 373 .../.../Y2009307367.ext
374 374 .../.../x2009307/y2009307367.ext
375 375 .../.../x2009307/Y2009307367.ext
376 376 .../.../X2009307/y2009307367.ext
377 377 .../.../X2009307/Y2009307367.ext
378 378 siendo para este caso, la ultima combinacion de letras, identica al file buscado
379 379
380 380 Return:
381 381 Si encuentra la cobinacion adecuada devuelve el path completo y el nombre del file
382 382 caso contrario devuelve None como path y el la ultima combinacion de nombre en mayusculas
383 383 para el filename
384 384 """
385 385 fullfilename = None
386 386 find_flag = False
387 387 filename = None
388 388
389 389 prefixDirList = [None,'d','D']
390 390 if ext.lower() == ".r": #voltage
391 391 prefixFileList = ['d','D']
392 392 elif ext.lower() == ".pdata": #spectra
393 393 prefixFileList = ['p','P']
394 394 else:
395 395 return None, filename
396 396
397 397 #barrido por las combinaciones posibles
398 398 for prefixDir in prefixDirList:
399 399 thispath = path
400 400 if prefixDir != None:
401 401 #formo el nombre del directorio xYYYYDDD (x=d o x=D)
402 402 if foldercounter == 0:
403 403 thispath = os.path.join(path, "%s%04d%03d" % ( prefixDir, year, doy ))
404 404 else:
405 405 thispath = os.path.join(path, "%s%04d%03d_%02d" % ( prefixDir, year, doy , foldercounter))
406 406 for prefixFile in prefixFileList: #barrido por las dos combinaciones posibles de "D"
407 407 filename = "%s%04d%03d%03d%s" % ( prefixFile, year, doy, set, ext ) #formo el nombre del file xYYYYDDDSSS.ext
408 408 fullfilename = os.path.join( thispath, filename ) #formo el path completo
409 409
410 410 if os.path.exists( fullfilename ): #verifico que exista
411 411 find_flag = True
412 412 break
413 413 if find_flag:
414 414 break
415 415
416 416 if not(find_flag):
417 417 return None, filename
418 418
419 419 return fullfilename, filename
420 420
421 421 def isRadarFolder(folder):
422 422 try:
423 423 year = int(folder[1:5])
424 424 doy = int(folder[5:8])
425 425 except:
426 426 return 0
427 427
428 428 return 1
429 429
430 430 def isRadarFile(file):
431 431 try:
432 432 year = int(file[1:5])
433 433 doy = int(file[5:8])
434 434 set = int(file[8:11])
435 435 except:
436 436 return 0
437 437
438 438 return 1
439 439
440 440 def getDateFromRadarFile(file):
441 441 try:
442 442 year = int(file[1:5])
443 443 doy = int(file[5:8])
444 444 set = int(file[8:11])
445 445 except:
446 446 return None
447 447
448 448 thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy-1)
449 449 return thisDate
450 450
451 451 def getDateFromRadarFolder(folder):
452 452 try:
453 453 year = int(folder[1:5])
454 454 doy = int(folder[5:8])
455 455 except:
456 456 return None
457 457
458 458 thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy-1)
459 459 return thisDate
460 460
461 461 class JRODataIO:
462 462
463 463 c = 3E8
464 464
465 465 isConfig = False
466 466
467 467 basicHeaderObj = None
468 468
469 469 systemHeaderObj = None
470 470
471 471 radarControllerHeaderObj = None
472 472
473 473 processingHeaderObj = None
474 474
475 475 dtype = None
476 476
477 477 pathList = []
478 478
479 479 filenameList = []
480 480
481 481 filename = None
482 482
483 483 ext = None
484 484
485 485 flagIsNewFile = 1
486 486
487 487 flagDiscontinuousBlock = 0
488 488
489 489 flagIsNewBlock = 0
490 490
491 491 fp = None
492 492
493 493 firstHeaderSize = 0
494 494
495 495 basicHeaderSize = 24
496 496
497 497 versionFile = 1103
498 498
499 499 fileSize = None
500 500
501 501 # ippSeconds = None
502 502
503 503 fileSizeByHeader = None
504 504
505 505 fileIndex = None
506 506
507 507 profileIndex = None
508 508
509 509 blockIndex = None
510 510
511 511 nTotalBlocks = None
512 512
513 513 maxTimeStep = 30
514 514
515 515 lastUTTime = None
516 516
517 517 datablock = None
518 518
519 519 dataOut = None
520 520
521 521 blocksize = None
522 522
523 523 getByBlock = False
524 524
525 525 def __init__(self):
526 526
527 527 raise NotImplementedError
528 528
529 529 def run(self):
530 530
531 531 raise NotImplementedError
532 532
533 533 def getDtypeWidth(self):
534 534
535 535 dtype_index = get_dtype_index(self.dtype)
536 536 dtype_width = get_dtype_width(dtype_index)
537 537
538 538 return dtype_width
539 539
540 540 def getAllowedArgs(self):
541 541 return inspect.getargspec(self.run).args
542 542
543 543 class JRODataReader(JRODataIO):
544 544
545 545
546 546 online = 0
547 547
548 548 realtime = 0
549 549
550 550 nReadBlocks = 0
551 551
552 552 delay = 10 #number of seconds waiting a new file
553 553
554 554 nTries = 3 #quantity tries
555 555
556 556 nFiles = 3 #number of files for searching
557 557
558 558 path = None
559 559
560 560 foldercounter = 0
561 561
562 562 flagNoMoreFiles = 0
563 563
564 564 datetimeList = []
565 565
566 566 __isFirstTimeOnline = 1
567 567
568 568 __printInfo = True
569 569
570 570 profileIndex = None
571 571
572 572 nTxs = 1
573 573
574 574 txIndex = None
575 575
576 576 #Added--------------------
577 577
578 578 selBlocksize = None
579 579
580 580 selBlocktime = None
581 581
582 582
583 583 def __init__(self):
584 584
585 585 """
586 586 This class is used to find data files
587 587
588 588 Example:
589 589 reader = JRODataReader()
590 590 fileList = reader.findDataFiles()
591 591
592 592 """
593 593 pass
594 594
595 595
596 596 def createObjByDefault(self):
597 597 """
598 598
599 599 """
600 600 raise NotImplementedError
601 601
602 602 def getBlockDimension(self):
603 603
604 604 raise NotImplementedError
605 605
606 606 def __searchFilesOffLine(self,
607 607 path,
608 608 startDate=None,
609 609 endDate=None,
610 610 startTime=datetime.time(0,0,0),
611 611 endTime=datetime.time(23,59,59),
612 612 set=None,
613 613 expLabel='',
614 614 ext='.r',
615 615 queue=None,
616 616 cursor=None,
617 617 skip=None,
618 618 walk=True):
619 619
620 620 self.filenameList = []
621 621 self.datetimeList = []
622 622
623 623 pathList = []
624 624
625 625 dateList, pathList = self.findDatafiles(path, startDate, endDate, expLabel, ext, walk, include_path=True)
626 626
627 627 if dateList == []:
628 628 # print "[Reading] Date range selected invalid [%s - %s]: No *%s files in %s)" %(startDate, endDate, ext, path)
629 629 return None, None
630 630
631 631 if len(dateList) > 1:
632 632 print "[Reading] Data found for date range [%s - %s]: total days = %d" %(startDate, endDate, len(dateList))
633 633 else:
634 634 print "[Reading] Data found for date range [%s - %s]: date = %s" %(startDate, endDate, dateList[0])
635 635
636 636 filenameList = []
637 637 datetimeList = []
638 638
639 639 for thisPath in pathList:
640 640 # thisPath = pathList[pathDict[file]]
641 641
642 642 fileList = glob.glob1(thisPath, "*%s" %ext)
643 643 fileList.sort()
644 644
645 645 skippedFileList = []
646 646
647 647 if cursor is not None and skip is not None:
648 648 # if cursor*skip > len(fileList):
649 649 if skip == 0:
650 650 if queue is not None:
651 651 queue.put(len(fileList))
652 652 skippedFileList = []
653 653 else:
654 654 skippedFileList = fileList[cursor*skip: cursor*skip + skip]
655 655
656 656 else:
657 657 skippedFileList = fileList
658 658
659 659 for file in skippedFileList:
660 660
661 661 filename = os.path.join(thisPath,file)
662 662
663 663 if not isFileInDateRange(filename, startDate, endDate):
664 664 continue
665 665
666 666 thisDatetime = isFileInTimeRange(filename, startDate, endDate, startTime, endTime)
667 667
668 668 if not(thisDatetime):
669 669 continue
670 670
671 671 filenameList.append(filename)
672 672 datetimeList.append(thisDatetime)
673 673
674 674 if not(filenameList):
675 675 print "[Reading] Time range selected invalid [%s - %s]: No *%s files in %s)" %(startTime, endTime, ext, path)
676 676 return None, None
677 677
678 678 print "[Reading] %d file(s) was(were) found in time range: %s - %s" %(len(filenameList), startTime, endTime)
679 679 print
680 680
681 681 for i in range(len(filenameList)):
682 682 print "[Reading] %s -> [%s]" %(filenameList[i], datetimeList[i].ctime())
683 683
684 684 self.filenameList = filenameList
685 685 self.datetimeList = datetimeList
686 686
687 687 return pathList, filenameList
688 688
689 689 def __searchFilesOnLine(self, path, expLabel = "", ext = None, walk=True, set=None):
690 690
691 691 """
692 692 Busca el ultimo archivo de la ultima carpeta (determinada o no por startDateTime) y
693 693 devuelve el archivo encontrado ademas de otros datos.
694 694
695 695 Input:
696 696 path : carpeta donde estan contenidos los files que contiene data
697 697
698 698 expLabel : Nombre del subexperimento (subfolder)
699 699
700 700 ext : extension de los files
701 701
702 702 walk : Si es habilitado no realiza busquedas dentro de los ubdirectorios (doypath)
703 703
704 704 Return:
705 705 directory : eL directorio donde esta el file encontrado
706 706 filename : el ultimo file de una determinada carpeta
707 707 year : el anho
708 708 doy : el numero de dia del anho
709 709 set : el set del archivo
710 710
711 711
712 712 """
713 713 if not os.path.isdir(path):
714 714 return None, None, None, None, None, None
715 715
716 716 dirList = []
717 717
718 718 if not walk:
719 719 fullpath = path
720 720 foldercounter = 0
721 721 else:
722 722 #Filtra solo los directorios
723 723 for thisPath in os.listdir(path):
724 724 if not os.path.isdir(os.path.join(path,thisPath)):
725 725 continue
726 726 if not isRadarFolder(thisPath):
727 727 continue
728 728
729 729 dirList.append(thisPath)
730 730
731 731 if not(dirList):
732 732 return None, None, None, None, None, None
733 733
734 734 dirList = sorted( dirList, key=str.lower )
735 735
736 736 doypath = dirList[-1]
737 737 foldercounter = int(doypath.split('_')[1]) if len(doypath.split('_'))>1 else 0
738 738 fullpath = os.path.join(path, doypath, expLabel)
739 739
740 740
741 741 print "[Reading] %s folder was found: " %(fullpath )
742 742
743 743 if set == None:
744 744 filename = getlastFileFromPath(fullpath, ext)
745 745 else:
746 746 filename = getFileFromSet(fullpath, ext, set)
747 747
748 748 if not(filename):
749 749 return None, None, None, None, None, None
750 750
751 751 print "[Reading] %s file was found" %(filename)
752 752
753 753 if not(self.__verifyFile(os.path.join(fullpath, filename))):
754 754 return None, None, None, None, None, None
755 755
756 756 year = int( filename[1:5] )
757 757 doy = int( filename[5:8] )
758 758 set = int( filename[8:11] )
759 759
760 760 return fullpath, foldercounter, filename, year, doy, set
761 761
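The slices filename[1:5], filename[5:8] and filename[8:11] above decode the year, day of year and set from the file name; a tiny sketch with a hypothetical name:

name = 'D2014245003.r'        # hypothetical file name: optchar + YYYY + DDD + SSS + ext
year = int(name[1:5])         # 2014
doy = int(name[5:8])          # 245
fileset = int(name[8:11])     # 3
assert (year, doy, fileset) == (2014, 245, 3)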
762 762 def __setNextFileOffline(self):
763 763
764 764 idFile = self.fileIndex
765 765
766 766 while (True):
767 767 idFile += 1
768 768 if not(idFile < len(self.filenameList)):
769 769 self.flagNoMoreFiles = 1
770 770 # print "[Reading] No more Files"
771 771 return 0
772 772
773 773 filename = self.filenameList[idFile]
774 774
775 775 if not(self.__verifyFile(filename)):
776 776 continue
777 777
778 778 fileSize = os.path.getsize(filename)
779 779 fp = open(filename,'rb')
780 780 break
781 781
782 782 self.flagIsNewFile = 1
783 783 self.fileIndex = idFile
784 784 self.filename = filename
785 785 self.fileSize = fileSize
786 786 self.fp = fp
787 787
788 788 # print "[Reading] Setting the file: %s"%self.filename
789 789
790 790 return 1
791 791
792 792 def __setNextFileOnline(self):
793 793 """
794 794 Searches, inside a specific folder, for the next file that has enough data to be read; if
795 795 no valid file is found it waits a fixed time and then looks among the next n possible
796 796 files.
797 797 
798 798 Affected:
799 799 self.flagIsNewFile
800 800 self.filename
801 801 self.fileSize
802 802 self.fp
803 803 self.set
804 804 self.flagNoMoreFiles
805 805 
806 806 Return:
807 807 0 : if, after searching, no valid next file could be found
808 808 1 : if the file was opened successfully and is ready to be read
809 809 
810 810 Exceptions:
811 811 If a given file cannot be opened
812 812 """
813 813 nFiles = 0
814 814 fileOk_flag = False
815 815 firstTime_flag = True
816 816
817 817 self.set += 1
818 818
819 819 if self.set > 999:
820 820 self.set = 0
821 821 self.foldercounter += 1
822 822
823 823 # look for the first available file
824 824 fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
825 825 if fullfilename:
826 826 if self.__verifyFile(fullfilename, False):
827 827 fileOk_flag = True
828 828
829 829 # if no file was found, wait and search again
830 830 if not(fileOk_flag):
831 831 for nFiles in range(self.nFiles+1): # search among the next self.nFiles+1 possible files
832 832 
833 833 if firstTime_flag: # on the first pass, loop self.nTries times
834 834 tries = self.nTries
835 835 else:
836 836 tries = 1 # after the first pass, try only once
837 837
838 838 for nTries in range( tries ):
839 839 if firstTime_flag:
840 840 print "\t[Reading] Waiting %0.2f sec for the next file: \"%s\" , try %03d ..." % ( self.delay, filename, nTries+1 )
841 841 sleep( self.delay )
842 842 else:
843 843 print "\t[Reading] Searching the next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext)
844 844
845 845 fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
846 846 if fullfilename:
847 847 if self.__verifyFile(fullfilename):
848 848 fileOk_flag = True
849 849 break
850 850
851 851 if fileOk_flag:
852 852 break
853 853
854 854 firstTime_flag = False
855 855
856 856 print "\t[Reading] Skipping the file \"%s\" because it does not exist" % filename
857 857 self.set += 1
858 858 
859 859 if nFiles == (self.nFiles-1): # if the requested file was not found, move on and search in the next folder
860 860 self.set = 0
861 861 self.doy += 1
862 862 self.foldercounter = 0
863 863
864 864 if fileOk_flag:
865 865 self.fileSize = os.path.getsize( fullfilename )
866 866 self.filename = fullfilename
867 867 self.flagIsNewFile = 1
868 868 if self.fp != None: self.fp.close()
869 869 self.fp = open(fullfilename, 'rb')
870 870 self.flagNoMoreFiles = 0
871 871 # print '[Reading] Setting the file: %s' % fullfilename
872 872 else:
873 873 self.fileSize = 0
874 874 self.filename = None
875 875 self.flagIsNewFile = 0
876 876 self.fp = None
877 877 self.flagNoMoreFiles = 1
878 878 # print '[Reading] No more files to read'
879 879
880 880 return fileOk_flag
881 881
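A generic sketch of the wait-and-retry pattern used by __setNextFileOnline (the helper name, delay and n_tries values are illustrative):

import os
import time

def wait_for_file(fullfilename, delay=3, n_tries=3):
    # Poll for the file a few times before giving up.
    for _ in range(n_tries):
        if os.path.isfile(fullfilename):
            return True
        time.sleep(delay)
    return False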
882 882 def setNextFile(self):
883 883 if self.fp != None:
884 884 self.fp.close()
885 885
886 886 if self.online:
887 887 newFile = self.__setNextFileOnline()
888 888 else:
889 889 newFile = self.__setNextFileOffline()
890 890
891 891 if not(newFile):
892 892 print '[Reading] No more files to read'
893 893 return 0
894 894
895 895 if self.verbose:
896 896 print '[Reading] Setting the file: %s' % self.filename
897 897
898 898 self.__readFirstHeader()
899 899 self.nReadBlocks = 0
900 900 return 1
901 901
902 902 def __waitNewBlock(self):
903 903 """
904 904 Returns 1 if a new data block was found, 0 otherwise.
905 905 
906 906 If the reading mode is offline it always returns 0.
907 907 """
908 908 if not self.online:
909 909 return 0
910 910
911 911 if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
912 912 return 0
913 913
914 914 currentPointer = self.fp.tell()
915 915
916 916 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
917 917
918 918 for nTries in range( self.nTries ):
919 919
920 920 self.fp.close()
921 921 self.fp = open( self.filename, 'rb' )
922 922 self.fp.seek( currentPointer )
923 923
924 924 self.fileSize = os.path.getsize( self.filename )
925 925 currentSize = self.fileSize - currentPointer
926 926
927 927 if ( currentSize >= neededSize ):
928 928 self.basicHeaderObj.read(self.fp)
929 929 return 1
930 930
931 931 if self.fileSize == self.fileSizeByHeader:
932 932 # self.flagEoF = True
933 933 return 0
934 934
935 935 print "[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
936 936 sleep( self.delay )
937 937
938 938
939 939 return 0
940 940
941 941 def waitDataBlock(self,pointer_location):
942 942
943 943 currentPointer = pointer_location
944 944
945 945 neededSize = self.processingHeaderObj.blockSize #+ self.basicHeaderSize
946 946
947 947 for nTries in range( self.nTries ):
948 948 self.fp.close()
949 949 self.fp = open( self.filename, 'rb' )
950 950 self.fp.seek( currentPointer )
951 951
952 952 self.fileSize = os.path.getsize( self.filename )
953 953 currentSize = self.fileSize - currentPointer
954 954
955 955 if ( currentSize >= neededSize ):
956 956 return 1
957 957
958 958 print "[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
959 959 sleep( self.delay )
960 960
961 961 return 0
962 962
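waitDataBlock re-checks the file size until at least blockSize bytes are available past the current pointer; the same polling idea in isolation (names and defaults are illustrative):

import os
import time

def wait_for_bytes(filename, pointer, needed, delay=3, n_tries=3):
    # Return True once the file has grown enough to hold the next block.
    for _ in range(n_tries):
        if os.path.getsize(filename) - pointer >= needed:
            return True
        time.sleep(delay)
    return False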
963 963 def __jumpToLastBlock(self):
964 964
965 965 if not(self.__isFirstTimeOnline):
966 966 return
967 967
968 968 csize = self.fileSize - self.fp.tell()
969 969 blocksize = self.processingHeaderObj.blockSize
970 970
971 971 # skip the first data block
972 972 if csize > self.processingHeaderObj.blockSize:
973 973 self.fp.seek(self.fp.tell() + blocksize)
974 974 else:
975 975 return
976 976
977 977 csize = self.fileSize - self.fp.tell()
978 978 neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
979 979 while True:
980 980
981 981 if self.fp.tell()<self.fileSize:
982 982 self.fp.seek(self.fp.tell() + neededsize)
983 983 else:
984 984 self.fp.seek(self.fp.tell() - neededsize)
985 985 break
986 986
987 987 # csize = self.fileSize - self.fp.tell()
988 988 # neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
989 989 # factor = int(csize/neededsize)
990 990 # if factor > 0:
991 991 # self.fp.seek(self.fp.tell() + factor*neededsize)
992 992
993 993 self.flagIsNewFile = 0
994 994 self.__isFirstTimeOnline = 0
995 995
996 996 def __setNewBlock(self):
997 if self.server is None:
998 if self.fp == None:
999 return 0
997 #if self.server is None:
998 if self.fp == None:
999 return 0
1000 1000
1001 1001 # if self.online:
1002 1002 # self.__jumpToLastBlock()
1003 1003 print 'xxxx'
1004 1004
1005 1005 if self.flagIsNewFile:
1006 1006 self.lastUTTime = self.basicHeaderObj.utc
1007 1007 return 1
1008 1008
1009 1009 if self.realtime:
1010 1010 self.flagDiscontinuousBlock = 1
1011 1011 if not(self.setNextFile()):
1012 1012 return 0
1013 1013 else:
1014 1014 return 1
1015 1015 print 'xxxx'
1016 if self.server is None:
1017 currentSize = self.fileSize - self.fp.tell()
1018 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
1019 if (currentSize >= neededSize):
1020 self.basicHeaderObj.read(self.fp)
1021 self.lastUTTime = self.basicHeaderObj.utc
1022 return 1
1023 else:
1024 self.basicHeaderObj.read(self.zHeader)
1016 #if self.server is None:
1017 currentSize = self.fileSize - self.fp.tell()
1018 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
1019 if (currentSize >= neededSize):
1020 self.basicHeaderObj.read(self.fp)
1025 1021 self.lastUTTime = self.basicHeaderObj.utc
1026 1022 return 1
1023 # else:
1024 # self.basicHeaderObj.read(self.zHeader)
1025 # self.lastUTTime = self.basicHeaderObj.utc
1026 # return 1
1027 1027 if self.__waitNewBlock():
1028 1028 self.lastUTTime = self.basicHeaderObj.utc
1029 1029 return 1
1030 if self.server is None:
1031 if not(self.setNextFile()):
1032 return 0
1030 #if self.server is None:
1031 if not(self.setNextFile()):
1032 return 0
1033 1033
1034 1034 deltaTime = self.basicHeaderObj.utc - self.lastUTTime #
1035 1035 self.lastUTTime = self.basicHeaderObj.utc
1036 1036
1037 1037 self.flagDiscontinuousBlock = 0
1038 1038
1039 1039 if deltaTime > self.maxTimeStep:
1040 1040 self.flagDiscontinuousBlock = 1
1041 1041
1042 1042 return 1
1043 1043
1044 1044 def readNextBlock(self):
1045 1045
1046 1046 #Skip block out of startTime and endTime
1047 1047 while True:
1048 1048 print 'cxxxx'
1049 1049 if not(self.__setNewBlock()):
1050 1050 print 'returning'
1051 1051 return 0
1052 1052 print 'dxxx'
1053 1053 if not(self.readBlock()):
1054 1054 return 0
1055 1055
1056 1056 self.getBasicHeader()
1057 1057
1058 1058 if not isTimeInRange(self.dataOut.datatime.time(), self.startTime, self.endTime):
1059 1059
1060 1060 print "[Reading] Block No. %d/%d -> %s [Skipping]" %(self.nReadBlocks,
1061 1061 self.processingHeaderObj.dataBlocksPerFile,
1062 1062 self.dataOut.datatime.ctime())
1063 1063 continue
1064 1064
1065 1065 break
1066 1066
1067 1067 if self.verbose:
1068 1068 print "[Reading] Block No. %d/%d -> %s" %(self.nReadBlocks,
1069 1069 self.processingHeaderObj.dataBlocksPerFile,
1070 1070 self.dataOut.datatime.ctime())
1071 1071 return 1
1072 1072
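readNextBlock keeps discarding blocks whose timestamp falls outside [startTime, endTime]; a simplified version of that time-window test (the real isTimeInRange helper may also handle ranges that cross midnight):

import datetime

def is_time_in_range(t, start, end):
    # Plain inclusive comparison on datetime.time values.
    return start <= t <= end

assert is_time_in_range(datetime.time(10, 30), datetime.time(0, 0), datetime.time(23, 59))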
1073 1073 def __readFirstHeader(self):
1074 1074
1075 1075 self.basicHeaderObj.read(self.fp)
1076 1076 self.systemHeaderObj.read(self.fp)
1077 1077 self.radarControllerHeaderObj.read(self.fp)
1078 1078 self.processingHeaderObj.read(self.fp)
1079 1079
1080 1080 self.firstHeaderSize = self.basicHeaderObj.size
1081 1081
1082 1082 datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
1083 1083 if datatype == 0:
1084 1084 datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
1085 1085 elif datatype == 1:
1086 1086 datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')])
1087 1087 elif datatype == 2:
1088 1088 datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')])
1089 1089 elif datatype == 3:
1090 1090 datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')])
1091 1091 elif datatype == 4:
1092 1092 datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')])
1093 1093 elif datatype == 5:
1094 1094 datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')])
1095 1095 else:
1096 1096 raise ValueError, 'Data type was not defined'
1097 1097
1098 1098 self.dtype = datatype_str
1099 1099 #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
1100 1100 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1)
1101 1101 # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
1102 1102 # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
1103 1103 self.getBlockDimension()
1104 1104
1105 1105 def __verifyFile(self, filename, msgFlag=True):
1106 1106
1107 1107 msg = None
1108 1108
1109 1109 try:
1110 1110 fp = open(filename, 'rb')
1111 1111 except IOError:
1112 1112
1113 1113 if msgFlag:
1114 1114 print "[Reading] File %s can't be opened" % (filename)
1115 1115
1116 1116 return False
1117 1117
1118 1118 currentPosition = fp.tell()
1119 1119 neededSize = self.processingHeaderObj.blockSize + self.firstHeaderSize
1120 1120
1121 1121 if neededSize == 0:
1122 1122 basicHeaderObj = BasicHeader(LOCALTIME)
1123 1123 systemHeaderObj = SystemHeader()
1124 1124 radarControllerHeaderObj = RadarControllerHeader()
1125 1125 processingHeaderObj = ProcessingHeader()
1126 1126
1127 1127 if not( basicHeaderObj.read(fp) ):
1128 1128 fp.close()
1129 1129 return False
1130 1130
1131 1131 if not( systemHeaderObj.read(fp) ):
1132 1132 fp.close()
1133 1133 return False
1134 1134
1135 1135 if not( radarControllerHeaderObj.read(fp) ):
1136 1136 fp.close()
1137 1137 return False
1138 1138
1139 1139 if not( processingHeaderObj.read(fp) ):
1140 1140 fp.close()
1141 1141 return False
1142 1142
1143 1143 neededSize = processingHeaderObj.blockSize + basicHeaderObj.size
1144 1144 else:
1145 1145 msg = "[Reading] Skipping the file %s because it does not have enough data" %filename
1146 1146
1147 1147 fp.close()
1148 1148
1149 1149 fileSize = os.path.getsize(filename)
1150 1150 currentSize = fileSize - currentPosition
1151 1151
1152 1152 if currentSize < neededSize:
1153 1153 if msgFlag and (msg != None):
1154 1154 print msg
1155 1155 return False
1156 1156
1157 1157 return True
1158 1158
1159 1159 def findDatafiles(self, path, startDate=None, endDate=None, expLabel='', ext='.r', walk=True, include_path=False):
1160 1160
1161 1161 path_empty = True
1162 1162
1163 1163 dateList = []
1164 1164 pathList = []
1165 1165
1166 1166 multi_path = path.split(',')
1167 1167
1168 1168 if not walk:
1169 1169
1170 1170 for single_path in multi_path:
1171 1171
1172 1172 if not os.path.isdir(single_path):
1173 1173 continue
1174 1174
1175 1175 fileList = glob.glob1(single_path, "*"+ext)
1176 1176
1177 1177 if not fileList:
1178 1178 continue
1179 1179
1180 1180 path_empty = False
1181 1181
1182 1182 fileList.sort()
1183 1183
1184 1184 for thisFile in fileList:
1185 1185
1186 1186 if not os.path.isfile(os.path.join(single_path, thisFile)):
1187 1187 continue
1188 1188
1189 1189 if not isRadarFile(thisFile):
1190 1190 continue
1191 1191
1192 1192 if not isFileInDateRange(thisFile, startDate, endDate):
1193 1193 continue
1194 1194
1195 1195 thisDate = getDateFromRadarFile(thisFile)
1196 1196
1197 1197 if thisDate in dateList:
1198 1198 continue
1199 1199
1200 1200 dateList.append(thisDate)
1201 1201 pathList.append(single_path)
1202 1202
1203 1203 else:
1204 1204 for single_path in multi_path:
1205 1205
1206 1206 if not os.path.isdir(single_path):
1207 1207 continue
1208 1208
1209 1209 dirList = []
1210 1210
1211 1211 for thisPath in os.listdir(single_path):
1212 1212
1213 1213 if not os.path.isdir(os.path.join(single_path,thisPath)):
1214 1214 continue
1215 1215
1216 1216 if not isRadarFolder(thisPath):
1217 1217 continue
1218 1218
1219 1219 if not isFolderInDateRange(thisPath, startDate, endDate):
1220 1220 continue
1221 1221
1222 1222 dirList.append(thisPath)
1223 1223
1224 1224 if not dirList:
1225 1225 continue
1226 1226
1227 1227 dirList.sort()
1228 1228
1229 1229 for thisDir in dirList:
1230 1230
1231 1231 datapath = os.path.join(single_path, thisDir, expLabel)
1232 1232 fileList = glob.glob1(datapath, "*"+ext)
1233 1233
1234 1234 if not fileList:
1235 1235 continue
1236 1236
1237 1237 path_empty = False
1238 1238
1239 1239 thisDate = getDateFromRadarFolder(thisDir)
1240 1240
1241 1241 pathList.append(datapath)
1242 1242 dateList.append(thisDate)
1243 1243
1244 1244 dateList.sort()
1245 1245
1246 1246 if walk:
1247 1247 pattern_path = os.path.join(multi_path[0], "[dYYYYDDD]", expLabel)
1248 1248 else:
1249 1249 pattern_path = multi_path[0]
1250 1250
1251 1251 if path_empty:
1252 1252 print "[Reading] No *%s files in %s for %s to %s" %(ext, pattern_path, startDate, endDate)
1253 1253 else:
1254 1254 if not dateList:
1255 1255 print "[Reading] Date range selected invalid [%s - %s]: No *%s files in %s" %(startDate, endDate, ext, path)
1256 1256
1257 1257 if include_path:
1258 1258 return dateList, pathList
1259 1259
1260 1260 return dateList
1261 1261
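In walk mode, findDatafiles only descends into day folders of the form dYYYYDDD (the [dYYYYDDD] pattern printed above); a sketch of that naming test, assuming isRadarFolder behaves roughly like this:

import re

def looks_like_radar_folder(name):
    # 'd' or 'D' followed by a 4-digit year and a 3-digit day of year.
    return re.match(r'^[dD]\d{4}\d{3}', name) is not None

assert looks_like_radar_folder('d2014245')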
1262 1262 def setup(self,
1263 1263 path=None,
1264 1264 startDate=None,
1265 1265 endDate=None,
1266 1266 startTime=datetime.time(0,0,0),
1267 1267 endTime=datetime.time(23,59,59),
1268 1268 set=None,
1269 1269 expLabel = "",
1270 1270 ext = None,
1271 1271 online = False,
1272 1272 delay = 60,
1273 1273 walk = True,
1274 1274 getblock = False,
1275 1275 nTxs = 1,
1276 1276 realtime=False,
1277 1277 blocksize=None,
1278 1278 blocktime=None,
1279 1279 queue=None,
1280 1280 skip=None,
1281 1281 cursor=None,
1282 1282 warnings=True,
1283 1283 verbose=True,
1284 1284 server=None):
1285 1285 if server is not None:
1286 1286 if 'tcp://' in server:
1287 1287 address = server
1288 1288 else:
1289 1289 address = 'ipc:///tmp/%s' % server
1290 1290 self.server = address
1291 1291 self.context = zmq.Context()
1292 1292 self.receiver = self.context.socket(zmq.PULL)
1293 self.receiver.bind(self.server)
1293 self.receiver.connect(self.server)
1294 1294 time.sleep(0.5)
1295 1295 print '[Starting] ReceiverData from {}'.format(self.server)
1296 1296 else:
1297 1297 self.server = None
1298 1298 if path == None:
1299 1299 raise ValueError, "[Reading] The path is not valid"
1300 1300
1301 1301 if ext == None:
1302 1302 ext = self.ext
1303 1303
1304 1304 if online:
1305 1305 print "[Reading] Searching files in online mode..."
1306 1306
1307 1307 for nTries in range( self.nTries ):
1308 1308 fullpath, foldercounter, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext, walk=walk, set=set)
1309 1309
1310 1310 if fullpath:
1311 1311 break
1312 1312
1313 1313 print '[Reading] Waiting %0.2f sec for a valid file in %s: try %02d ...' % (self.delay, path, nTries+1)
1314 1314 sleep( self.delay )
1315 1315
1316 1316 if not(fullpath):
1317 1317 print "[Reading] There isn't any valid file in %s" % path
1318 1318 return
1319 1319
1320 1320 self.year = year
1321 1321 self.doy = doy
1322 1322 self.set = set - 1
1323 1323 self.path = path
1324 1324 self.foldercounter = foldercounter
1325 1325 last_set = None
1326 1326 else:
1327 1327 print "[Reading] Searching files in offline mode ..."
1328 1328 pathList, filenameList = self.__searchFilesOffLine(path, startDate=startDate, endDate=endDate,
1329 1329 startTime=startTime, endTime=endTime,
1330 1330 set=set, expLabel=expLabel, ext=ext,
1331 1331 walk=walk, cursor=cursor,
1332 1332 skip=skip, queue=queue)
1333 1333
1334 1334 if not(pathList):
1335 1335 # print "[Reading] No *%s files in %s (%s - %s)"%(ext, path,
1336 1336 # datetime.datetime.combine(startDate,startTime).ctime(),
1337 1337 # datetime.datetime.combine(endDate,endTime).ctime())
1338 1338
1339 1339 # sys.exit(-1)
1340 1340
1341 1341 self.fileIndex = -1
1342 1342 self.pathList = []
1343 1343 self.filenameList = []
1344 1344 return
1345 1345
1346 1346 self.fileIndex = -1
1347 1347 self.pathList = pathList
1348 1348 self.filenameList = filenameList
1349 1349 file_name = os.path.basename(filenameList[-1])
1350 1350 basename, ext = os.path.splitext(file_name)
1351 1351 last_set = int(basename[-3:])
1352 1352
1353 1353 self.online = online
1354 1354 self.realtime = realtime
1355 1355 self.delay = delay
1356 1356 ext = ext.lower()
1357 1357 self.ext = ext
1358 1358 self.getByBlock = getblock
1359 1359 self.nTxs = nTxs
1360 1360 self.startTime = startTime
1361 1361 self.endTime = endTime
1362 1362
1363 1363 #Added-----------------
1364 1364 self.selBlocksize = blocksize
1365 1365 self.selBlocktime = blocktime
1366 1366
1367 1367 # Verbose-----------
1368 1368 self.verbose = verbose
1369 1369 self.warnings = warnings
1370 1370
1371 1371 if not(self.setNextFile()):
1372 1372 if (startDate!=None) and (endDate!=None):
1373 1373 print "[Reading] No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
1374 1374 elif startDate != None:
1375 1375 print "[Reading] No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
1376 1376 else:
1377 1377 print "[Reading] No files"
1378 1378
1379 1379 self.fileIndex = -1
1380 1380 self.pathList = []
1381 1381 self.filenameList = []
1382 1382 return
1383 1383
1384 1384 # self.getBasicHeader()
1385 1385
1386 1386 if last_set != None:
1387 1387 self.dataOut.last_block = last_set * self.processingHeaderObj.dataBlocksPerFile + self.basicHeaderObj.dataBlock
1388 1388 return
1389 1389
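The server branch of setup() connects a ZMQ PULL socket to an ipc endpoint; a hypothetical PUSH counterpart that would feed it could look like the sketch below (the endpoint name and payload are placeholders, not part of the library):

import zmq

context = zmq.Context()
sender = context.socket(zmq.PUSH)
sender.bind('ipc:///tmp/rawdata')   # the reader side does connect() on the same endpoint
sender.send(b'\x00' * 24)           # e.g. a 24-byte basic-header-sized placeholder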
1390 1390 def getBasicHeader(self):
1391 1391
1392 1392 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000. + self.profileIndex * self.radarControllerHeaderObj.ippSeconds
1393 1393
1394 1394 self.dataOut.flagDiscontinuousBlock = self.flagDiscontinuousBlock
1395 1395
1396 1396 self.dataOut.timeZone = self.basicHeaderObj.timeZone
1397 1397
1398 1398 self.dataOut.dstFlag = self.basicHeaderObj.dstFlag
1399 1399
1400 1400 self.dataOut.errorCount = self.basicHeaderObj.errorCount
1401 1401
1402 1402 self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime
1403 1403
1404 1404 self.dataOut.ippSeconds = self.radarControllerHeaderObj.ippSeconds/self.nTxs
1405 1405
1406 1406 # self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock*self.nTxs
1407 1407
1408 1408
1409 1409 def getFirstHeader(self):
1410 1410
1411 1411 raise NotImplementedError
1412 1412
1413 1413 def getData(self):
1414 1414
1415 1415 raise NotImplementedError
1416 1416
1417 1417 def hasNotDataInBuffer(self):
1418 1418
1419 1419 raise NotImplementedError
1420 1420
1421 1421 def readBlock(self):
1422 1422
1423 1423 raise NotImplementedError
1424 1424
1425 1425 def isEndProcess(self):
1426 1426
1427 1427 return self.flagNoMoreFiles
1428 1428
1429 1429 def printReadBlocks(self):
1430 1430
1431 1431 print "[Reading] Number of read blocks per file %04d" %self.nReadBlocks
1432 1432
1433 1433 def printTotalBlocks(self):
1434 1434
1435 1435 print "[Reading] Number of read blocks %04d" %self.nTotalBlocks
1436 1436
1437 1437 def printNumberOfBlock(self):
1438 1438
1439 1439 if self.flagIsNewBlock:
1440 1440 print "[Reading] Block No. %d/%d -> %s" %(self.nReadBlocks,
1441 1441 self.processingHeaderObj.dataBlocksPerFile,
1442 1442 self.dataOut.datatime.ctime())
1443 1443
1444 1444 def printInfo(self):
1445 1445
1446 1446 if self.__printInfo == False:
1447 1447 return
1448 1448
1449 1449 self.basicHeaderObj.printInfo()
1450 1450 self.systemHeaderObj.printInfo()
1451 1451 self.radarControllerHeaderObj.printInfo()
1452 1452 self.processingHeaderObj.printInfo()
1453 1453
1454 1454 self.__printInfo = False
1455 1455
1456 1456
1457 1457 def run(self,
1458 1458 path=None,
1459 1459 startDate=None,
1460 1460 endDate=None,
1461 1461 startTime=datetime.time(0,0,0),
1462 1462 endTime=datetime.time(23,59,59),
1463 1463 set=None,
1464 1464 expLabel = "",
1465 1465 ext = None,
1466 1466 online = False,
1467 1467 delay = 60,
1468 1468 walk = True,
1469 1469 getblock = False,
1470 1470 nTxs = 1,
1471 1471 realtime=False,
1472 1472 blocksize=None,
1473 1473 blocktime=None,
1474 1474 queue=None,
1475 1475 skip=None,
1476 1476 cursor=None,
1477 1477 warnings=True,
1478 1478 server=None,
1479 1479 verbose=True, **kwargs):
1480 1480
1481 1481 if not(self.isConfig):
1482 1482 # self.dataOut = dataOut
1483 1483 self.setup( path=path,
1484 1484 startDate=startDate,
1485 1485 endDate=endDate,
1486 1486 startTime=startTime,
1487 1487 endTime=endTime,
1488 1488 set=set,
1489 1489 expLabel=expLabel,
1490 1490 ext=ext,
1491 1491 online=online,
1492 1492 delay=delay,
1493 1493 walk=walk,
1494 1494 getblock=getblock,
1495 1495 nTxs=nTxs,
1496 1496 realtime=realtime,
1497 1497 blocksize=blocksize,
1498 1498 blocktime=blocktime,
1499 1499 queue=queue,
1500 1500 skip=skip,
1501 1501 cursor=cursor,
1502 1502 warnings=warnings,
1503 1503 server=server,
1504 1504 verbose=verbose)
1505 1505 self.isConfig = True
1506 print 'hola'
1507 self.getData()
1506 if server is None:
1507 self.getData()
1508 else:
1509 self.getFromServer()
1508 1510
1509 1511 class JRODataWriter(JRODataIO):
1510 1512
1511 1513 """
1512 1514 This class writes data to processed files (.r or .pdata). Data is always
1513 1515 written in blocks.
1514 1516 """
1515 1517
1516 1518 blockIndex = 0
1517 1519
1518 1520 path = None
1519 1521
1520 1522 setFile = None
1521 1523
1522 1524 profilesPerBlock = None
1523 1525
1524 1526 blocksPerFile = None
1525 1527
1526 1528 nWriteBlocks = 0
1527 1529
1528 1530 fileDate = None
1529 1531
1530 1532 def __init__(self, dataOut=None):
1531 1533 raise NotImplementedError
1532 1534
1533 1535
1534 1536 def hasAllDataInBuffer(self):
1535 1537 raise NotImplementedError
1536 1538
1537 1539
1538 1540 def setBlockDimension(self):
1539 1541 raise NotImplementedError
1540 1542
1541 1543
1542 1544 def writeBlock(self):
1543 1545 raise NotImplementedError
1544 1546
1545 1547
1546 1548 def putData(self):
1547 1549 raise NotImplementedError
1548 1550
1549 1551
1550 1552 def getProcessFlags(self):
1551 1553
1552 1554 processFlags = 0
1553 1555
1554 1556 dtype_index = get_dtype_index(self.dtype)
1555 1557 procflag_dtype = get_procflag_dtype(dtype_index)
1556 1558
1557 1559 processFlags += procflag_dtype
1558 1560
1559 1561 if self.dataOut.flagDecodeData:
1560 1562 processFlags += PROCFLAG.DECODE_DATA
1561 1563
1562 1564 if self.dataOut.flagDeflipData:
1563 1565 processFlags += PROCFLAG.DEFLIP_DATA
1564 1566
1565 1567 if self.dataOut.code is not None:
1566 1568 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
1567 1569
1568 1570 if self.dataOut.nCohInt > 1:
1569 1571 processFlags += PROCFLAG.COHERENT_INTEGRATION
1570 1572
1571 1573 if self.dataOut.type == "Spectra":
1572 1574 if self.dataOut.nIncohInt > 1:
1573 1575 processFlags += PROCFLAG.INCOHERENT_INTEGRATION
1574 1576
1575 1577 if self.dataOut.data_dc is not None:
1576 1578 processFlags += PROCFLAG.SAVE_CHANNELS_DC
1577 1579
1578 1580 if self.dataOut.flagShiftFFT:
1579 1581 processFlags += PROCFLAG.SHIFT_FFT_DATA
1580 1582
1581 1583 return processFlags
1582 1584
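getProcessFlags assembles an additive bitmask; a tiny sketch of the idea (the two flag values below are illustrative powers of two, not the real PROCFLAG constants):

DECODE_DATA = 0x00000020           # illustrative value
COHERENT_INTEGRATION = 0x00000080  # illustrative value

flags = 0
flags += DECODE_DATA
flags += COHERENT_INTEGRATION
assert flags & DECODE_DATA and flags & COHERENT_INTEGRATION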
1583 1585 def setBasicHeader(self):
1584 1586
1585 1587 self.basicHeaderObj.size = self.basicHeaderSize #bytes
1586 1588 self.basicHeaderObj.version = self.versionFile
1587 1589 self.basicHeaderObj.dataBlock = self.nTotalBlocks
1588 1590
1589 1591 utc = numpy.floor(self.dataOut.utctime)
1590 1592 milisecond = (self.dataOut.utctime - utc)* 1000.0
1591 1593
1592 1594 self.basicHeaderObj.utc = utc
1593 1595 self.basicHeaderObj.miliSecond = milisecond
1594 1596 self.basicHeaderObj.timeZone = self.dataOut.timeZone
1595 1597 self.basicHeaderObj.dstFlag = self.dataOut.dstFlag
1596 1598 self.basicHeaderObj.errorCount = self.dataOut.errorCount
1597 1599
1598 1600 def setFirstHeader(self):
1599 1601 """
1600 1602 Builds a copy of the First Header.
1601 1603 
1602 1604 Affected:
1603 1605 
1604 1606 self.basicHeaderObj
1605 1607 self.systemHeaderObj
1606 1608 self.radarControllerHeaderObj
1607 1609 self.processingHeaderObj
1608 1610
1609 1611 Return:
1610 1612 None
1611 1613 """
1612 1614
1613 1615 raise NotImplementedError
1614 1616
1615 1617 def __writeFirstHeader(self):
1616 1618 """
1617 1619 Writes the file's first header, i.e. the Basic header plus the Long header (SystemHeader, RadarControllerHeader, ProcessingHeader).
1618 1620
1619 1621 Affected:
1620 1622 __dataType
1621 1623
1622 1624 Return:
1623 1625 None
1624 1626 """
1625 1627
1626 1628 # compute parameters
1627 1629
1628 1630 sizeLongHeader = self.systemHeaderObj.size + self.radarControllerHeaderObj.size + self.processingHeaderObj.size
1629 1631 self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader
1630 1632
1631 1633 self.basicHeaderObj.write(self.fp)
1632 1634 self.systemHeaderObj.write(self.fp)
1633 1635 self.radarControllerHeaderObj.write(self.fp)
1634 1636 self.processingHeaderObj.write(self.fp)
1635 1637
1636 1638 def __setNewBlock(self):
1637 1639 """
1638 1640 If this is a new file it writes the First Header, otherwise it writes only the Basic Header.
1639 1641 
1640 1642 Return:
1641 1643 0 : if nothing could be written
1642 1644 1 : if the Basic or the First Header was written
1643 1645 """
1644 1646 if self.fp == None:
1645 1647 self.setNextFile()
1646 1648
1647 1649 if self.flagIsNewFile:
1648 1650 return 1
1649 1651
1650 1652 if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
1651 1653 self.basicHeaderObj.write(self.fp)
1652 1654 return 1
1653 1655
1654 1656 if not( self.setNextFile() ):
1655 1657 return 0
1656 1658
1657 1659 return 1
1658 1660
1659 1661
1660 1662 def writeNextBlock(self):
1661 1663 """
1662 1664 Selects the next data block and writes it to a file.
1663 1665 
1664 1666 Return:
1665 1667 0 : if the data block could not be written
1666 1668 1 : if the data block was written
1667 1669 """
1668 1670 if not( self.__setNewBlock() ):
1669 1671 return 0
1670 1672
1671 1673 self.writeBlock()
1672 1674
1673 1675 print "[Writing] Block No. %d/%d" %(self.blockIndex,
1674 1676 self.processingHeaderObj.dataBlocksPerFile)
1675 1677
1676 1678 return 1
1677 1679
1678 1680 def setNextFile(self):
1679 1681 """
1680 1682 Determines the next file to be written.
1681 1683 
1682 1684 Affected:
1683 1685 self.filename
1684 1686 self.subfolder
1685 1687 self.fp
1686 1688 self.setFile
1687 1689 self.flagIsNewFile
1688 1690 
1689 1691 
1690 1692 Return:
1691 1693 0 : if the file cannot be written
1692 1694 1 : if the file is ready to be written
1693 1695 ext = self.ext
1694 1696 path = self.path
1695 1697
1696 1698 if self.fp != None:
1697 1699 self.fp.close()
1698 1700
1699 1701 timeTuple = time.localtime( self.dataOut.utctime)
1700 1702 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
1701 1703
1702 1704 fullpath = os.path.join( path, subfolder )
1703 1705 setFile = self.setFile
1704 1706
1705 1707 if not( os.path.exists(fullpath) ):
1706 1708 os.mkdir(fullpath)
1707 1709 setFile = -1 # initialize the set counter
1708 1710 else:
1709 1711 filesList = os.listdir( fullpath )
1710 1712 if len( filesList ) > 0:
1711 1713 filesList = sorted( filesList, key=str.lower )
1712 1714 filen = filesList[-1]
1713 1715 # the filename is expected to have the following format
1714 1716 # 0 1234 567 89A BCDE (hex)
1715 1717 # x YYYY DDD SSS .ext
1716 1718 if isNumber( filen[8:11] ):
1717 1719 setFile = int( filen[8:11] ) # initialize the set counter from the last file's set
1718 1720 else:
1719 1721 setFile = -1
1720 1722 else:
1721 1723 setFile = -1 # initialize the set counter
1722 1724
1723 1725 setFile += 1
1724 1726
1725 1727 #If this is a new day it resets some values
1726 1728 if self.dataOut.datatime.date() > self.fileDate:
1727 1729 setFile = 0
1728 1730 self.nTotalBlocks = 0
1729 1731
1730 1732 filen = '%s%4.4d%3.3d%3.3d%s' % (self.optchar, timeTuple.tm_year, timeTuple.tm_yday, setFile, ext )
1731 1733
1732 1734 filename = os.path.join( path, subfolder, filen )
1733 1735
1734 1736 fp = open( filename,'wb' )
1735 1737
1736 1738 self.blockIndex = 0
1737 1739
1738 1740 # save attributes
1739 1741 self.filename = filename
1740 1742 self.subfolder = subfolder
1741 1743 self.fp = fp
1742 1744 self.setFile = setFile
1743 1745 self.flagIsNewFile = 1
1744 1746 self.fileDate = self.dataOut.datatime.date()
1745 1747
1746 1748 self.setFirstHeader()
1747 1749
1748 1750 print '[Writing] Opening file: %s'%self.filename
1749 1751
1750 1752 self.__writeFirstHeader()
1751 1753
1752 1754 return 1
1753 1755
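The output name built in setNextFile follows optchar + YYYY + DDD + SSS + ext; a worked example with illustrative values:

optchar, year, doy, setfile, ext = 'D', 2014, 245, 0, '.r'
filen = '%s%4.4d%3.3d%3.3d%s' % (optchar, year, doy, setfile, ext)
assert filen == 'D2014245000.r'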
1754 1756 def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=64, set=None, ext=None, datatype=4):
1755 1757 """
1756 1758 Sets the format in which the data will be saved and writes the First Header.
1757 1759
1758 1760 Inputs:
1759 1761 path : directory where data will be saved
1760 1762 profilesPerBlock : number of profiles per block
1761 1763 set : initial file set
1762 1764 datatype : An integer number that defines data type:
1763 1765 0 : int8 (1 byte)
1764 1766 1 : int16 (2 bytes)
1765 1767 2 : int32 (4 bytes)
1766 1768 3 : int64 (8 bytes)
1767 1769 4 : float32 (4 bytes)
1768 1770 5 : double64 (8 bytes)
1769 1771
1770 1772 Return:
1771 1773 0 : if the setup did not succeed
1772 1774 1 : if the setup succeeded
1773 1775 """
1774 1776
1775 1777 if ext == None:
1776 1778 ext = self.ext
1777 1779
1778 1780 self.ext = ext.lower()
1779 1781
1780 1782 self.path = path
1781 1783
1782 1784 if set is None:
1783 1785 self.setFile = -1
1784 1786 else:
1785 1787 self.setFile = set - 1
1786 1788
1787 1789 self.blocksPerFile = blocksPerFile
1788 1790
1789 1791 self.profilesPerBlock = profilesPerBlock
1790 1792
1791 1793 self.dataOut = dataOut
1792 1794 self.fileDate = self.dataOut.datatime.date()
1793 1795 #By default
1794 1796 self.dtype = self.dataOut.dtype
1795 1797
1796 1798 if datatype is not None:
1797 1799 self.dtype = get_numpy_dtype(datatype)
1798 1800
1799 1801 if not(self.setNextFile()):
1800 1802 print "[Writing] There isn't a next file"
1801 1803 return 0
1802 1804
1803 1805 self.setBlockDimension()
1804 1806
1805 1807 return 1
1806 1808
1807 1809 def run(self, dataOut, path, blocksPerFile, profilesPerBlock=64, set=None, ext=None, datatype=4, **kwargs):
1808 1810
1809 1811 if not(self.isConfig):
1810 1812
1811 1813 self.setup(dataOut, path, blocksPerFile, profilesPerBlock=profilesPerBlock, set=set, ext=ext, datatype=datatype, **kwargs)
1812 1814 self.isConfig = True
1813 1815
1814 1816 self.putData()
@@ -1,651 +1,739
1 1 '''
2 2 Created on Jul 2, 2014
3 3
4 4 @author: roj-idl71
5 5 '''
6 6
7 7 import numpy
8 8
9 9 from jroIO_base import LOCALTIME, JRODataReader, JRODataWriter
10 10 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
11 11 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
12 12 from schainpy.model.data.jrodata import Voltage
13 13 import zmq
14 import tempfile
15 from StringIO import StringIO
14 16 # from _sha import blocksize
15 17
16 18 class VoltageReader(JRODataReader, ProcessingUnit):
17 19 """
18 20 This class reads voltage data from files in rawdata format (.r). Data is always read
19 21 in blocks. The data read (a 3-dimensional array: profiles*heights*channels) is stored
20 22 in the "buffer" variable.
21 23 
22 24 profiles * heights * channels
23 25 
24 26 This class holds instances (objects) of the BasicHeader, SystemHeader,
25 27 RadarControllerHeader and Voltage classes. The first three store the data header
26 28 information (metadata), and the fourth (Voltage) obtains and stores one data profile
27 29 from the "buffer" every time the "getData" method is executed.
28 30
29 31 Example:
30 32
31 33 dpath = "/home/myuser/data"
32 34
33 35 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
34 36
35 37 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
36 38
37 39 readerObj = VoltageReader()
38 40
39 41 readerObj.setup(dpath, startTime, endTime)
40 42
41 43 while(True):
42 44
43 45 #to get one profile
44 46 profile = readerObj.getData()
45 47
46 48 #print the profile
47 49 print profile
48 50
49 51 #If you want to see all datablock
50 52 print readerObj.datablock
51 53
52 54 if readerObj.flagNoMoreFiles:
53 55 break
54 56
55 57 """
56 58
57 59 ext = ".r"
58 60
59 61 optchar = "D"
60 62 dataOut = None
61 63
62 64 def __init__(self, **kwargs):
63 65 """
64 66 Initializer of the VoltageReader class for reading voltage data.
65 67 
66 68 Input:
67 69 dataOut : Object of the Voltage class. This object is used to
68 70 store one data profile every time a request is made
69 71 (getData). The profile is obtained from the data buffer;
70 72 if the buffer is empty a new read of a data block is
71 73 performed.
72 74 If this parameter is not passed, one is created internally.
73 75 
74 76 Affected variables:
75 77 self.dataOut
76 78
77 79 Return:
78 80 None
79 81 """
80 82
81 83 ProcessingUnit.__init__(self, **kwargs)
82 84
83 85 self.isConfig = False
84 86
85 87 self.datablock = None
86 88
87 89 self.utc = 0
88 90
89 91 self.ext = ".r"
90 92
91 93 self.optchar = "D"
92 94
93 95 self.basicHeaderObj = BasicHeader(LOCALTIME)
94 96
95 97 self.systemHeaderObj = SystemHeader()
96 98
97 99 self.radarControllerHeaderObj = RadarControllerHeader()
98 100
99 101 self.processingHeaderObj = ProcessingHeader()
100 102
101 103 self.online = 0
102 104
103 105 self.fp = None
104 106
105 107 self.idFile = None
106 108
107 109 self.dtype = None
108 110
109 111 self.fileSizeByHeader = None
110 112
111 113 self.filenameList = []
112 114
113 115 self.filename = None
114 116
115 117 self.fileSize = None
116 118
117 119 self.firstHeaderSize = 0
118 120
119 121 self.basicHeaderSize = 24
120 122
121 123 self.pathList = []
122 124
123 125 self.filenameList = []
124 126
125 127 self.lastUTTime = 0
126 128
127 129 self.maxTimeStep = 30
128 130
129 131 self.flagNoMoreFiles = 0
130 132
131 133 self.set = 0
132 134
133 135 self.path = None
134 136
135 137 self.profileIndex = 2**32-1
136 138
137 139 self.delay = 3 #seconds
138 140
139 141 self.nTries = 3 #quantity tries
140 142
141 143 self.nFiles = 3 #number of files for searching
142 144
143 145 self.nReadBlocks = 0
144 146
145 147 self.flagIsNewFile = 1
146 148
147 149 self.__isFirstTimeOnline = 1
148 150
149 151 # self.ippSeconds = 0
150 152
151 153 self.flagDiscontinuousBlock = 0
152 154
153 155 self.flagIsNewBlock = 0
154 156
155 157 self.nTotalBlocks = 0
156 158
157 159 self.blocksize = 0
158 160
159 161 self.dataOut = self.createObjByDefault()
160 162
161 163 self.nTxs = 1
162 164
163 165 self.txIndex = 0
164 166
165 167 def createObjByDefault(self):
166 168
167 169 dataObj = Voltage()
168 170
169 171 return dataObj
170 172
171 173 def __hasNotDataInBuffer(self):
172 174
173 175 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock*self.nTxs:
174 176 return 1
175 177
176 178 return 0
177 179
178 180
179 181 def getBlockDimension(self):
180 182 """
181 183 Computes the number of data points to read per data block.
182 184
183 185 Affected:
184 186 self.blocksize
185 187
186 188 Return:
187 189 None
188 190 """
189 191 pts2read = self.processingHeaderObj.profilesPerBlock * self.processingHeaderObj.nHeights * self.systemHeaderObj.nChannels
190 192 self.blocksize = pts2read
191 193
192 194
193 195
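A worked example of the point count computed by getBlockDimension, using the 240 profiles and 248 heights seen in the commented server branch of readBlock and an assumed 2 channels:

profiles_per_block, n_heights, n_channels = 240, 248, 2   # 2 channels is an assumption
pts2read = profiles_per_block * n_heights * n_channels
assert pts2read == 119040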
194 196 def readBlock(self):
195 197 """
196 198 readBlock reads the data block from the current position of the file pointer
197 199 (self.fp) and updates all parameters related to the data block
198 200 (metadata + data). The data read is stored in the buffer and the buffer
199 201 counter is set to 0.
200 202
201 203 Inputs:
202 204 None
203 205
204 206 Return:
205 207 None
206 208
207 209 Affected:
208 210 self.profileIndex
209 211 self.datablock
210 212 self.flagIsNewFile
211 213 self.flagIsNewBlock
212 214 self.nTotalBlocks
213 215
214 216 Exceptions:
215 217 If a block that was read is not a valid block
216 218 """
217 219
218 220 print 'READ BLOCK'
219 if self.server is not None:
220 self.zBlock = self.receiver.recv()
221 self.zHeader = self.zBlock[:24]
222 self.zDataBlock = self.zBlock[24:]
223 junk = numpy.fromstring(self.zDataBlock, numpy.dtype([('real','<i4'),('imag','<i4')]))
224 self.processingHeaderObj.profilesPerBlock = 240
225 self.processingHeaderObj.nHeights = 248
226 self.systemHeaderObj.nChannels
227 else:
228 current_pointer_location = self.fp.tell()
229 junk = numpy.fromfile( self.fp, self.dtype, self.blocksize )
221 # if self.server is not None:
222 # self.zBlock = self.receiver.recv()
223 # self.zHeader = self.zBlock[:24]
224 # self.zDataBlock = self.zBlock[24:]
225 # junk = numpy.fromstring(self.zDataBlock, numpy.dtype([('real','<i4'),('imag','<i4')]))
226 # self.processingHeaderObj.profilesPerBlock = 240
227 # self.processingHeaderObj.nHeights = 248
228 # self.systemHeaderObj.nChannels
229 # else:
230 current_pointer_location = self.fp.tell()
231 junk = numpy.fromfile( self.fp, self.dtype, self.blocksize )
230 232
231 233 try:
232 234 junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
233 235 print 'junked'
234 236 except:
235 237 #print "The read block (%3d) has not enough data" %self.nReadBlocks
236 238
237 239 if self.waitDataBlock(pointer_location=current_pointer_location):
238 240 junk = numpy.fromfile( self.fp, self.dtype, self.blocksize )
239 241 junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
240 242 # return 0
241 243
242 244 #Dimensions : nChannels, nProfiles, nSamples
243 245
244 246 junk = numpy.transpose(junk, (2,0,1))
245 247 self.datablock = junk['real'] + junk['imag']*1j
246 248
247 249 self.profileIndex = 0
248 250
249 251 self.flagIsNewFile = 0
250 252 self.flagIsNewBlock = 1
251 253
252 254 self.nTotalBlocks += 1
253 255 self.nReadBlocks += 1
254 256
255 257 return 1
256 258
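The decode path in readBlock can be exercised on synthetic data: structured (real, imag) samples are reshaped to (profiles, heights, channels), transposed to (channels, profiles, heights) and combined into complex voltages. A self-contained sketch with small illustrative dimensions:

import numpy

profiles, heights, channels = 4, 3, 2
dtype = numpy.dtype([('real', '<i2'), ('imag', '<i2')])
junk = numpy.zeros(profiles * heights * channels, dtype=dtype)      # stands in for numpy.fromfile(...)
junk = junk.reshape((profiles, heights, channels))
junk = numpy.transpose(junk, (2, 0, 1))                             # -> (channels, profiles, heights)
datablock = junk['real'] + junk['imag'] * 1j
assert datablock.shape == (channels, profiles, heights)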
257 259 def getFirstHeader(self):
258 260
259 261 self.getBasicHeader()
260 262
261 263 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
262 264
263 265 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
264 266
265 267 if self.nTxs > 1:
266 268 self.dataOut.radarControllerHeaderObj.ippSeconds = self.radarControllerHeaderObj.ippSeconds/self.nTxs
267 269
268 270 # Time interval and code are properties of dataOut. Their values depend on radarControllerHeaderObj.
269 271
270 272 # self.dataOut.timeInterval = self.radarControllerHeaderObj.ippSeconds * self.processingHeaderObj.nCohInt
271 273 #
272 274 # if self.radarControllerHeaderObj.code is not None:
273 275 #
274 276 # self.dataOut.nCode = self.radarControllerHeaderObj.nCode
275 277 #
276 278 # self.dataOut.nBaud = self.radarControllerHeaderObj.nBaud
277 279 #
278 280 # self.dataOut.code = self.radarControllerHeaderObj.code
279 281
280 282 self.dataOut.dtype = self.dtype
281 283
282 284 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
283 285
284 286 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.nHeights) *self.processingHeaderObj.deltaHeight + self.processingHeaderObj.firstHeight
285 287
286 288 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
287 289
288 290 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
289 291
290 292 self.dataOut.flagDecodeData = self.processingHeaderObj.flag_decode # assume the data has not been decoded
291 293 
292 294 self.dataOut.flagDeflipData = self.processingHeaderObj.flag_deflip # assume the data has not been deflipped
293 295
294 296 self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft
295 297
296 298 def reshapeData(self):
297 299
298 300 if self.nTxs < 0:
299 301 return
300 302
301 303 if self.nTxs == 1:
302 304 return
303 305
304 306 if self.nTxs < 1 and self.processingHeaderObj.profilesPerBlock % (1./self.nTxs) != 0:
305 307 raise ValueError, "1./nTxs (=%f), should be a multiple of nProfiles (=%d)" %(1./self.nTxs, self.processingHeaderObj.profilesPerBlock)
306 308
307 309 if self.nTxs > 1 and self.processingHeaderObj.nHeights % self.nTxs != 0:
308 310 raise ValueError, "nTxs (=%d), should be a multiple of nHeights (=%d)" %(self.nTxs, self.processingHeaderObj.nHeights)
309 311
310 312 self.datablock = self.datablock.reshape((self.systemHeaderObj.nChannels, self.processingHeaderObj.profilesPerBlock*self.nTxs, self.processingHeaderObj.nHeights/self.nTxs))
311 313
312 314 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock*self.nTxs
313 315 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.nHeights/self.nTxs) *self.processingHeaderObj.deltaHeight + self.processingHeaderObj.firstHeight
314 316 self.dataOut.radarControllerHeaderObj.ippSeconds = self.radarControllerHeaderObj.ippSeconds/self.nTxs
315 317
316 318 return
317 319
320 def readFirstHeaderFromServer(self):
321
322 self.getFirstHeader()
323
324 self.firstHeaderSize = self.basicHeaderObj.size
325
326 datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
327 if datatype == 0:
328 datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
329 elif datatype == 1:
330 datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')])
331 elif datatype == 2:
332 datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')])
333 elif datatype == 3:
334 datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')])
335 elif datatype == 4:
336 datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')])
337 elif datatype == 5:
338 datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')])
339 else:
340 raise ValueError, 'Data type was not defined'
341
342 self.dtype = datatype_str
343 #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
344 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1)
345 # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
346 # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
347 self.getBlockDimension()
348
349
350 def getFromServer(self):
351 self.flagDiscontinuousBlock = 0
352 self.profileIndex = 0
353 self.flagIsNewBlock = 1
354 self.dataOut.flagNoData = False
355 self.nTotalBlocks += 1
356 self.nReadBlocks += 1
357 self.blockPointer = 0
358
359 block = self.receiver.recv()
360
361 self.basicHeaderObj.read(block[self.blockPointer:])
362 self.blockPointer += self.basicHeaderObj.length
363 self.systemHeaderObj.read(block[self.blockPointer:])
364 self.blockPointer += self.systemHeaderObj.length
365 self.radarControllerHeaderObj.read(block[self.blockPointer:])
366 self.blockPointer += self.radarControllerHeaderObj.length
367 self.processingHeaderObj.read(block[self.blockPointer:])
368 self.blockPointer += self.processingHeaderObj.length
369 self.readFirstHeaderFromServer()
370
371 timestamp = self.basicHeaderObj.get_datatime()
372 print '[Reading] - Block {} - {}'.format(self.nTotalBlocks, timestamp)
373 current_pointer_location = self.blockPointer
374 junk = numpy.fromstring( block[self.blockPointer:], self.dtype, self.blocksize )
375
376 try:
377 junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
378 except:
379 #print "The read block (%3d) has not enough data" %self.nReadBlocks
380 if self.waitDataBlock(pointer_location=current_pointer_location):
381 junk = numpy.fromstring( block[self.blockPointer:], self.dtype, self.blocksize )
382 junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
383 # return 0
384
385 #Dimensions : nChannels, nProfiles, nSamples
386
387 junk = numpy.transpose(junk, (2,0,1))
388 self.datablock = junk['real'] + junk['imag'] * 1j
389 self.profileIndex = 0
390 if self.selBlocksize == None: self.selBlocksize = self.dataOut.nProfiles
391 if self.selBlocktime != None:
392 if self.dataOut.nCohInt is not None:
393 nCohInt = self.dataOut.nCohInt
394 else:
395 nCohInt = 1
396 self.selBlocksize = int(self.dataOut.nProfiles*round(self.selBlocktime/(nCohInt*self.dataOut.ippSeconds*self.dataOut.nProfiles)))
397 self.dataOut.data = self.datablock[:,self.profileIndex:self.profileIndex+self.selBlocksize,:]
398 datasize = self.dataOut.data.shape[1]
399 if datasize < self.selBlocksize:
400 buffer = numpy.zeros((self.dataOut.data.shape[0], self.selBlocksize, self.dataOut.data.shape[2]), dtype = 'complex')
401 buffer[:,:datasize,:] = self.dataOut.data
402 self.dataOut.data = buffer
403 self.profileIndex = blockIndex
404
405 self.dataOut.flagDataAsBlock = True
406 self.flagIsNewBlock = 1
407 self.dataOut.realtime = self.online
408
409 return self.dataOut.data
410
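getFromServer walks the received buffer with a running offset, reading each header at the current position and advancing by that header's length; the same bookkeeping in isolation (the four lengths are placeholders, not the real header sizes):

block = b'\x00' * 1024
pointer = 0
for header_length in (24, 24, 48, 40):   # placeholder sizes
    chunk = block[pointer:pointer + header_length]   # parse this header chunk here
    pointer += header_length
assert pointer == 136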
318 411 def getData(self):
319 412 """
320 413 getData obtains one data unit from the read buffer, a single profile, and copies it to the
321 414 self.dataOut object of type "Voltage" with all its associated parameters (metadata). When there
322 415 is no data left in the read buffer, a new read of the data blocks is performed using
323 416 "readNextBlock".
324 417 
325 418 It also increments the buffer counter "self.profileIndex" by 1.
326 419 
327 420 Return:
328 421 
329 422 If the self.getByBlock flag is set, the whole block is copied to self.dataOut and self.profileIndex
330 423 equals the total number of profiles read from the file.
331 424 
332 425 If self.getByBlock == False:
333 426 
334 427 self.dataOut.data = buffer[:, thisProfile, :]
335 428 
336 429 shape = [nChannels, nHeis]
337 430 
338 431 If self.getByBlock == True:
339 432 
340 433 self.dataOut.data = buffer[:, :, :]
341 434 
342 435 shape = [nChannels, nProfiles, nHeis]
343 436 
344 437 Affected variables:
345 438 self.dataOut
346 439 self.profileIndex
347 440
348 441 Affected:
349 442 self.dataOut
350 443 self.profileIndex
351 444 self.flagDiscontinuousBlock
352 445 self.flagIsNewBlock
353 446 """
354 print 1
355 447 if self.flagNoMoreFiles:
356 448 self.dataOut.flagNoData = True
357 449 print 'Process finished'
358 450 return 0
359 print 2
360 451 self.flagDiscontinuousBlock = 0
361 452 self.flagIsNewBlock = 0
362 print 3
363 453 if self.__hasNotDataInBuffer():
364
365 454 if not( self.readNextBlock() ):
366 455 return 0
367 456
368 457 self.getFirstHeader()
369 458
370 459 self.reshapeData()
371 print 4
372 460 if self.datablock is None:
373 461 self.dataOut.flagNoData = True
374 462 return 0
375 463
376 464 if not self.getByBlock:
377 465
378 466 """
379 467 Return profile by profile
380 468
381 469 If nTxs > 1 then one profile is divided by nTxs and number of total
382 470 blocks is increased by nTxs (nProfiles *= nTxs)
383 471 """
384 472 self.dataOut.flagDataAsBlock = False
385 473 self.dataOut.data = self.datablock[:,self.profileIndex,:]
386 474 self.dataOut.profileIndex = self.profileIndex
387 475
388 476 self.profileIndex += 1
389 477
390 478 # elif self.selBlocksize==None or self.selBlocksize==self.dataOut.nProfiles:
391 479 # """
392 480 # Return all block
393 481 # """
394 482 # self.dataOut.flagDataAsBlock = True
395 483 # self.dataOut.data = self.datablock
396 484 # self.dataOut.profileIndex = self.dataOut.nProfiles - 1
397 485 #
398 486 # self.profileIndex = self.dataOut.nProfiles
399 487
400 488 else:
401 489 """
402 490 Return a block
403 491 """
404 492 if self.selBlocksize == None: self.selBlocksize = self.dataOut.nProfiles
405 493 if self.selBlocktime != None:
406 494 if self.dataOut.nCohInt is not None:
407 495 nCohInt = self.dataOut.nCohInt
408 496 else:
409 497 nCohInt = 1
410 498 self.selBlocksize = int(self.dataOut.nProfiles*round(self.selBlocktime/(nCohInt*self.dataOut.ippSeconds*self.dataOut.nProfiles)))
411 499
412 500 self.dataOut.data = self.datablock[:,self.profileIndex:self.profileIndex+self.selBlocksize,:]
413 501 self.profileIndex += self.selBlocksize
414 502 datasize = self.dataOut.data.shape[1]
415 503
416 504 if datasize < self.selBlocksize:
417 505 buffer = numpy.zeros((self.dataOut.data.shape[0],self.selBlocksize,self.dataOut.data.shape[2]), dtype = 'complex')
418 506 buffer[:,:datasize,:] = self.dataOut.data
419 507
420 508 while datasize < self.selBlocksize: #Not enough profiles to fill the block
421 509 if not( self.readNextBlock() ):
422 510 return 0
423 511 self.getFirstHeader()
424 512 self.reshapeData()
425 513 if self.datablock is None:
426 514 self.dataOut.flagNoData = True
427 515 return 0
428 516 #stack data
429 517 blockIndex = self.selBlocksize - datasize
430 518 datablock1 = self.datablock[:,:blockIndex,:]
431 519
432 520 buffer[:,datasize:datasize+datablock1.shape[1],:] = datablock1
433 521 datasize += datablock1.shape[1]
434 522
435 523 self.dataOut.data = buffer
436 524 self.profileIndex = blockIndex
437 525
438 526 self.dataOut.flagDataAsBlock = True
439 527 self.dataOut.nProfiles = self.dataOut.data.shape[1]
440 528
441 529 self.dataOut.flagNoData = False
442 530
443 531 self.getBasicHeader()
444 532
445 533 self.dataOut.realtime = self.online
446 534
447 535 return self.dataOut.data
448 536
449 537 class VoltageWriter(JRODataWriter, Operation):
450 538 """
451 539 This class writes voltage data to processed files (.r). Data is always
452 540 written in blocks.
453 541 """
454 542
455 543 ext = ".r"
456 544
457 545 optchar = "D"
458 546
459 547 shapeBuffer = None
460 548
461 549
462 550 def __init__(self, **kwargs):
463 551 """
464 552 Initializer of the VoltageWriter class for writing voltage data.
465 553
466 554 Affected:
467 555 self.dataOut
468 556
469 557 Return: None
470 558 """
471 559 Operation.__init__(self, **kwargs)
472 560
473 561 self.nTotalBlocks = 0
474 562
475 563 self.profileIndex = 0
476 564
477 565 self.isConfig = False
478 566
479 567 self.fp = None
480 568
481 569 self.flagIsNewFile = 1
482 570
483 571 self.blockIndex = 0
484 572
485 573 self.flagIsNewBlock = 0
486 574
487 575 self.setFile = None
488 576
489 577 self.dtype = None
490 578
491 579 self.path = None
492 580
493 581 self.filename = None
494 582
495 583 self.basicHeaderObj = BasicHeader(LOCALTIME)
496 584
497 585 self.systemHeaderObj = SystemHeader()
498 586
499 587 self.radarControllerHeaderObj = RadarControllerHeader()
500 588
501 589 self.processingHeaderObj = ProcessingHeader()
502 590
503 591 def hasAllDataInBuffer(self):
504 592 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
505 593 return 1
506 594 return 0
507 595
508 596
509 597 def setBlockDimension(self):
510 598 """
511 599 Computes the dimensional shapes of the data sub-blocks that make up a block.
512 600
513 601 Affected:
514 602 self.shape_spc_Buffer
515 603 self.shape_cspc_Buffer
516 604 self.shape_dc_Buffer
517 605
518 606 Return: None
519 607 """
520 608 self.shapeBuffer = (self.processingHeaderObj.profilesPerBlock,
521 609 self.processingHeaderObj.nHeights,
522 610 self.systemHeaderObj.nChannels)
523 611
524 612 self.datablock = numpy.zeros((self.systemHeaderObj.nChannels,
525 613 self.processingHeaderObj.profilesPerBlock,
526 614 self.processingHeaderObj.nHeights),
527 615 dtype=numpy.dtype('complex64'))
528 616
529 617 def writeBlock(self):
530 618 """
531 619 Writes the buffer to the designated file.
532 620
533 621 Affected:
534 622 self.profileIndex
535 623 self.flagIsNewFile
536 624 self.flagIsNewBlock
537 625 self.nTotalBlocks
538 626 self.blockIndex
539 627
540 628 Return: None
541 629 """
542 630 data = numpy.zeros( self.shapeBuffer, self.dtype )
543 631
544 632 junk = numpy.transpose(self.datablock, (1,2,0))
545 633
546 634 data['real'] = junk.real
547 635 data['imag'] = junk.imag
548 636
549 637 data = data.reshape( (-1) )
550 638
551 639 data.tofile( self.fp )
552 640
553 641 self.datablock.fill(0)
554 642
555 643 self.profileIndex = 0
556 644 self.flagIsNewFile = 0
557 645 self.flagIsNewBlock = 1
558 646
559 647 self.blockIndex += 1
560 648 self.nTotalBlocks += 1
561 649
562 650 # print "[Writing] Block = %04d" %self.blockIndex
563 651
564 652 def putData(self):
565 653 """
566 654 Fills a data block and then writes it to a file.
567 655 
568 656 Affected:
569 657 self.flagIsNewBlock
570 658 self.profileIndex
571 659 
572 660 Return:
573 661 0 : if there is no data or no more files can be written
574 662 1 : if a block of data was written to a file
575 663 """
576 664 if self.dataOut.flagNoData:
577 665 return 0
578 666
579 667 self.flagIsNewBlock = 0
580 668
581 669 if self.dataOut.flagDiscontinuousBlock:
582 670 self.datablock.fill(0)
583 671 self.profileIndex = 0
584 672 self.setNextFile()
585 673
586 674 if self.profileIndex == 0:
587 675 self.setBasicHeader()
588 676
589 677 self.datablock[:,self.profileIndex,:] = self.dataOut.data
590 678
591 679 self.profileIndex += 1
592 680
593 681 if self.hasAllDataInBuffer():
594 682 #if self.flagIsNewFile:
595 683 self.writeNextBlock()
596 684 # self.setFirstHeader()
597 685
598 686 return 1
599 687
600 688 def __getBlockSize(self):
601 689 '''
602 690 This method determines the number of bytes in a Voltage data block
603 691 '''
604 692
605 693 dtype_width = self.getDtypeWidth()
606 694
607 695 blocksize = int(self.dataOut.nHeights * self.dataOut.nChannels * self.profilesPerBlock * dtype_width * 2)
608 696
609 697 return blocksize
610 698
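# Worked example (illustrative numbers only, not from the original file) of the block-size
# formula used by __getBlockSize: nHeights * nChannels * profilesPerBlock * dtype_width * 2,
# where the trailing factor 2 accounts for the real and imaginary parts of each sample.
nHeights, nChannels, profilesPerBlock = 100, 2, 128   # assumed values
dtype_width = 2                                       # e.g. 2 bytes per short sample
blocksize = nHeights * nChannels * profilesPerBlock * dtype_width * 2
assert blocksize == 102400                            # 100 KiB per voltage block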
611 699 def setFirstHeader(self):
612 700
613 701 """
614 702 Gets a copy of the First Header
615 703
616 704 Affected:
617 705 self.systemHeaderObj
618 706 self.radarControllerHeaderObj
619 707 self.dtype
620 708
621 709 Return:
622 710 None
623 711 """
624 712
625 713 self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
626 714 self.systemHeaderObj.nChannels = self.dataOut.nChannels
627 715 self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()
628 716
629 717 self.processingHeaderObj.dtype = 0 # Voltage
630 718 self.processingHeaderObj.blockSize = self.__getBlockSize()
631 719 self.processingHeaderObj.profilesPerBlock = self.profilesPerBlock
632 720 self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
633 721 self.processingHeaderObj.nWindows = 1 # could be 1 or self.dataOut.processingHeaderObj.nWindows
634 722 self.processingHeaderObj.nCohInt = self.dataOut.nCohInt
635 723 self.processingHeaderObj.nIncohInt = 1 # when the source data is of type Voltage
636 724 self.processingHeaderObj.totalSpectra = 0 # when the source data is of type Voltage
637 725
638 726 if self.dataOut.code is not None:
639 727 self.processingHeaderObj.code = self.dataOut.code
640 728 self.processingHeaderObj.nCode = self.dataOut.nCode
641 729 self.processingHeaderObj.nBaud = self.dataOut.nBaud
642 730
643 731 if self.processingHeaderObj.nWindows != 0:
644 732 self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
645 733 self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
646 734 self.processingHeaderObj.nHeights = self.dataOut.nHeights
647 735 self.processingHeaderObj.samplesWin = self.dataOut.nHeights
648 736
649 737 self.processingHeaderObj.processFlags = self.getProcessFlags()
650 738
651 739 self.setBasicHeader()
@@ -1,1283 +1,1283
1 1 import sys
2 2 import numpy
3 3 from scipy import interpolate
4 4
5 5 from jroproc_base import ProcessingUnit, Operation
6 6 from schainpy.model.data.jrodata import Voltage
7 7
8 8 class VoltageProc(ProcessingUnit):
9 9
10 10
11 11 def __init__(self, **kwargs):
12 12
13 13 ProcessingUnit.__init__(self, **kwargs)
14 14
15 15 # self.objectDict = {}
16 16 self.dataOut = Voltage()
17 17 self.flip = 1
18 18
19 19 def run(self):
20 20 if self.dataIn.type == 'AMISR':
21 21 self.__updateObjFromAmisrInput()
22 22
23 23 if self.dataIn.type == 'Voltage':
24 24 self.dataOut.copy(self.dataIn)
25 25
26 26 # self.dataOut.copy(self.dataIn)
27 27
28 28 def __updateObjFromAmisrInput(self):
29 29
30 30 self.dataOut.timeZone = self.dataIn.timeZone
31 31 self.dataOut.dstFlag = self.dataIn.dstFlag
32 32 self.dataOut.errorCount = self.dataIn.errorCount
33 33 self.dataOut.useLocalTime = self.dataIn.useLocalTime
34 34
35 35 self.dataOut.flagNoData = self.dataIn.flagNoData
36 36 self.dataOut.data = self.dataIn.data
37 37 self.dataOut.utctime = self.dataIn.utctime
38 38 self.dataOut.channelList = self.dataIn.channelList
39 39 # self.dataOut.timeInterval = self.dataIn.timeInterval
40 40 self.dataOut.heightList = self.dataIn.heightList
41 41 self.dataOut.nProfiles = self.dataIn.nProfiles
42 42
43 43 self.dataOut.nCohInt = self.dataIn.nCohInt
44 44 self.dataOut.ippSeconds = self.dataIn.ippSeconds
45 45 self.dataOut.frequency = self.dataIn.frequency
46 46
47 47 self.dataOut.azimuth = self.dataIn.azimuth
48 48 self.dataOut.zenith = self.dataIn.zenith
49 49
50 50 self.dataOut.beam.codeList = self.dataIn.beam.codeList
51 51 self.dataOut.beam.azimuthList = self.dataIn.beam.azimuthList
52 52 self.dataOut.beam.zenithList = self.dataIn.beam.zenithList
53 53 #
54 54 # pass#
55 55 #
56 56 # def init(self):
57 57 #
58 58 #
59 59 # if self.dataIn.type == 'AMISR':
60 60 # self.__updateObjFromAmisrInput()
61 61 #
62 62 # if self.dataIn.type == 'Voltage':
63 63 # self.dataOut.copy(self.dataIn)
64 64 # # There is no need to copy the dataIn attributes on every init()
65 65 # # the copy should be done for each new block of data
66 66
67 67 def selectChannels(self, channelList):
68 68
69 69 channelIndexList = []
70 70
71 71 for channel in channelList:
72 72 if channel not in self.dataOut.channelList:
73 73 raise ValueError, "Channel %d is not in %s" %(channel, str(self.dataOut.channelList))
74 74
75 75 index = self.dataOut.channelList.index(channel)
76 76 channelIndexList.append(index)
77 77
78 78 self.selectChannelsByIndex(channelIndexList)
79 79
80 80 def selectChannelsByIndex(self, channelIndexList):
81 81 """
82 82 Selects a block of data by channels according to channelIndexList
83 83
84 84 Input:
85 85 channelIndexList : plain list of channels to select, e.g. [2,3,7]
86 86
87 87 Affected:
88 88 self.dataOut.data
89 89 self.dataOut.channelIndexList
90 90 self.dataOut.nChannels
91 91 self.dataOut.m_ProcessingHeader.totalSpectra
92 92 self.dataOut.systemHeaderObj.numChannels
93 93 self.dataOut.m_ProcessingHeader.blockSize
94 94
95 95 Return:
96 96 None
97 97 """
98 98
99 99 for channelIndex in channelIndexList:
100 100 if channelIndex not in self.dataOut.channelIndexList:
101 101 print channelIndexList
102 102 raise ValueError, "The value %d in channelIndexList is not valid" %channelIndex
103 103
104 104 if self.dataOut.flagDataAsBlock:
105 105 """
106 106 If the data was read in blocks, dimension = [nChannels, nProfiles, nHeis]
107 107 """
108 108 data = self.dataOut.data[channelIndexList,:,:]
109 109 else:
110 110 data = self.dataOut.data[channelIndexList,:]
111 111
112 112 self.dataOut.data = data
113 113 self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
114 114 # self.dataOut.nChannels = nChannels
115 115
116 116 return 1
117 117
118 118 def selectHeights(self, minHei=None, maxHei=None):
119 119 """
120 120 Selects a block of data from a group of height values within the range
121 121 minHei <= height <= maxHei
122 122 
123 123 Input:
124 124 minHei : minimum height value to consider
125 125 maxHei : maximum height value to consider
126 126 
127 127 Affected:
128 128 Several values are changed indirectly through the selectHeightsByIndex method
129 129 
130 130 Return:
131 131 1 if the method ran successfully, otherwise 0
132 132 """
133 133
134 134 if minHei == None:
135 135 minHei = self.dataOut.heightList[0]
136 136
137 137 if maxHei == None:
138 138 maxHei = self.dataOut.heightList[-1]
139 139
140 140 if (minHei < self.dataOut.heightList[0]):
141 141 minHei = self.dataOut.heightList[0]
142 142
143 143 if (maxHei > self.dataOut.heightList[-1]):
144 144 maxHei = self.dataOut.heightList[-1]
145 145
146 146 minIndex = 0
147 147 maxIndex = 0
148 148 heights = self.dataOut.heightList
149 149
150 150 inda = numpy.where(heights >= minHei)
151 151 indb = numpy.where(heights <= maxHei)
152 152
153 153 try:
154 154 minIndex = inda[0][0]
155 155 except:
156 156 minIndex = 0
157 157
158 158 try:
159 159 maxIndex = indb[0][-1]
160 160 except:
161 161 maxIndex = len(heights)
162 162
163 163 self.selectHeightsByIndex(minIndex, maxIndex)
164 164
165 165 return 1
166 166
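# Standalone sketch (illustrative values, not from the original module) of the index
# lookup done by selectHeights: numpy.where gives the first index >= minHei and the
# last index <= maxHei, which are then passed to selectHeightsByIndex.
import numpy
heights = numpy.arange(0.0, 1500.0, 15.0)     # assumed height list
minHei, maxHei = 90.0, 600.0
minIndex = numpy.where(heights >= minHei)[0][0]
maxIndex = numpy.where(heights <= maxHei)[0][-1]
assert heights[minIndex] >= minHei and heights[maxIndex] <= maxHei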
167 167
168 168 def selectHeightsByIndex(self, minIndex, maxIndex):
169 169 """
170 170 Selects a block of data from a group of height indexes within the range
171 171 minIndex <= index <= maxIndex
172 172
173 173 Input:
174 174 minIndex : minimum height index to consider
175 175 maxIndex : maximum height index to consider
176 176
177 177 Affected:
178 178 self.dataOut.data
179 179 self.dataOut.heightList
180 180
181 181 Return:
182 182 1 if the method ran successfully, otherwise 0
183 183 """
184 184
185 185 if (minIndex < 0) or (minIndex > maxIndex):
186 186 raise ValueError, "Height index range (%d,%d) is not valid" % (minIndex, maxIndex)
187 187
188 188 if (maxIndex >= self.dataOut.nHeights):
189 189 maxIndex = self.dataOut.nHeights
190 190
191 191 #voltage
192 192 if self.dataOut.flagDataAsBlock:
193 193 """
194 194 If the data was read in blocks, dimension = [nChannels, nProfiles, nHeis]
195 195 """
196 196 data = self.dataOut.data[:,:, minIndex:maxIndex]
197 197 else:
198 198 data = self.dataOut.data[:, minIndex:maxIndex]
199 199
200 200 # firstHeight = self.dataOut.heightList[minIndex]
201 201
202 202 self.dataOut.data = data
203 203 self.dataOut.heightList = self.dataOut.heightList[minIndex:maxIndex]
204 204
205 205 if self.dataOut.nHeights <= 1:
206 206 raise ValueError, "selectHeights: Too few heights. Current number of heights is %d" %(self.dataOut.nHeights)
207 207
208 208 return 1
209 209
210 210
211 211 def filterByHeights(self, window):
212 212
213 213 deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
214 214
215 215 if window == None:
216 216 window = (self.dataOut.radarControllerHeaderObj.txA/self.dataOut.radarControllerHeaderObj.nBaud) / deltaHeight
217 217
218 218 newdelta = deltaHeight * window
219 219 r = self.dataOut.nHeights % window
220 220 newheights = (self.dataOut.nHeights-r)/window
221 221
222 222 if newheights <= 1:
223 223 raise ValueError, "filterByHeights: Too few heights. Current number of heights is %d and window is %d" %(self.dataOut.nHeights, window)
224 224
225 225 if self.dataOut.flagDataAsBlock:
226 226 """
227 227 If the data was read in blocks, dimension = [nChannels, nProfiles, nHeis]
228 228 """
229 229 buffer = self.dataOut.data[:, :, 0:self.dataOut.nHeights-r]
230 230 buffer = buffer.reshape(self.dataOut.nChannels,self.dataOut.nProfiles,self.dataOut.nHeights/window,window)
231 231 buffer = numpy.sum(buffer,3)
232 232
233 233 else:
234 234 buffer = self.dataOut.data[:,0:self.dataOut.nHeights-r]
235 235 buffer = buffer.reshape(self.dataOut.nChannels,self.dataOut.nHeights/window,window)
236 236 buffer = numpy.sum(buffer,2)
237 237
238 238 self.dataOut.data = buffer
239 239 self.dataOut.heightList = self.dataOut.heightList[0] + numpy.arange( newheights )*newdelta
240 240 self.dataOut.windowOfFilter = window
241 241
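# Standalone sketch (assumed toy sizes, not part of the original module) of the height
# decimation done by filterByHeights for profile-by-profile data: drop the remainder,
# reshape so each group of `window` consecutive heights forms its own axis, then sum
# over that axis.
import numpy
nChannels, nHeights, window = 2, 103, 4                 # assumed sizes
data = numpy.arange(nChannels * nHeights).reshape(nChannels, nHeights).astype('complex64')
r = nHeights % window                                   # leftover heights are discarded
buf = data[:, :nHeights - r].reshape(nChannels, (nHeights - r) // window, window)
filtered = buf.sum(axis=2)                              # one sample per height window
assert filtered.shape == (nChannels, (nHeights - r) // window)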
242 242 def setH0(self, h0, deltaHeight = None):
243 243
244 244 if not deltaHeight:
245 245 deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
246 246
247 247 nHeights = self.dataOut.nHeights
248 248
249 249 newHeiRange = h0 + numpy.arange(nHeights)*deltaHeight
250 250
251 251 self.dataOut.heightList = newHeiRange
252 252
253 253 def deFlip(self, channelList = []):
254 254
255 255 data = self.dataOut.data.copy()
256 256
257 257 if self.dataOut.flagDataAsBlock:
258 258 flip = self.flip
259 259 profileList = range(self.dataOut.nProfiles)
260 260
261 261 if not channelList:
262 262 for thisProfile in profileList:
263 263 data[:,thisProfile,:] = data[:,thisProfile,:]*flip
264 264 flip *= -1.0
265 265 else:
266 266 for thisChannel in channelList:
267 267 if thisChannel not in self.dataOut.channelList:
268 268 continue
269 269
270 270 for thisProfile in profileList:
271 271 data[thisChannel,thisProfile,:] = data[thisChannel,thisProfile,:]*flip
272 272 flip *= -1.0
273 273
274 274 self.flip = flip
275 275
276 276 else:
277 277 if not channelList:
278 278 data[:,:] = data[:,:]*self.flip
279 279 else:
280 280 for thisChannel in channelList:
281 281 if thisChannel not in self.dataOut.channelList:
282 282 continue
283 283
284 284 data[thisChannel,:] = data[thisChannel,:]*self.flip
285 285
286 286 self.flip *= -1.
287 287
288 288 self.dataOut.data = data
289 289
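# Standalone sketch (illustrative sizes, not from the original module) of what deFlip
# does to block data: the sign of every other profile is inverted, removing a +1/-1
# modulation applied on transmission.
import numpy
nChannels, nProfiles, nHeights = 1, 6, 4                # assumed sizes
data = numpy.ones((nChannels, nProfiles, nHeights), dtype='complex64')
flip = 1.0
for profile in range(nProfiles):
    data[:, profile, :] *= flip
    flip *= -1.0
assert (data[0, ::2, 0] == 1).all() and (data[0, 1::2, 0] == -1).all()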
290 290 def setRadarFrequency(self, frequency=None):
291 291
292 292 if frequency != None:
293 293 self.dataOut.frequency = frequency
294 294
295 295 return 1
296 296
297 297 def interpolateHeights(self, topLim, botLim):
298 298 #69 to 72 for julia
299 299 #82-84 for meteors
300 300 if len(numpy.shape(self.dataOut.data))==2:
301 301 sampInterp = (self.dataOut.data[:,botLim-1] + self.dataOut.data[:,topLim+1])/2
302 302 sampInterp = numpy.transpose(numpy.tile(sampInterp,(topLim-botLim + 1,1)))
303 303 #self.dataOut.data[:,botLim:limSup+1] = sampInterp
304 304 self.dataOut.data[:,botLim:topLim+1] = sampInterp
305 305 else:
306 306 nHeights = self.dataOut.data.shape[2]
307 307 x = numpy.hstack((numpy.arange(botLim),numpy.arange(topLim+1,nHeights)))
308 308 y = self.dataOut.data[:,:,range(botLim)+range(topLim+1,nHeights)]
309 309 f = interpolate.interp1d(x, y, axis = 2)
310 310 xnew = numpy.arange(botLim,topLim+1)
311 311 ynew = f(xnew)
312 312
313 313 self.dataOut.data[:,:,botLim:topLim+1] = ynew
314 314
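# Standalone sketch (assumed sizes and band, not from the original module) of
# interpolateHeights on block data: samples inside [botLim, topLim] are replaced by a
# linear interpolation built from the heights outside that band, using
# scipy.interpolate.interp1d along the height axis.
import numpy
from scipy import interpolate
nChannels, nProfiles, nHeights = 2, 3, 20               # assumed sizes
botLim, topLim = 8, 11                                  # assumed contaminated height band
data = numpy.random.randn(nChannels, nProfiles, nHeights)
x = numpy.hstack((numpy.arange(botLim), numpy.arange(topLim + 1, nHeights)))
y = data[:, :, x]
f = interpolate.interp1d(x, y, axis=2)
data[:, :, botLim:topLim + 1] = f(numpy.arange(botLim, topLim + 1))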
315 315 # import collections
316 316
317 317 class CohInt(Operation):
318 318
319 319 isConfig = False
320 320
321 321 __profIndex = 0
322 322 __withOverapping = False
323 323
324 324 __byTime = False
325 325 __initime = None
326 326 __lastdatatime = None
327 327 __integrationtime = None
328 328
329 329 __buffer = None
330 330
331 331 __dataReady = False
332 332
333 333 n = None
334 334
335 335
336 336 def __init__(self, **kwargs):
337 337
338 338 Operation.__init__(self, **kwargs)
339 339
340 340 # self.isConfig = False
341 341
342 342 def setup(self, n=None, timeInterval=None, overlapping=False, byblock=False):
343 343 """
344 344 Set the parameters of the integration class.
345 345
346 346 Inputs:
347 347
348 348 n : Number of coherent integrations
349 349 timeInterval : Integration time. Ignored when the parameter "n" is given
350 350 overlapping : If True, integrate over a sliding (overlapping) buffer of the last n profiles
351 351
352 352 """
353 353
354 354 self.__initime = None
355 355 self.__lastdatatime = 0
356 356 self.__buffer = None
357 357 self.__dataReady = False
358 358 self.byblock = byblock
359 359
360 360 if n == None and timeInterval == None:
361 361 raise ValueError, "n or timeInterval should be specified ..."
362 362
363 363 if n != None:
364 364 self.n = n
365 365 self.__byTime = False
366 366 else:
367 367 self.__integrationtime = timeInterval #* 60. #if (type(timeInterval)!=integer) -> change this line
368 368 self.n = 9999
369 369 self.__byTime = True
370 370
371 371 if overlapping:
372 372 self.__withOverapping = True
373 373 self.__buffer = None
374 374 else:
375 375 self.__withOverapping = False
376 376 self.__buffer = 0
377 377
378 378 self.__profIndex = 0
379 379
380 380 def putData(self, data):
381 381
382 382 """
383 383 Add a profile to the __buffer and increase in one the __profileIndex
384 384
385 385 """
386 386
387 387 if not self.__withOverapping:
388 388 self.__buffer += data.copy()
389 389 self.__profIndex += 1
390 390 return
391 391
392 392 #Overlapping data
393 393 nChannels, nHeis = data.shape
394 394 data = numpy.reshape(data, (1, nChannels, nHeis))
395 395
396 396 #If the buffer is empty then it takes the data value
397 397 if self.__buffer is None:
398 398 self.__buffer = data
399 399 self.__profIndex += 1
400 400 return
401 401
402 402 #If the buffer length is lower than n then stack the data value
403 403 if self.__profIndex < self.n:
404 404 self.__buffer = numpy.vstack((self.__buffer, data))
405 405 self.__profIndex += 1
406 406 return
407 407
408 408 #If the buffer length is equal to n then roll the buffer and replace the last value with the new data
409 409 self.__buffer = numpy.roll(self.__buffer, -1, axis=0)
410 410 self.__buffer[self.n-1] = data
411 411 self.__profIndex = self.n
412 412 return
413 413
414 414
415 415 def pushData(self):
416 416 """
417 417 Return the sum of the buffered profiles and the number of profiles used in the sum.
418 418
419 419 Affected:
420 420
421 421 self.__profileIndex
422 422
423 423 """
424 424
425 425 if not self.__withOverapping:
426 426 data = self.__buffer
427 427 n = self.__profIndex
428 428
429 429 self.__buffer = 0
430 430 self.__profIndex = 0
431 431
432 432 return data, n
433 433
434 434 #Integration with Overlapping
435 435 data = numpy.sum(self.__buffer, axis=0)
436 436 n = self.__profIndex
437 437
438 438 return data, n
439 439
440 440 def byProfiles(self, data):
441 441
442 442 self.__dataReady = False
443 443 avgdata = None
444 444 # n = None
445 445
446 446 self.putData(data)
447 447
448 448 if self.__profIndex == self.n:
449 449
450 450 avgdata, n = self.pushData()
451 451 self.__dataReady = True
452 452
453 453 return avgdata
454 454
455 455 def byTime(self, data, datatime):
456 456
457 457 self.__dataReady = False
458 458 avgdata = None
459 459 n = None
460 460
461 461 self.putData(data)
462 462
463 463 if (datatime - self.__initime) >= self.__integrationtime:
464 464 avgdata, n = self.pushData()
465 465 self.n = n
466 466 self.__dataReady = True
467 467
468 468 return avgdata
469 469
470 470 def integrate(self, data, datatime=None):
471 471
472 472 if self.__initime == None:
473 473 self.__initime = datatime
474 474
475 475 if self.__byTime:
476 476 avgdata = self.byTime(data, datatime)
477 477 else:
478 478 avgdata = self.byProfiles(data)
479 479
480 480
481 481 self.__lastdatatime = datatime
482 482
483 483 if avgdata is None:
484 484 return None, None
485 485
486 486 avgdatatime = self.__initime
487 487
488 488 deltatime = datatime -self.__lastdatatime
489 489
490 490 if not self.__withOverapping:
491 491 self.__initime = datatime
492 492 else:
493 493 self.__initime += deltatime
494 494
495 495 return avgdata, avgdatatime
496 496
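# Standalone sketch (assumed sizes, not from the original module) of non-overlapping
# coherent integration as done by CohInt: n consecutive complex profiles are summed
# before further processing, which raises the SNR of coherent echoes.
import numpy
nChannels, nHeights, n = 2, 16, 4                       # assumed sizes
profiles = [numpy.random.randn(nChannels, nHeights)
            + 1j*numpy.random.randn(nChannels, nHeights) for _ in range(n)]
buffer_ = 0
for p in profiles:                                      # putData accumulates profiles...
    buffer_ = buffer_ + p
avgdata = buffer_                                       # ...and pushData returns the sum
assert numpy.allclose(avgdata, sum(profiles))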
497 497 def integrateByBlock(self, dataOut):
498 498
499 499 times = int(dataOut.data.shape[1]/self.n)
500 500 avgdata = numpy.zeros((dataOut.nChannels, times, dataOut.nHeights), dtype=numpy.complex)
501 501
502 502 id_min = 0
503 503 id_max = self.n
504 504
505 505 for i in range(times):
506 506 junk = dataOut.data[:,id_min:id_max,:]
507 507 avgdata[:,i,:] = junk.sum(axis=1)
508 508 id_min += self.n
509 509 id_max += self.n
510 510
511 511 timeInterval = dataOut.ippSeconds*self.n
512 512 avgdatatime = (times - 1) * timeInterval + dataOut.utctime
513 513 self.__dataReady = True
514 514 return avgdata, avgdatatime
515 515
516 def run(self, dataOut, **kwargs):
517
516
517 def run(self, dataOut, n=None, timeInterval=None, overlapping=False, byblock=False, **kwargs):
518 518 if not self.isConfig:
519 self.setup(**kwargs)
519 self.setup(n=n, timeInterval=timeInterval, overlapping=overlapping, byblock=byblock, **kwargs)
520 520 self.isConfig = True
521 521
522 522 if dataOut.flagDataAsBlock:
523 523 """
524 524 If the data is read in blocks, dimension = [nChannels, nProfiles, nHeis]
525 525 """
526 526 avgdata, avgdatatime = self.integrateByBlock(dataOut)
527 527 dataOut.nProfiles /= self.n
528 528 else:
529 529 avgdata, avgdatatime = self.integrate(dataOut.data, dataOut.utctime)
530 530
531 531 # dataOut.timeInterval *= n
532 532 dataOut.flagNoData = True
533 533
534 534 if self.__dataReady:
535 535 dataOut.data = avgdata
536 536 dataOut.nCohInt *= self.n
537 537 dataOut.utctime = avgdatatime
538 538 # dataOut.timeInterval = dataOut.ippSeconds * dataOut.nCohInt
539 539 dataOut.flagNoData = False
540 540
541 541 class Decoder(Operation):
542 542
543 543 isConfig = False
544 544 __profIndex = 0
545 545
546 546 code = None
547 547
548 548 nCode = None
549 549 nBaud = None
550 550
551 551
552 552 def __init__(self, **kwargs):
553 553
554 554 Operation.__init__(self, **kwargs)
555 555
556 556 self.times = None
557 557 self.osamp = None
558 558 # self.__setValues = False
559 559 self.isConfig = False
560 560
561 561 def setup(self, code, osamp, dataOut):
562 562
563 563 self.__profIndex = 0
564 564
565 565 self.code = code
566 566
567 567 self.nCode = len(code)
568 568 self.nBaud = len(code[0])
569 569
570 570 if (osamp != None) and (osamp >1):
571 571 self.osamp = osamp
572 572 self.code = numpy.repeat(code, repeats=self.osamp, axis=1)
573 573 self.nBaud = self.nBaud*self.osamp
574 574
575 575 self.__nChannels = dataOut.nChannels
576 576 self.__nProfiles = dataOut.nProfiles
577 577 self.__nHeis = dataOut.nHeights
578 578
579 579 if self.__nHeis < self.nBaud:
580 580 raise ValueError, 'Number of heights (%d) should be greater than number of bauds (%d)' %(self.__nHeis, self.nBaud)
581 581
582 582 #Frequency
583 583 __codeBuffer = numpy.zeros((self.nCode, self.__nHeis), dtype=numpy.complex)
584 584
585 585 __codeBuffer[:,0:self.nBaud] = self.code
586 586
587 587 self.fft_code = numpy.conj(numpy.fft.fft(__codeBuffer, axis=1))
588 588
589 589 if dataOut.flagDataAsBlock:
590 590
591 591 self.ndatadec = self.__nHeis #- self.nBaud + 1
592 592
593 593 self.datadecTime = numpy.zeros((self.__nChannels, self.__nProfiles, self.ndatadec), dtype=numpy.complex)
594 594
595 595 else:
596 596
597 597 #Time
598 598 self.ndatadec = self.__nHeis #- self.nBaud + 1
599 599
600 600 self.datadecTime = numpy.zeros((self.__nChannels, self.ndatadec), dtype=numpy.complex)
601 601
602 602 def __convolutionInFreq(self, data):
603 603
604 604 fft_code = self.fft_code[self.__profIndex].reshape(1,-1)
605 605
606 606 fft_data = numpy.fft.fft(data, axis=1)
607 607
608 608 conv = fft_data*fft_code
609 609
610 610 data = numpy.fft.ifft(conv,axis=1)
611 611
612 612 return data
613 613
614 614 def __convolutionInFreqOpt(self, data):
615 615
616 616 raise NotImplementedError
617 617
618 618 def __convolutionInTime(self, data):
619 619
620 620 code = self.code[self.__profIndex]
621 621
622 622 for i in range(self.__nChannels):
623 623 self.datadecTime[i,:] = numpy.correlate(data[i,:], code, mode='full')[self.nBaud-1:]
624 624
625 625 return self.datadecTime
626 626
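# Standalone sketch (assumed code and sizes, not from the original module) of the
# time-domain decoding in __convolutionInTime: each channel is cross-correlated with
# the transmitted code and the 'full' output is trimmed with [nBaud-1:] so the first
# decoded sample is aligned with the first height.
import numpy
nBaud, nHeis = 4, 32                                    # assumed sizes
code = numpy.array([1, -1, 1, 1], dtype='complex64')    # assumed 4-baud code
data = numpy.random.randn(nHeis) + 1j*numpy.random.randn(nHeis)
decoded = numpy.correlate(data, code, mode='full')[nBaud - 1:]
assert decoded.shape[0] == nHeis                        # one decoded sample per height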
627 627 def __convolutionByBlockInTime(self, data):
628 628
629 629 repetitions = self.__nProfiles / self.nCode
630 630
631 631 junk = numpy.lib.stride_tricks.as_strided(self.code, (repetitions, self.code.size), (0, self.code.itemsize))
632 632 junk = junk.flatten()
633 633 code_block = numpy.reshape(junk, (self.nCode*repetitions, self.nBaud))
634 634
635 635 for i in range(self.__nChannels):
636 636 for j in range(self.__nProfiles):
637 637 self.datadecTime[i,j,:] = numpy.correlate(data[i,j,:], code_block[j,:], mode='full')[self.nBaud-1:]
638 638
639 639 return self.datadecTime
640 640
641 641 def __convolutionByBlockInFreq(self, data):
642 642
643 643 raise NotImplementedError, "Decoder by frequency for blocks is not implemented"
644 644
645 645
646 646 fft_code = self.fft_code[self.__profIndex].reshape(1,-1)
647 647
648 648 fft_data = numpy.fft.fft(data, axis=2)
649 649
650 650 conv = fft_data*fft_code
651 651
652 652 data = numpy.fft.ifft(conv,axis=2)
653 653
654 654 return data
655 655
656 656 def run(self, dataOut, code=None, nCode=None, nBaud=None, mode = 0, osamp=None, times=None):
657 657
658 658 if dataOut.flagDecodeData:
659 659 print "This data is already decoded, recoding again ..."
660 660
661 661 if not self.isConfig:
662 662
663 663 if code is None:
664 664 if dataOut.code is None:
665 665 raise ValueError, "Code could not be read from %s instance. Enter a value in Code parameter" %dataOut.type
666 666
667 667 code = dataOut.code
668 668 else:
669 669 code = numpy.array(code).reshape(nCode,nBaud)
670 670
671 671 self.setup(code, osamp, dataOut)
672 672
673 673 self.isConfig = True
674 674
675 675 if mode == 3:
676 676 sys.stderr.write("Decoder Warning: mode=%d is not valid, using mode=0\n" %mode)
677 677
678 678 if times != None:
679 679 sys.stderr.write("Decoder Warning: Argument 'times' in not used anymore\n")
680 680
681 681 if self.code is None:
682 682 print "Fail decoding: Code is not defined."
683 683 return
684 684
685 685 datadec = None
686 686 if mode == 3:
687 687 mode = 0
688 688
689 689 if dataOut.flagDataAsBlock:
690 690 """
691 691 Decoding when data has been read as a block
692 692 """
693 693
694 694 if mode == 0:
695 695 datadec = self.__convolutionByBlockInTime(dataOut.data)
696 696 if mode == 1:
697 697 datadec = self.__convolutionByBlockInFreq(dataOut.data)
698 698 else:
699 699 """
700 700 Decoding when data has been read profile by profile
701 701 """
702 702 if mode == 0:
703 703 datadec = self.__convolutionInTime(dataOut.data)
704 704
705 705 if mode == 1:
706 706 datadec = self.__convolutionInFreq(dataOut.data)
707 707
708 708 if mode == 2:
709 709 datadec = self.__convolutionInFreqOpt(dataOut.data)
710 710
711 711 if datadec is None:
712 712 raise ValueError, "Codification mode selected is not valid: mode=%d. Try selecting 0 or 1" %mode
713 713
714 714 dataOut.code = self.code
715 715 dataOut.nCode = self.nCode
716 716 dataOut.nBaud = self.nBaud
717 717
718 718 dataOut.data = datadec
719 719
720 720 dataOut.heightList = dataOut.heightList[0:datadec.shape[-1]]
721 721
722 722 dataOut.flagDecodeData = True # assume the data is now decoded
723 723
724 724 if self.__profIndex == self.nCode-1:
725 725 self.__profIndex = 0
726 726 return 1
727 727
728 728 self.__profIndex += 1
729 729
730 730 return 1
731 731 # dataOut.flagDeflipData = True # assume the data has not been de-flipped
732 732
733 733
734 734 class ProfileConcat(Operation):
735 735
736 736 isConfig = False
737 737 buffer = None
738 738
739 739 def __init__(self, **kwargs):
740 740
741 741 Operation.__init__(self, **kwargs)
742 742 self.profileIndex = 0
743 743
744 744 def reset(self):
745 745 self.buffer = numpy.zeros_like(self.buffer)
746 746 self.start_index = 0
747 747 self.times = 1
748 748
749 749 def setup(self, data, m, n=1):
750 750 self.buffer = numpy.zeros((data.shape[0],data.shape[1]*m),dtype=type(data[0,0]))
751 751 self.nHeights = data.shape[1]#.nHeights
752 752 self.start_index = 0
753 753 self.times = 1
754 754
755 755 def concat(self, data):
756 756
757 757 self.buffer[:,self.start_index:self.nHeights*self.times] = data.copy()
758 758 self.start_index = self.start_index + self.nHeights
759 759
760 760 def run(self, dataOut, m):
761 761
762 762 dataOut.flagNoData = True
763 763
764 764 if not self.isConfig:
765 765 self.setup(dataOut.data, m, 1)
766 766 self.isConfig = True
767 767
768 768 if dataOut.flagDataAsBlock:
769 769 raise ValueError, "ProfileConcat can only be used when voltage have been read profile by profile, getBlock = False"
770 770
771 771 else:
772 772 self.concat(dataOut.data)
773 773 self.times += 1
774 774 if self.times > m:
775 775 dataOut.data = self.buffer
776 776 self.reset()
777 777 dataOut.flagNoData = False
778 778 # more header and dataOut attributes should be updated, e.g. the heights
779 779 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
780 780 xf = dataOut.heightList[0] + dataOut.nHeights * deltaHeight * m
781 781 dataOut.heightList = numpy.arange(dataOut.heightList[0], xf, deltaHeight)
782 782 dataOut.ippSeconds *= m
783 783
784 784 class ProfileSelector(Operation):
785 785
786 786 profileIndex = None
787 787 # Total number of profiles
788 788 nProfiles = None
789 789
790 790 def __init__(self, **kwargs):
791 791
792 792 Operation.__init__(self, **kwargs)
793 793 self.profileIndex = 0
794 794
795 795 def incProfileIndex(self):
796 796
797 797 self.profileIndex += 1
798 798
799 799 if self.profileIndex >= self.nProfiles:
800 800 self.profileIndex = 0
801 801
802 802 def isThisProfileInRange(self, profileIndex, minIndex, maxIndex):
803 803
804 804 if profileIndex < minIndex:
805 805 return False
806 806
807 807 if profileIndex > maxIndex:
808 808 return False
809 809
810 810 return True
811 811
812 812 def isThisProfileInList(self, profileIndex, profileList):
813 813
814 814 if profileIndex not in profileList:
815 815 return False
816 816
817 817 return True
818 818
819 819 def run(self, dataOut, profileList=None, profileRangeList=None, beam=None, byblock=False, rangeList = None, nProfiles=None):
820 820
821 821 """
822 822 ProfileSelector:
823 823
824 824 Inputs:
825 825 profileList : Index of profiles selected. Example: profileList = (0,1,2,7,8)
826 826
827 827 profileRangeList : Minimum and maximum profile indexes. Example: profileRangeList = (4, 30)
828 828
829 829 rangeList : List of profile ranges. Example: rangeList = ((4, 30), (32, 64), (128, 256))
830 830
831 831 """
832 832
833 833 if rangeList is not None:
834 834 if type(rangeList[0]) not in (tuple, list):
835 835 rangeList = [rangeList]
836 836
837 837 dataOut.flagNoData = True
838 838
839 839 if dataOut.flagDataAsBlock:
840 840 """
841 841 data dimension = [nChannels, nProfiles, nHeis]
842 842 """
843 843 if profileList != None:
844 844 dataOut.data = dataOut.data[:,profileList,:]
845 845
846 846 if profileRangeList != None:
847 847 minIndex = profileRangeList[0]
848 848 maxIndex = profileRangeList[1]
849 849 profileList = range(minIndex, maxIndex+1)
850 850
851 851 dataOut.data = dataOut.data[:,minIndex:maxIndex+1,:]
852 852
853 853 if rangeList != None:
854 854
855 855 profileList = []
856 856
857 857 for thisRange in rangeList:
858 858 minIndex = thisRange[0]
859 859 maxIndex = thisRange[1]
860 860
861 861 profileList.extend(range(minIndex, maxIndex+1))
862 862
863 863 dataOut.data = dataOut.data[:,profileList,:]
864 864
865 865 dataOut.nProfiles = len(profileList)
866 866 dataOut.profileIndex = dataOut.nProfiles - 1
867 867 dataOut.flagNoData = False
868 868
869 869 return True
870 870
871 871 """
872 872 data dimension = [nChannels, nHeis]
873 873 """
874 874
875 875 if profileList != None:
876 876
877 877 if self.isThisProfileInList(dataOut.profileIndex, profileList):
878 878
879 879 self.nProfiles = len(profileList)
880 880 dataOut.nProfiles = self.nProfiles
881 881 dataOut.profileIndex = self.profileIndex
882 882 dataOut.flagNoData = False
883 883
884 884 self.incProfileIndex()
885 885 return True
886 886
887 887 if profileRangeList != None:
888 888
889 889 minIndex = profileRangeList[0]
890 890 maxIndex = profileRangeList[1]
891 891
892 892 if self.isThisProfileInRange(dataOut.profileIndex, minIndex, maxIndex):
893 893
894 894 self.nProfiles = maxIndex - minIndex + 1
895 895 dataOut.nProfiles = self.nProfiles
896 896 dataOut.profileIndex = self.profileIndex
897 897 dataOut.flagNoData = False
898 898
899 899 self.incProfileIndex()
900 900 return True
901 901
902 902 if rangeList != None:
903 903
904 904 nProfiles = 0
905 905
906 906 for thisRange in rangeList:
907 907 minIndex = thisRange[0]
908 908 maxIndex = thisRange[1]
909 909
910 910 nProfiles += maxIndex - minIndex + 1
911 911
912 912 for thisRange in rangeList:
913 913
914 914 minIndex = thisRange[0]
915 915 maxIndex = thisRange[1]
916 916
917 917 if self.isThisProfileInRange(dataOut.profileIndex, minIndex, maxIndex):
918 918
919 919 self.nProfiles = nProfiles
920 920 dataOut.nProfiles = self.nProfiles
921 921 dataOut.profileIndex = self.profileIndex
922 922 dataOut.flagNoData = False
923 923
924 924 self.incProfileIndex()
925 925
926 926 break
927 927
928 928 return True
929 929
930 930
931 931 if beam != None: #beam is only for AMISR data
932 932 if self.isThisProfileInList(dataOut.profileIndex, dataOut.beamRangeDict[beam]):
933 933 dataOut.flagNoData = False
934 934 dataOut.profileIndex = self.profileIndex
935 935
936 936 self.incProfileIndex()
937 937
938 938 return True
939 939
940 940 raise ValueError, "ProfileSelector needs profileList, profileRangeList or rangeList parameter"
941 941
942 942 return False
943 943
944 944 class Reshaper(Operation):
945 945
946 946 def __init__(self, **kwargs):
947 947
948 948 Operation.__init__(self, **kwargs)
949 949
950 950 self.__buffer = None
951 951 self.__nitems = 0
952 952
953 953 def __appendProfile(self, dataOut, nTxs):
954 954
955 955 if self.__buffer is None:
956 956 shape = (dataOut.nChannels, int(dataOut.nHeights/nTxs) )
957 957 self.__buffer = numpy.empty(shape, dtype = dataOut.data.dtype)
958 958
959 959 ini = dataOut.nHeights * self.__nitems
960 960 end = ini + dataOut.nHeights
961 961
962 962 self.__buffer[:, ini:end] = dataOut.data
963 963
964 964 self.__nitems += 1
965 965
966 966 return int(self.__nitems*nTxs)
967 967
968 968 def __getBuffer(self):
969 969
970 970 if self.__nitems == int(1./self.__nTxs):
971 971
972 972 self.__nitems = 0
973 973
974 974 return self.__buffer.copy()
975 975
976 976 return None
977 977
978 978 def __checkInputs(self, dataOut, shape, nTxs):
979 979
980 980 if shape is None and nTxs is None:
981 981 raise ValueError, "Reshaper: shape of factor should be defined"
982 982
983 983 if nTxs:
984 984 if nTxs < 0:
985 985 raise ValueError, "nTxs should be greater than 0"
986 986
987 987 if nTxs < 1 and dataOut.nProfiles % (1./nTxs) != 0:
988 988 raise ValueError, "nProfiles= %d is not divisibled by (1./nTxs) = %f" %(dataOut.nProfiles, (1./nTxs))
989 989
990 990 shape = [dataOut.nChannels, dataOut.nProfiles*nTxs, dataOut.nHeights/nTxs]
991 991
992 992 return shape, nTxs
993 993
994 994 if len(shape) != 2 and len(shape) != 3:
995 995 raise ValueError, "shape dimension should be equal to 2 or 3. shape = (nProfiles, nHeis) or (nChannels, nProfiles, nHeis). Actually shape = (%d, %d, %d)" %(dataOut.nChannels, dataOut.nProfiles, dataOut.nHeights)
996 996
997 997 if len(shape) == 2:
998 998 shape_tuple = [dataOut.nChannels]
999 999 shape_tuple.extend(shape)
1000 1000 else:
1001 1001 shape_tuple = list(shape)
1002 1002
1003 1003 nTxs = 1.0*shape_tuple[1]/dataOut.nProfiles
1004 1004
1005 1005 return shape_tuple, nTxs
1006 1006
1007 1007 def run(self, dataOut, shape=None, nTxs=None):
1008 1008
1009 1009 shape_tuple, self.__nTxs = self.__checkInputs(dataOut, shape, nTxs)
1010 1010
1011 1011 dataOut.flagNoData = True
1012 1012 profileIndex = None
1013 1013
1014 1014 if dataOut.flagDataAsBlock:
1015 1015
1016 1016 dataOut.data = numpy.reshape(dataOut.data, shape_tuple)
1017 1017 dataOut.flagNoData = False
1018 1018
1019 1019 profileIndex = int(dataOut.nProfiles*self.__nTxs) - 1
1020 1020
1021 1021 else:
1022 1022
1023 1023 if self.__nTxs < 1:
1024 1024
1025 1025 self.__appendProfile(dataOut, self.__nTxs)
1026 1026 new_data = self.__getBuffer()
1027 1027
1028 1028 if new_data is not None:
1029 1029 dataOut.data = new_data
1030 1030 dataOut.flagNoData = False
1031 1031
1032 1032 profileIndex = dataOut.profileIndex*nTxs
1033 1033
1034 1034 else:
1035 1035 raise ValueError, "nTxs should be greater than 0 and lower than 1, or use VoltageReader(..., getblock=True)"
1036 1036
1037 1037 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1038 1038
1039 1039 dataOut.heightList = numpy.arange(dataOut.nHeights/self.__nTxs) * deltaHeight + dataOut.heightList[0]
1040 1040
1041 1041 dataOut.nProfiles = int(dataOut.nProfiles*self.__nTxs)
1042 1042
1043 1043 dataOut.profileIndex = profileIndex
1044 1044
1045 1045 dataOut.ippSeconds /= self.__nTxs
1046 1046
1047 1047 class SplitProfiles(Operation):
1048 1048
1049 1049 def __init__(self, **kwargs):
1050 1050
1051 1051 Operation.__init__(self, **kwargs)
1052 1052
1053 1053 def run(self, dataOut, n):
1054 1054
1055 1055 dataOut.flagNoData = True
1056 1056 profileIndex = None
1057 1057
1058 1058 if dataOut.flagDataAsBlock:
1059 1059
1060 1060 #nchannels, nprofiles, nsamples
1061 1061 shape = dataOut.data.shape
1062 1062
1063 1063 if shape[2] % n != 0:
1064 1064 raise ValueError, "Could not split the data, n=%d has to be multiple of %d" %(n, shape[2])
1065 1065
1066 1066 new_shape = shape[0], shape[1]*n, shape[2]/n
1067 1067
1068 1068 dataOut.data = numpy.reshape(dataOut.data, new_shape)
1069 1069 dataOut.flagNoData = False
1070 1070
1071 1071 profileIndex = int(dataOut.nProfiles/n) - 1
1072 1072
1073 1073 else:
1074 1074
1075 1075 raise ValueError, "Could not split the data when is read Profile by Profile. Use VoltageReader(..., getblock=True)"
1076 1076
1077 1077 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1078 1078
1079 1079 dataOut.heightList = numpy.arange(dataOut.nHeights/n) * deltaHeight + dataOut.heightList[0]
1080 1080
1081 1081 dataOut.nProfiles = int(dataOut.nProfiles*n)
1082 1082
1083 1083 dataOut.profileIndex = profileIndex
1084 1084
1085 1085 dataOut.ippSeconds /= n
1086 1086
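# Standalone sketch (toy sizes, not from the original module) of the reshape done by
# SplitProfiles on block data: each profile of nSamples heights is split into n shorter
# profiles, so the profile count grows by n while the height count (and ippSeconds)
# shrinks by n.
import numpy
nChannels, nProfiles, nSamples, n = 2, 4, 12, 3         # assumed sizes, nSamples % n == 0
data = numpy.arange(nChannels * nProfiles * nSamples).reshape(nChannels, nProfiles, nSamples)
new_shape = (nChannels, nProfiles * n, nSamples // n)
split = data.reshape(new_shape)
assert split.shape == (2, 12, 4)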
1087 1087 class CombineProfiles(Operation):
1088 1088
1089 1089 def __init__(self, **kwargs):
1090 1090
1091 1091 Operation.__init__(self, **kwargs)
1092 1092
1093 1093 self.__remData = None
1094 1094 self.__profileIndex = 0
1095 1095
1096 1096 def run(self, dataOut, n):
1097 1097
1098 1098 dataOut.flagNoData = True
1099 1099 profileIndex = None
1100 1100
1101 1101 if dataOut.flagDataAsBlock:
1102 1102
1103 1103 #nchannels, nprofiles, nsamples
1104 1104 shape = dataOut.data.shape
1105 1105 new_shape = shape[0], shape[1]/n, shape[2]*n
1106 1106
1107 1107 if shape[1] % n != 0:
1108 1108 raise ValueError, "Could not split the data, n=%d has to be multiple of %d" %(n, shape[1])
1109 1109
1110 1110 dataOut.data = numpy.reshape(dataOut.data, new_shape)
1111 1111 dataOut.flagNoData = False
1112 1112
1113 1113 profileIndex = int(dataOut.nProfiles*n) - 1
1114 1114
1115 1115 else:
1116 1116
1117 1117 #nchannels, nsamples
1118 1118 if self.__remData is None:
1119 1119 newData = dataOut.data
1120 1120 else:
1121 1121 newData = numpy.concatenate((self.__remData, dataOut.data), axis=1)
1122 1122
1123 1123 self.__profileIndex += 1
1124 1124
1125 1125 if self.__profileIndex < n:
1126 1126 self.__remData = newData
1127 1127 #continue
1128 1128 return
1129 1129
1130 1130 self.__profileIndex = 0
1131 1131 self.__remData = None
1132 1132
1133 1133 dataOut.data = newData
1134 1134 dataOut.flagNoData = False
1135 1135
1136 1136 profileIndex = dataOut.profileIndex/n
1137 1137
1138 1138
1139 1139 deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1140 1140
1141 1141 dataOut.heightList = numpy.arange(dataOut.nHeights*n) * deltaHeight + dataOut.heightList[0]
1142 1142
1143 1143 dataOut.nProfiles = int(dataOut.nProfiles/n)
1144 1144
1145 1145 dataOut.profileIndex = profileIndex
1146 1146
1147 1147 dataOut.ippSeconds *= n
1148 1148
1149 1149 # import collections
1150 1150 # from scipy.stats import mode
1151 1151 #
1152 1152 # class Synchronize(Operation):
1153 1153 #
1154 1154 # isConfig = False
1155 1155 # __profIndex = 0
1156 1156 #
1157 1157 # def __init__(self, **kwargs):
1158 1158 #
1159 1159 # Operation.__init__(self, **kwargs)
1160 1160 # # self.isConfig = False
1161 1161 # self.__powBuffer = None
1162 1162 # self.__startIndex = 0
1163 1163 # self.__pulseFound = False
1164 1164 #
1165 1165 # def __findTxPulse(self, dataOut, channel=0, pulse_with = None):
1166 1166 #
1167 1167 # #Read data
1168 1168 #
1169 1169 # powerdB = dataOut.getPower(channel = channel)
1170 1170 # noisedB = dataOut.getNoise(channel = channel)[0]
1171 1171 #
1172 1172 # self.__powBuffer.extend(powerdB.flatten())
1173 1173 #
1174 1174 # dataArray = numpy.array(self.__powBuffer)
1175 1175 #
1176 1176 # filteredPower = numpy.correlate(dataArray, dataArray[0:self.__nSamples], "same")
1177 1177 #
1178 1178 # maxValue = numpy.nanmax(filteredPower)
1179 1179 #
1180 1180 # if maxValue < noisedB + 10:
1181 1181 # #No transmission pulse was found
1182 1182 # return None
1183 1183 #
1184 1184 # maxValuesIndex = numpy.where(filteredPower > maxValue - 0.1*abs(maxValue))[0]
1185 1185 #
1186 1186 # if len(maxValuesIndex) < 2:
1187 1187 # #Only a single one-baud transmission pulse was found, waiting for the next TX
1188 1188 # return None
1189 1189 #
1190 1190 # phasedMaxValuesIndex = maxValuesIndex - self.__nSamples
1191 1191 #
1192 1192 # #Select only values spaced nSamples apart
1193 1193 # pulseIndex = numpy.intersect1d(maxValuesIndex, phasedMaxValuesIndex)
1194 1194 #
1195 1195 # if len(pulseIndex) < 2:
1196 1196 # #Only one transmission pulse wider than one baud was found
1197 1197 # return None
1198 1198 #
1199 1199 # spacing = pulseIndex[1:] - pulseIndex[:-1]
1200 1200 #
1201 1201 # #remove signals spaced less than 10 units or samples apart
1202 1202 # #(there should be no IPP shorter than 10 units)
1203 1203 #
1204 1204 # realIndex = numpy.where(spacing > 10 )[0]
1205 1205 #
1206 1206 # if len(realIndex) < 2:
1207 1207 # #Only one transmission pulse wider than one baud was found
1208 1208 # return None
1209 1209 #
1210 1210 # #Remove wide pulses (keep only the spacing between IPPs)
1211 1211 # realPulseIndex = pulseIndex[realIndex]
1212 1212 #
1213 1213 # period = mode(realPulseIndex[1:] - realPulseIndex[:-1])[0][0]
1214 1214 #
1215 1215 # print "IPP = %d samples" %period
1216 1216 #
1217 1217 # self.__newNSamples = dataOut.nHeights #int(period)
1218 1218 # self.__startIndex = int(realPulseIndex[0])
1219 1219 #
1220 1220 # return 1
1221 1221 #
1222 1222 #
1223 1223 # def setup(self, nSamples, nChannels, buffer_size = 4):
1224 1224 #
1225 1225 # self.__powBuffer = collections.deque(numpy.zeros( buffer_size*nSamples,dtype=numpy.float),
1226 1226 # maxlen = buffer_size*nSamples)
1227 1227 #
1228 1228 # bufferList = []
1229 1229 #
1230 1230 # for i in range(nChannels):
1231 1231 # bufferByChannel = collections.deque(numpy.zeros( buffer_size*nSamples, dtype=numpy.complex) + numpy.NAN,
1232 1232 # maxlen = buffer_size*nSamples)
1233 1233 #
1234 1234 # bufferList.append(bufferByChannel)
1235 1235 #
1236 1236 # self.__nSamples = nSamples
1237 1237 # self.__nChannels = nChannels
1238 1238 # self.__bufferList = bufferList
1239 1239 #
1240 1240 # def run(self, dataOut, channel = 0):
1241 1241 #
1242 1242 # if not self.isConfig:
1243 1243 # nSamples = dataOut.nHeights
1244 1244 # nChannels = dataOut.nChannels
1245 1245 # self.setup(nSamples, nChannels)
1246 1246 # self.isConfig = True
1247 1247 #
1248 1248 # #Append new data to internal buffer
1249 1249 # for thisChannel in range(self.__nChannels):
1250 1250 # bufferByChannel = self.__bufferList[thisChannel]
1251 1251 # bufferByChannel.extend(dataOut.data[thisChannel])
1252 1252 #
1253 1253 # if self.__pulseFound:
1254 1254 # self.__startIndex -= self.__nSamples
1255 1255 #
1256 1256 # #Finding Tx Pulse
1257 1257 # if not self.__pulseFound:
1258 1258 # indexFound = self.__findTxPulse(dataOut, channel)
1259 1259 #
1260 1260 # if indexFound == None:
1261 1261 # dataOut.flagNoData = True
1262 1262 # return
1263 1263 #
1264 1264 # self.__arrayBuffer = numpy.zeros((self.__nChannels, self.__newNSamples), dtype = numpy.complex)
1265 1265 # self.__pulseFound = True
1266 1266 # self.__startIndex = indexFound
1267 1267 #
1268 1268 # #If pulse was found ...
1269 1269 # for thisChannel in range(self.__nChannels):
1270 1270 # bufferByChannel = self.__bufferList[thisChannel]
1271 1271 # #print self.__startIndex
1272 1272 # x = numpy.array(bufferByChannel)
1273 1273 # self.__arrayBuffer[thisChannel] = x[self.__startIndex:self.__startIndex+self.__newNSamples]
1274 1274 #
1275 1275 # deltaHeight = dataOut.heightList[1] - dataOut.heightList[0]
1276 1276 # dataOut.heightList = numpy.arange(self.__newNSamples)*deltaHeight
1277 1277 # # dataOut.ippSeconds = (self.__newNSamples / deltaHeight)/1e6
1278 1278 #
1279 1279 # dataOut.data = self.__arrayBuffer
1280 1280 #
1281 1281 # self.__startIndex += self.__newNSamples
1282 1282 #
1283 1283 # return
@@ -1,1 +1,1
1 <Project description="test" id="191" name="john"><ReadUnit datatype="VoltageReader" id="1911" inputId="0" name="VoltageReader"><Operation id="19111" name="run" priority="1" type="self"><Parameter format="str" id="191111" name="server" value="john" /></Operation></ReadUnit><ProcUnit datatype="SpectraProc" id="1912" inputId="1911" name="SpectraProc"><Operation id="19121" name="run" priority="1" type="self"><Parameter format="int" id="191211" name="nFFTPoints" value="64" /></Operation><Operation id="19122" name="SpectraPlot" priority="2" type="other"><Parameter format="int" id="191221" name="id" value="500" /><Parameter format="str" id="191222" name="wintitle" value="Jicamarca Radio Observatory" /><Parameter format="int" id="191223" name="showprofile" value="0" /></Operation></ProcUnit></Project> No newline at end of file
1 <Project description="test" id="191" name="john"><ReadUnit datatype="VoltageReader" id="1911" inputId="0" name="VoltageReader"><Operation id="19111" name="run" priority="1" type="self"><Parameter format="str" id="191111" name="server" value="tcp://10.10.10.121:5555" /></Operation></ReadUnit><ProcUnit datatype="VoltageProc" id="1912" inputId="1911" name="VoltageProc"><Operation id="19121" name="run" priority="1" type="self" /></ProcUnit></Project> No newline at end of file