Miguel Valdez - r803:3f5bf501c4e1
@@ -1,73 +1,74
1 1 VERSIONS:
2 2
3 3 2.1.2:
4 4 -jroutils_ftp.py: Bug fixed: any error while sending a file stopped the server thread.
5 5 The server thread opens and closes the remote server connection each time the file list is sent.
6 6 -jroplot_spectra.py: The noise path was not being created when noise data was saved.
7 7 -jroIO_base.py: startTime can be greater than endTime for ranges that span midnight. Example: SpreadF [18:00 - 07:00] (a short sketch follows this changelog)
8 8
9 9 2.1.3:
10 10 -jroplot_heispectra.py: SpectraHeisScope was not showing the right channels
11 11 -jroproc_voltage.py: Bug fixed selecting profiles (self.nProfiles took a wrong value);
12 12 bug fixed selecting heights by block (profiles were selected instead of heights)
13 13 -jroproc_voltage.py: New feature added: decoding data by block using FFT.
14 14 -jroIO_heispectra.py: Bug fixed in FitsReader. Using a local Fits instance instead of schainpy.model.data.jrodata.Fits.
15 15 -jroIO_heispectra.py: Channel index list does not exist.
16 16
17 17 2.1.3.1:
18 18 -GUI: every icon was resized
19 19 -jroproc_voltage.py: Print a message when the "Read from code" option is selected and the code is not defined inside the data file
20 20
21 21 2.1.3.2:
22 22 -GUI: user interaction enhanced
23 23 -controller_api.py: Safe access to ControllerThread
24 24
25 25 2.1.3.3:
26 26 -Colored Button Icons were added to GUI
27 27
28 28 2.1.4:
29 29 -Sending error notifications to signal chain administrator
30 30 -Login to email server added
31 31
32 32 2.1.4.1:
33 33 -Send notifications when an error different from ValueError is detected
34 34
35 35 2.1.4.2:
36 36 -A new Plotter class was added
37 37 -Project.start() no longer accepts a filename parameter
38 38
39 39 2.1.5:
40 40 -serializer module added to Signal Chain
41 41 -jroplotter.py added to Signal Chain
42 42
43 43 2.2.0:
44 44 -GUI: use of external plotter
45 45 -Compatible with matplotlib 1.5.0
46 46
47 47 2.2.1:
48 48 -Bugs fixed in GUI
49 49 -Views were improved in GUI
50 50 -Support for MST-ISR experiments
51 51 -Bug fixed getting noise using the Hildebrand method (minimum number of points > 20%)
52 52 -handleError added to jroplotter.py
53 53
54 54 2.2.2:
55 55 -VoltageProc: ProfileSelector, Reshape, and Decoder with nTxs!=1 and getblock=True were tested
56 56 -Rawdata and testRawdata.py added to Signal Chain project
57 57
58 58 2.2.3:
59 59 -Bug fixed in GUI: error reading the Code value
60 60 -Bug fixed in GUI: the Flip option always requires the channelList field
61 61 -Bug fixed in jrodata: when one branch modified a value in "dataOut" (example: dataOut.code), that value
62 62 was modified for every branch (because it was a reference). The fix was made in data.copy().
63 63 -Bug fixed in jroproc_voltage.profileSelector(): rangeList was replaced by profileRangeList.
64 64
65 65
66 66 2.2.3.1:
67 67 -Filtering blocks by time has been added.
68 68 -Bug fixed plotting RTI, CoherenceMap and others using the xmin and xmax parameters. The first day worked
69 69 properly but the following days did not.
70 70
71 71 2.2.4:
72 72 -jroproc_spectra_lags.py added to schainpy
73 -Bug fixed in schainGUI: project created the same procUnit in some cases.
73 -Bug fixed in schainGUI: ProcUnit was created with the same id in some cases.
74 -Bug fixed in jroHeaderIO: Header size validation.
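The 2.1.2 entry above describes time ranges that wrap past midnight (startTime greater than endTime). A minimal sketch of that check, mirroring the isTimeInRange() helper in jroIO_base.py further down in this commit; the in_range name and the example times are illustrative only:

    import datetime

    def in_range(t, start, end):
        # Normal range: valid when start <= t <= end.
        # Wrapped range (end < start): invalid only in the gap between end and start.
        if end >= start:
            return start <= t <= end
        return not (end < t < start)

    # SpreadF-style run from 18:00 to 07:00 of the next day
    print(in_range(datetime.time(23, 30), datetime.time(18, 0), datetime.time(7, 0)))  # True
    print(in_range(datetime.time(12, 0), datetime.time(18, 0), datetime.time(7, 0)))   # False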
@@ -1,7 +1,7
1 1 '''
2 2 Created on Feb 7, 2012
3 3
4 4 @author $Author$
5 5 @version $Id$
6 6 '''
7 __version__ = "2.2.3.3" No newline at end of file
7 __version__ = "2.2.4" No newline at end of file
@@ -1,758 +1,762
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JROHeaderIO.py 151 2012-10-31 19:00:51Z murco $
5 5 '''
6 6 import sys
7 7 import numpy
8 8 import copy
9 9 import datetime
10 10
11 11 SPEED_OF_LIGHT = 299792458
12 12 SPEED_OF_LIGHT = 3e8
13 13
14 14 BASIC_STRUCTURE = numpy.dtype([
15 15 ('nSize','<u4'),
16 16 ('nVersion','<u2'),
17 17 ('nDataBlockId','<u4'),
18 18 ('nUtime','<u4'),
19 19 ('nMilsec','<u2'),
20 20 ('nTimezone','<i2'),
21 21 ('nDstflag','<i2'),
22 22 ('nErrorCount','<u4')
23 23 ])
24 24
25 25 SYSTEM_STRUCTURE = numpy.dtype([
26 26 ('nSize','<u4'),
27 27 ('nNumSamples','<u4'),
28 28 ('nNumProfiles','<u4'),
29 29 ('nNumChannels','<u4'),
30 30 ('nADCResolution','<u4'),
31 31 ('nPCDIOBusWidth','<u4'),
32 32 ])
33 33
34 34 RADAR_STRUCTURE = numpy.dtype([
35 35 ('nSize','<u4'),
36 36 ('nExpType','<u4'),
37 37 ('nNTx','<u4'),
38 38 ('fIpp','<f4'),
39 39 ('fTxA','<f4'),
40 40 ('fTxB','<f4'),
41 41 ('nNumWindows','<u4'),
42 42 ('nNumTaus','<u4'),
43 43 ('nCodeType','<u4'),
44 44 ('nLine6Function','<u4'),
45 45 ('nLine5Function','<u4'),
46 46 ('fClock','<f4'),
47 47 ('nPrePulseBefore','<u4'),
48 48 ('nPrePulseAfter','<u4'),
49 49 ('sRangeIPP','<a20'),
50 50 ('sRangeTxA','<a20'),
51 51 ('sRangeTxB','<a20'),
52 52 ])
53 53
54 54 SAMPLING_STRUCTURE = numpy.dtype([('h0','<f4'),('dh','<f4'),('nsa','<u4')])
55 55
56 56
57 57 PROCESSING_STRUCTURE = numpy.dtype([
58 58 ('nSize','<u4'),
59 59 ('nDataType','<u4'),
60 60 ('nSizeOfDataBlock','<u4'),
61 61 ('nProfilesperBlock','<u4'),
62 62 ('nDataBlocksperFile','<u4'),
63 63 ('nNumWindows','<u4'),
64 64 ('nProcessFlags','<u4'),
65 65 ('nCoherentIntegrations','<u4'),
66 66 ('nIncoherentIntegrations','<u4'),
67 67 ('nTotalSpectra','<u4')
68 68 ])
69 69
70 70 class Header(object):
71 71
72 72 def __init__(self):
73 73 raise NotImplementedError
74 74
75 75 def copy(self):
76 76 return copy.deepcopy(self)
77 77
78 78 def read(self):
79 79
80 80 raise NotImplementedError
81 81
82 82 def write(self):
83 83
84 84 raise NotImplementedError
85 85
86 86 def printInfo(self):
87 87
88 88 message = "#"*50 + "\n"
89 89 message += self.__class__.__name__.upper() + "\n"
90 90 message += "#"*50 + "\n"
91 91
92 92 keyList = self.__dict__.keys()
93 93 keyList.sort()
94 94
95 95 for key in keyList:
96 96 message += "%s = %s" %(key, self.__dict__[key]) + "\n"
97 97
98 98 if "size" not in keyList:
99 99 attr = getattr(self, "size")
100 100
101 101 if attr:
102 102 message += "%s = %s" %("size", attr) + "\n"
103 103
104 104 print message
105 105
106 106 class BasicHeader(Header):
107 107
108 108 size = None
109 109 version = None
110 110 dataBlock = None
111 111 utc = None
112 112 ltc = None
113 113 miliSecond = None
114 114 timeZone = None
115 115 dstFlag = None
116 116 errorCount = None
117 117 datatime = None
118 118
119 119 __LOCALTIME = None
120 120
121 121 def __init__(self, useLocalTime=True):
122 122
123 123 self.size = 24
124 124 self.version = 0
125 125 self.dataBlock = 0
126 126 self.utc = 0
127 127 self.miliSecond = 0
128 128 self.timeZone = 0
129 129 self.dstFlag = 0
130 130 self.errorCount = 0
131 131
132 132 self.useLocalTime = useLocalTime
133 133
134 134 def read(self, fp):
135 135
136 136 try:
137 137 header = numpy.fromfile(fp, BASIC_STRUCTURE,1)
138 138
139 139 except Exception, e:
140 140 print "BasicHeader: "
141 141 print e
142 142 return 0
143 143
144 144 self.size = int(header['nSize'][0])
145 145 self.version = int(header['nVersion'][0])
146 146 self.dataBlock = int(header['nDataBlockId'][0])
147 147 self.utc = int(header['nUtime'][0])
148 148 self.miliSecond = int(header['nMilsec'][0])
149 149 self.timeZone = int(header['nTimezone'][0])
150 150 self.dstFlag = int(header['nDstflag'][0])
151 151 self.errorCount = int(header['nErrorCount'][0])
152
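# A valid basic header is at least 24 bytes (the size of BASIC_STRUCTURE); a smaller nSize means a corrupt or truncated header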
153 if self.size < 24:
154 return 0
152 155
153 156 return 1
154 157
155 158 def write(self, fp):
156 159
157 160 headerTuple = (self.size,self.version,self.dataBlock,self.utc,self.miliSecond,self.timeZone,self.dstFlag,self.errorCount)
158 161 header = numpy.array(headerTuple, BASIC_STRUCTURE)
159 162 header.tofile(fp)
160 163
161 164 return 1
162 165
163 166 def get_ltc(self):
164 167
165 168 return self.utc - self.timeZone*60
166 169
167 170 def set_ltc(self, value):
168 171
169 172 self.utc = value + self.timeZone*60
170 173
171 174 def get_datatime(self):
172 175
173 176 return datetime.datetime.utcfromtimestamp(self.ltc)
174 177
175 178 ltc = property(get_ltc, set_ltc)
176 179 datatime = property(get_datatime)
177 180
178 181 class SystemHeader(Header):
179 182
180 183 size = None
181 184 nSamples = None
182 185 nProfiles = None
183 186 nChannels = None
184 187 adcResolution = None
185 188 pciDioBusWidth = None
186 189
187 190 def __init__(self, nSamples=0, nProfiles=0, nChannels=0, adcResolution=14, pciDioBusWith=0):
188 191
189 192 self.size = 24
190 193 self.nSamples = nSamples
191 194 self.nProfiles = nProfiles
192 195 self.nChannels = nChannels
193 196 self.adcResolution = adcResolution
194 197 self.pciDioBusWidth = pciDioBusWith
195 198
196 199 def read(self, fp):
197 200
198 201 startFp = fp.tell()
199 202
200 203 try:
201 204 header = numpy.fromfile(fp,SYSTEM_STRUCTURE,1)
202 205 except Exception, e:
203 206 print "System Header: " + e
204 207 return 0
205 208
206 209 self.size = header['nSize'][0]
207 210 self.nSamples = header['nNumSamples'][0]
208 211 self.nProfiles = header['nNumProfiles'][0]
209 212 self.nChannels = header['nNumChannels'][0]
210 213 self.adcResolution = header['nADCResolution'][0]
211 214 self.pciDioBusWidth = header['nPCDIOBusWidth'][0]
212 215
213 216 endFp = self.size + startFp
214 217
215 218 if fp.tell() > endFp:
216 sys.stderr.write("Warning: System header size is lower than it has to be")
219 sys.stderr.write("Warning %s: Size value read from System Header is lower than it has to be\n" %fp.name)
217 220 return 0
218 221
219 222 if fp.tell() < endFp:
220 sys.stderr.write("Warning: System header size is greater than it is considered")
221
223 sys.stderr.write("Warning %s: Size value read from System Header size is greater than it has to be\n" %fp.name)
224 return 0
225
222 226 return 1
223 227
224 228 def write(self, fp):
225 229
226 230 headerTuple = (self.size,self.nSamples,self.nProfiles,self.nChannels,self.adcResolution,self.pciDioBusWidth)
227 231 header = numpy.array(headerTuple,SYSTEM_STRUCTURE)
228 232 header.tofile(fp)
229 233
230 234 return 1
231 235
232 236 class RadarControllerHeader(Header):
233 237
234 238 expType = None
235 239 nTx = None
236 240 ipp = None
237 241 txA = None
238 242 txB = None
239 243 nWindows = None
240 244 numTaus = None
241 245 codeType = None
242 246 line6Function = None
243 247 line5Function = None
244 248 fClock = None
245 249 prePulseBefore = None
246 250 prePulserAfter = None
247 251 rangeIpp = None
248 252 rangeTxA = None
249 253 rangeTxB = None
250 254
251 255 __size = None
252 256
253 257 def __init__(self, expType=2, nTx=1,
254 258 ippKm=None, txA=0, txB=0,
255 259 nWindows=None, nHeights=None, firstHeight=None, deltaHeight=None,
256 260 numTaus=0, line6Function=0, line5Function=0, fClock=None,
257 261 prePulseBefore=0, prePulseAfter=0,
258 262 codeType=0, nCode=0, nBaud=0, code=None,
259 263 flip1=0, flip2=0):
260 264
261 265 # self.size = 116
262 266 self.expType = expType
263 267 self.nTx = nTx
264 268 self.ipp = ippKm
265 269 self.txA = txA
266 270 self.txB = txB
267 271 self.rangeIpp = ippKm
268 272 self.rangeTxA = txA
269 273 self.rangeTxB = txB
270 274
271 275 self.nWindows = nWindows
272 276 self.numTaus = numTaus
273 277 self.codeType = codeType
274 278 self.line6Function = line6Function
275 279 self.line5Function = line5Function
276 280 self.fClock = fClock
277 281 self.prePulseBefore = prePulseBefore
278 282 self.prePulserAfter = prePulseAfter
279 283
280 284 self.nHeights = nHeights
281 285 self.firstHeight = firstHeight
282 286 self.deltaHeight = deltaHeight
283 287 self.samplesWin = nHeights
284 288
285 289 self.nCode = nCode
286 290 self.nBaud = nBaud
287 291 self.code = code
288 292 self.flip1 = flip1
289 293 self.flip2 = flip2
290 294
291 295 self.code_size = int(numpy.ceil(self.nBaud/32.))*self.nCode*4
292 296 # self.dynamic = numpy.array([],numpy.dtype('byte'))
293 297
294 298 if self.fClock is None and self.deltaHeight is not None:
295 299 self.fClock = 0.15/(deltaHeight*1e-6) #0.15Km / (height * 1u)
296 300
297 301 def read(self, fp):
298 302
299 303
300 304 startFp = fp.tell()
301 305 try:
302 306 header = numpy.fromfile(fp,RADAR_STRUCTURE,1)
303 307 except Exception, e:
304 308 print "RadarControllerHeader: " + e
305 309 return 0
306 310
307 311 size = int(header['nSize'][0])
308 312 self.expType = int(header['nExpType'][0])
309 313 self.nTx = int(header['nNTx'][0])
310 314 self.ipp = float(header['fIpp'][0])
311 315 self.txA = float(header['fTxA'][0])
312 316 self.txB = float(header['fTxB'][0])
313 317 self.nWindows = int(header['nNumWindows'][0])
314 318 self.numTaus = int(header['nNumTaus'][0])
315 319 self.codeType = int(header['nCodeType'][0])
316 320 self.line6Function = int(header['nLine6Function'][0])
317 321 self.line5Function = int(header['nLine5Function'][0])
318 322 self.fClock = float(header['fClock'][0])
319 323 self.prePulseBefore = int(header['nPrePulseBefore'][0])
320 324 self.prePulserAfter = int(header['nPrePulseAfter'][0])
321 325 self.rangeIpp = header['sRangeIPP'][0]
322 326 self.rangeTxA = header['sRangeTxA'][0]
323 327 self.rangeTxB = header['sRangeTxB'][0]
324 328
325 329 samplingWindow = numpy.fromfile(fp,SAMPLING_STRUCTURE,self.nWindows)
326 330
327 331 self.nHeights = int(numpy.sum(samplingWindow['nsa']))
328 332 self.firstHeight = samplingWindow['h0']
329 333 self.deltaHeight = samplingWindow['dh']
330 334 self.samplesWin = samplingWindow['nsa']
331 335
332 336 self.Taus = numpy.fromfile(fp,'<f4',self.numTaus)
333 337
334 338 self.code_size = 0
335 339 if self.codeType != 0:
336 340 self.nCode = int(numpy.fromfile(fp,'<u4',1))
337 341 self.nBaud = int(numpy.fromfile(fp,'<u4',1))
338 342
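# The transmit code is stored packed, 32 bauds per 32-bit word; the loop below unpacks one bit per baud (the last baud of each 32-baud group sits in the least-significant bit) and then maps {0,1} to {-1,+1}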
339 343 code = numpy.empty([self.nCode,self.nBaud],dtype='i1')
340 344 for ic in range(self.nCode):
341 345 temp = numpy.fromfile(fp,'u4',int(numpy.ceil(self.nBaud/32.)))
342 346 for ib in range(self.nBaud-1,-1,-1):
343 347 code[ic,ib] = temp[ib/32]%2
344 348 temp[ib/32] = temp[ib/32]/2
345 349
346 350 self.code = 2.0*code - 1.0
347 351 self.code_size = int(numpy.ceil(self.nBaud/32.))*self.nCode*4
348 352
349 353 # if self.line5Function == RCfunction.FLIP:
350 354 # self.flip1 = numpy.fromfile(fp,'<u4',1)
351 355 #
352 356 # if self.line6Function == RCfunction.FLIP:
353 357 # self.flip2 = numpy.fromfile(fp,'<u4',1)
354 358
355 359 endFp = size + startFp
356 360
357 361 if fp.tell() != endFp:
358 362 # fp.seek(endFp)
359 print "Radar Controller Header is not consistent read[%d] != header[%d]" %(fp.tell()-startFp,endFp)
363 print "%s: Radar Controller Header size is not consistent: from data [%d] != from header field [%d]" %(fp.name, fp.tell()-startFp, size)
360 364 # return 0
361 365
362 366 if fp.tell() > endFp:
363 sys.stderr.write("Warning: Radar Controller header size is lower than it has to be")
367 sys.stderr.write("Warning %s: Size value read from Radar Controller header is lower than it has to be\n" %fp.name)
364 368 # return 0
365 369
366 370 if fp.tell() < endFp:
367 sys.stderr.write("Warning: Radar Controller header size is greater than it is considered")
371 sys.stderr.write("Warning %s: Size value read from Radar Controller header is greater than it has to be\n" %fp.name)
368 372
369 373
370 374 return 1
371 375
372 376 def write(self, fp):
373 377
374 378 headerTuple = (self.size,
375 379 self.expType,
376 380 self.nTx,
377 381 self.ipp,
378 382 self.txA,
379 383 self.txB,
380 384 self.nWindows,
381 385 self.numTaus,
382 386 self.codeType,
383 387 self.line6Function,
384 388 self.line5Function,
385 389 self.fClock,
386 390 self.prePulseBefore,
387 391 self.prePulserAfter,
388 392 self.rangeIpp,
389 393 self.rangeTxA,
390 394 self.rangeTxB)
391 395
392 396 header = numpy.array(headerTuple,RADAR_STRUCTURE)
393 397 header.tofile(fp)
394 398
395 399 sampleWindowTuple = (self.firstHeight,self.deltaHeight,self.samplesWin)
396 400 samplingWindow = numpy.array(sampleWindowTuple,SAMPLING_STRUCTURE)
397 401 samplingWindow.tofile(fp)
398 402
399 403 if self.numTaus > 0:
400 404 self.Taus.tofile(fp)
401 405
402 406 if self.codeType !=0:
403 407 nCode = numpy.array(self.nCode, '<u4')
404 408 nCode.tofile(fp)
405 409 nBaud = numpy.array(self.nBaud, '<u4')
406 410 nBaud.tofile(fp)
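# Inverse of read(): map the {-1,+1} code back to {0,1} and repack each 32-baud group into a 32-bit word, keeping the last baud of each group in the least-significant bit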
407 411 code1 = (self.code + 1.0)/2.
408 412
409 413 for ic in range(self.nCode):
410 414 tempx = numpy.zeros(numpy.ceil(self.nBaud/32.))
411 415 start = 0
412 416 end = 32
413 417 for i in range(len(tempx)):
414 418 code_selected = code1[ic,start:end]
415 419 for j in range(len(code_selected)-1,-1,-1):
416 420 if code_selected[j] == 1:
417 421 tempx[i] = tempx[i] + 2**(len(code_selected)-1-j)
418 422 start = start + 32
419 423 end = end + 32
420 424
421 425 tempx = tempx.astype('u4')
422 426 tempx.tofile(fp)
423 427
424 428 # if self.line5Function == RCfunction.FLIP:
425 429 # self.flip1.tofile(fp)
426 430 #
427 431 # if self.line6Function == RCfunction.FLIP:
428 432 # self.flip2.tofile(fp)
429 433
430 434 return 1
431 435
432 436 def get_ippSeconds(self):
433 437 '''
434 438 '''
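# self.ipp is stored in km: the factor 1000 converts to meters and the factor 2 accounts for the two-way path, so ippSeconds is the round-trip time at the speed of light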
435 439 ippSeconds = 2.0 * 1000 * self.ipp / SPEED_OF_LIGHT
436 440
437 441 return ippSeconds
438 442
439 443 def set_ippSeconds(self, ippSeconds):
440 444 '''
441 445 '''
442 446
443 447 self.ipp = ippSeconds * SPEED_OF_LIGHT / (2.0*1000)
444 448
445 449 return
446 450
447 451 def get_size(self):
448 452
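# 116 bytes of fixed RADAR_STRUCTURE fields + 12 bytes (h0, dh, nsa) per sampling window + 4 bytes per tau; the optional code section below adds the nCode/nBaud counters plus the packed code words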
449 453 self.__size = 116 + 12*self.nWindows + 4*self.numTaus
450 454
451 455 if self.codeType != 0:
452 456 self.__size += 4 + 4 + 4*self.nCode*numpy.ceil(self.nBaud/32.)
453 457
454 458 return self.__size
455 459
456 460 def set_size(self, value):
457 461
458 462 raise IOError, "size is a property and it cannot be set, just read"
459 463
460 464 return
461 465
462 466 ippSeconds = property(get_ippSeconds, set_ippSeconds)
463 467 size = property(get_size, set_size)
464 468
465 469 class ProcessingHeader(Header):
466 470
467 471 # size = None
468 472 dtype = None
469 473 blockSize = None
470 474 profilesPerBlock = None
471 475 dataBlocksPerFile = None
472 476 nWindows = None
473 477 processFlags = None
474 478 nCohInt = None
475 479 nIncohInt = None
476 480 totalSpectra = None
477 481
478 482 flag_dc = None
479 483 flag_cspc = None
480 484
481 485 def __init__(self):
482 486
483 487 # self.size = 0
484 488 self.dtype = 0
485 489 self.blockSize = 0
486 490 self.profilesPerBlock = 0
487 491 self.dataBlocksPerFile = 0
488 492 self.nWindows = 0
489 493 self.processFlags = 0
490 494 self.nCohInt = 0
491 495 self.nIncohInt = 0
492 496 self.totalSpectra = 0
493 497
494 498 self.nHeights = 0
495 499 self.firstHeight = 0
496 500 self.deltaHeight = 0
497 501 self.samplesWin = 0
498 502 self.spectraComb = 0
499 503 self.nCode = None
500 504 self.code = None
501 505 self.nBaud = None
502 506
503 507 self.shif_fft = False
504 508 self.flag_dc = False
505 509 self.flag_cspc = False
506 510 self.flag_decode = False
507 511 self.flag_deflip = False
508 512
509 513 def read(self, fp):
510 514
511 515 startFp = fp.tell()
512 516
513 517 try:
514 518 header = numpy.fromfile(fp,PROCESSING_STRUCTURE,1)
515 519 except Exception, e:
516 520 print "ProcessingHeader: " + e
517 521 return 0
518 522
519 523 size = int(header['nSize'][0])
520 524 self.dtype = int(header['nDataType'][0])
521 525 self.blockSize = int(header['nSizeOfDataBlock'][0])
522 526 self.profilesPerBlock = int(header['nProfilesperBlock'][0])
523 527 self.dataBlocksPerFile = int(header['nDataBlocksperFile'][0])
524 528 self.nWindows = int(header['nNumWindows'][0])
525 529 self.processFlags = header['nProcessFlags']
526 530 self.nCohInt = int(header['nCoherentIntegrations'][0])
527 531 self.nIncohInt = int(header['nIncoherentIntegrations'][0])
528 532 self.totalSpectra = int(header['nTotalSpectra'][0])
529 533
530 534 samplingWindow = numpy.fromfile(fp,SAMPLING_STRUCTURE,self.nWindows)
531 535
532 536 self.nHeights = int(numpy.sum(samplingWindow['nsa']))
533 537 self.firstHeight = float(samplingWindow['h0'][0])
534 538 self.deltaHeight = float(samplingWindow['dh'][0])
535 539 self.samplesWin = samplingWindow['nsa'][0]
536 540
537 541 self.spectraComb = numpy.fromfile(fp,'u1',2*self.totalSpectra)
538 542
539 543 if ((self.processFlags & PROCFLAG.DEFINE_PROCESS_CODE) == PROCFLAG.DEFINE_PROCESS_CODE):
540 544 self.nCode = int(numpy.fromfile(fp,'<u4',1))
541 545 self.nBaud = int(numpy.fromfile(fp,'<u4',1))
542 546 self.code = numpy.fromfile(fp,'<f4',self.nCode*self.nBaud).reshape(self.nCode,self.nBaud)
543 547
544 548 if ((self.processFlags & PROCFLAG.EXP_NAME_ESP) == PROCFLAG.EXP_NAME_ESP):
545 549 exp_name_len = int(numpy.fromfile(fp,'<u4',1))
546 550 exp_name = numpy.fromfile(fp,'u1',exp_name_len+1)
547 551
548 552 if ((self.processFlags & PROCFLAG.SHIFT_FFT_DATA) == PROCFLAG.SHIFT_FFT_DATA):
549 553 self.shif_fft = True
550 554 else:
551 555 self.shif_fft = False
552 556
553 557 if ((self.processFlags & PROCFLAG.SAVE_CHANNELS_DC) == PROCFLAG.SAVE_CHANNELS_DC):
554 558 self.flag_dc = True
555 559 else:
556 560 self.flag_dc = False
557 561
558 562 if ((self.processFlags & PROCFLAG.DECODE_DATA) == PROCFLAG.DECODE_DATA):
559 563 self.flag_decode = True
560 564 else:
561 565 self.flag_decode = False
562 566
563 567 if ((self.processFlags & PROCFLAG.DEFLIP_DATA) == PROCFLAG.DEFLIP_DATA):
564 568 self.flag_deflip = True
565 569 else:
566 570 self.flag_deflip = False
567 571
568 572 nChannels = 0
569 573 nPairs = 0
570 574 pairList = []
571 575
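# spectraComb lists the selected channel pairs as a flat array; a pair of equal channels is a self-spectrum and a pair of different channels is a cross-spectrum (flag_cspc is set below when at least one cross pair exists)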
572 576 for i in range( 0, self.totalSpectra*2, 2 ):
573 577 if self.spectraComb[i] == self.spectraComb[i+1]:
574 578 nChannels = nChannels + 1 #pair of equal channels
575 579 else:
576 580 nPairs = nPairs + 1 #pair of different channels
577 581 pairList.append( (self.spectraComb[i], self.spectraComb[i+1]) )
578 582
579 583 self.flag_cspc = False
580 584 if nPairs > 0:
581 585 self.flag_cspc = True
582 586
583 587 endFp = size + startFp
584 588
585 589 if fp.tell() > endFp:
586 590 sys.stderr.write("Warning: Processing header size is lower than it has to be")
587 591 return 0
588 592
589 593 if fp.tell() < endFp:
590 594 sys.stderr.write("Warning: Processing header size is greater than it is considered")
591 595
592 596 return 1
593 597
594 598 def write(self, fp):
595 599 #Clear DEFINE_PROCESS_CODE
596 600 self.processFlags = self.processFlags & (~PROCFLAG.DEFINE_PROCESS_CODE)
597 601
598 602 headerTuple = (self.size,
599 603 self.dtype,
600 604 self.blockSize,
601 605 self.profilesPerBlock,
602 606 self.dataBlocksPerFile,
603 607 self.nWindows,
604 608 self.processFlags,
605 609 self.nCohInt,
606 610 self.nIncohInt,
607 611 self.totalSpectra)
608 612
609 613 header = numpy.array(headerTuple,PROCESSING_STRUCTURE)
610 614 header.tofile(fp)
611 615
612 616 if self.nWindows != 0:
613 617 sampleWindowTuple = (self.firstHeight,self.deltaHeight,self.samplesWin)
614 618 samplingWindow = numpy.array(sampleWindowTuple,SAMPLING_STRUCTURE)
615 619 samplingWindow.tofile(fp)
616 620
617 621 if self.totalSpectra != 0:
618 622 # spectraComb = numpy.array([],numpy.dtype('u1'))
619 623 spectraComb = self.spectraComb
620 624 spectraComb.tofile(fp)
621 625
622 626 # if self.processFlags & PROCFLAG.DEFINE_PROCESS_CODE == PROCFLAG.DEFINE_PROCESS_CODE:
623 627 # nCode = numpy.array([self.nCode], numpy.dtype('u4')) #Test with data that stores a code; this has not been tested yet
624 628 # nCode.tofile(fp)
625 629 #
626 630 # nBaud = numpy.array([self.nBaud], numpy.dtype('u4'))
627 631 # nBaud.tofile(fp)
628 632 #
629 633 # code = self.code.reshape(self.nCode*self.nBaud)
630 634 # code = code.astype(numpy.dtype('<f4'))
631 635 # code.tofile(fp)
632 636
633 637 return 1
634 638
635 639 def get_size(self):
636 640
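# 40 bytes of fixed PROCESSING_STRUCTURE fields + 12 bytes per sampling window + 2 spectraComb bytes per spectrum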
637 641 self.__size = 40 + 12*self.nWindows + 2*self.totalSpectra
638 642
639 643 # if self.processFlags & PROCFLAG.DEFINE_PROCESS_CODE == PROCFLAG.DEFINE_PROCESS_CODE:
640 644 # self.__size += 4 + 4 + 4*self.nCode*numpy.ceil(self.nBaud/32.)
641 645 # self.__size += 4 + 4 + 4 * self.nCode * self.nBaud
642 646
643 647 return self.__size
644 648
645 649 def set_size(self, value):
646 650
647 651 raise IOError, "size is a property and it cannot be set, just read"
648 652
649 653 return
650 654
651 655 size = property(get_size, set_size)
652 656
653 657 class RCfunction:
654 658 NONE=0
655 659 FLIP=1
656 660 CODE=2
657 661 SAMPLING=3
658 662 LIN6DIV256=4
659 663 SYNCHRO=5
660 664
661 665 class nCodeType:
662 666 NONE=0
663 667 USERDEFINE=1
664 668 BARKER2=2
665 669 BARKER3=3
666 670 BARKER4=4
667 671 BARKER5=5
668 672 BARKER7=6
669 673 BARKER11=7
670 674 BARKER13=8
671 675 AC128=9
672 676 COMPLEMENTARYCODE2=10
673 677 COMPLEMENTARYCODE4=11
674 678 COMPLEMENTARYCODE8=12
675 679 COMPLEMENTARYCODE16=13
676 680 COMPLEMENTARYCODE32=14
677 681 COMPLEMENTARYCODE64=15
678 682 COMPLEMENTARYCODE128=16
679 683 CODE_BINARY28=17
680 684
681 685 class PROCFLAG:
682 686
683 687 COHERENT_INTEGRATION = numpy.uint32(0x00000001)
684 688 DECODE_DATA = numpy.uint32(0x00000002)
685 689 SPECTRA_CALC = numpy.uint32(0x00000004)
686 690 INCOHERENT_INTEGRATION = numpy.uint32(0x00000008)
687 691 POST_COHERENT_INTEGRATION = numpy.uint32(0x00000010)
688 692 SHIFT_FFT_DATA = numpy.uint32(0x00000020)
689 693
690 694 DATATYPE_CHAR = numpy.uint32(0x00000040)
691 695 DATATYPE_SHORT = numpy.uint32(0x00000080)
692 696 DATATYPE_LONG = numpy.uint32(0x00000100)
693 697 DATATYPE_INT64 = numpy.uint32(0x00000200)
694 698 DATATYPE_FLOAT = numpy.uint32(0x00000400)
695 699 DATATYPE_DOUBLE = numpy.uint32(0x00000800)
696 700
697 701 DATAARRANGE_CONTIGUOUS_CH = numpy.uint32(0x00001000)
698 702 DATAARRANGE_CONTIGUOUS_H = numpy.uint32(0x00002000)
699 703 DATAARRANGE_CONTIGUOUS_P = numpy.uint32(0x00004000)
700 704
701 705 SAVE_CHANNELS_DC = numpy.uint32(0x00008000)
702 706 DEFLIP_DATA = numpy.uint32(0x00010000)
703 707 DEFINE_PROCESS_CODE = numpy.uint32(0x00020000)
704 708
705 709 ACQ_SYS_NATALIA = numpy.uint32(0x00040000)
706 710 ACQ_SYS_ECHOTEK = numpy.uint32(0x00080000)
707 711 ACQ_SYS_ADRXD = numpy.uint32(0x000C0000)
708 712 ACQ_SYS_JULIA = numpy.uint32(0x00100000)
709 713 ACQ_SYS_XXXXXX = numpy.uint32(0x00140000)
710 714
711 715 EXP_NAME_ESP = numpy.uint32(0x00200000)
712 716 CHANNEL_NAMES_ESP = numpy.uint32(0x00400000)
713 717
714 718 OPERATION_MASK = numpy.uint32(0x0000003F)
715 719 DATATYPE_MASK = numpy.uint32(0x00000FC0)
716 720 DATAARRANGE_MASK = numpy.uint32(0x00007000)
717 721 ACQ_SYS_MASK = numpy.uint32(0x001C0000)
718 722
719 723 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
720 724 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
721 725 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
722 726 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
723 727 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
724 728 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
725 729
726 730 NUMPY_DTYPE_LIST = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
727 731
728 732 PROCFLAG_DTYPE_LIST = [PROCFLAG.DATATYPE_CHAR,
729 733 PROCFLAG.DATATYPE_SHORT,
730 734 PROCFLAG.DATATYPE_LONG,
731 735 PROCFLAG.DATATYPE_INT64,
732 736 PROCFLAG.DATATYPE_FLOAT,
733 737 PROCFLAG.DATATYPE_DOUBLE]
734 738
735 739 DTYPE_WIDTH = [1, 2, 4, 8, 4, 8]
736 740
737 741 def get_dtype_index(numpy_dtype):
738 742
739 743 index = None
740 744
741 745 for i in range(len(NUMPY_DTYPE_LIST)):
742 746 if numpy_dtype == NUMPY_DTYPE_LIST[i]:
743 747 index = i
744 748 break
745 749
746 750 return index
747 751
748 752 def get_numpy_dtype(index):
749 753
750 754 return NUMPY_DTYPE_LIST[index]
751 755
752 756 def get_procflag_dtype(index):
753 757
754 758 return PROCFLAG_DTYPE_LIST[index]
755 759
756 760 def get_dtype_width(index):
757 761
758 762 return DTYPE_WIDTH[index] No newline at end of file
@@ -1,1704 +1,1709
1 1 '''
2 2 Created on Jul 2, 2014
3 3
4 4 @author: roj-idl71
5 5 '''
6 6 import os
7 7 import sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import time, datetime
13 13 #import h5py
14 14 import traceback
15 15
16 16 try:
17 17 from gevent import sleep
18 18 except:
19 19 from time import sleep
20 20
21 21 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader
22 22 from schainpy.model.data.jroheaderIO import get_dtype_index, get_numpy_dtype, get_procflag_dtype, get_dtype_width
23 23
24 24 LOCALTIME = True
25 25
26 26 def isNumber(cad):
27 27 """
28 28 Checks whether the characters that make up a string can be converted to a number.
29 29
30 30 Exceptions:
31 31 If a given string cannot be converted to a number
32 32 Input:
33 33 str, the string to analyze to determine whether or not it can be converted to a number
34 34
35 35 Return:
36 36 True : if the string is numeric
37 37 False : if the string is not numeric
38 38 """
39 39 try:
40 40 float( cad )
41 41 return True
42 42 except:
43 43 return False
44 44
45 45 def isFileInEpoch(filename, startUTSeconds, endUTSeconds):
46 46 """
47 47 This function determines whether or not a data file falls within the specified date range.
48 48
49 49 Inputs:
50 50 filename : full name of the data file in Jicamarca format (.r)
51 51
52 52 startUTSeconds : start date of the selected range, given in
53 53 seconds counted from 01/01/1970.
54 54 endUTSeconds : end date of the selected range, given in
55 55 seconds counted from 01/01/1970.
56 56
57 57 Return:
58 58 Boolean : returns True if the data file contains data within the specified
59 59 date range, otherwise returns False.
60 60
61 61 Exceptions:
62 62 If the file does not exist or cannot be opened
63 63 If the header cannot be read.
64 64
65 65 """
66 66 basicHeaderObj = BasicHeader(LOCALTIME)
67 67
68 68 try:
69 69 fp = open(filename,'rb')
70 70 except IOError:
71 71 print "The file %s can't be opened" %(filename)
72 72 return 0
73 73
74 74 sts = basicHeaderObj.read(fp)
75 75 fp.close()
76 76
77 77 if not(sts):
78 78 print "Skipping the file %s because it has not a valid header" %(filename)
79 79 return 0
80 80
81 81 if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
82 82 return 0
83 83
84 84 return 1
85 85
86 86 def isTimeInRange(thisTime, startTime, endTime):
87 87
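# When endTime < startTime the range wraps past midnight (e.g. 18:00 - 07:00): a time is rejected only if it falls strictly between endTime and startTime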
88 88 if endTime >= startTime:
89 89 if (thisTime < startTime) or (thisTime > endTime):
90 90 return 0
91 91
92 92 return 1
93 93 else:
94 94 if (thisTime < startTime) and (thisTime > endTime):
95 95 return 0
96 96
97 97 return 1
98 98
99 99 def isFileInTimeRange(filename, startDate, endDate, startTime, endTime):
100 100 """
101 101 Returns 1 if the data file falls within the specified time range.
102 102
103 103 Inputs:
104 104 filename : full name of the data file in Jicamarca format (.r)
105 105
106 106 startDate : start date of the selected range, as a datetime.date
107 107
108 108 endDate : end date of the selected range, as a datetime.date
109 109
110 110 startTime : start time of the selected range, as a datetime.time
111 111
112 112 endTime : end time of the selected range, as a datetime.time
113 113
114 114 Return:
115 115 Boolean : returns True if the data file contains data within the specified
116 116 date range, otherwise returns False.
117 117
118 118 Exceptions:
119 119 If the file does not exist or cannot be opened
120 120 If the header cannot be read.
121 121
122 122 """
123 123
124 124
125 125 try:
126 126 fp = open(filename,'rb')
127 127 except IOError:
128 128 print "The file %s can't be opened" %(filename)
129 129 return None
130 130
131 131 firstBasicHeaderObj = BasicHeader(LOCALTIME)
132 132 systemHeaderObj = SystemHeader()
133 133 radarControllerHeaderObj = RadarControllerHeader()
134 134 processingHeaderObj = ProcessingHeader()
135 135
136 136 lastBasicHeaderObj = BasicHeader(LOCALTIME)
137 137
138 138 sts = firstBasicHeaderObj.read(fp)
139 139
140 140 if not(sts):
141 141 print "[Reading] Skipping the file %s because it has not a valid header" %(filename)
142 142 return None
143 143
144 sts = systemHeaderObj.read(fp)
145 sts = radarControllerHeaderObj.read(fp)
146 sts = processingHeaderObj.read(fp)
144 if not systemHeaderObj.read(fp):
145 return None
146
147 if not radarControllerHeaderObj.read(fp):
148 return None
149
150 if not processingHeaderObj.read(fp):
151 return None
147 152
148 153 filesize = os.path.getsize(filename)
149 154
150 155 offset = processingHeaderObj.blockSize + 24 #header size
151 156
152 157 if filesize <= offset:
153 158 print "[Reading] %s: This file has not enough data" %filename
154 159 return None
155 160
156 161 fp.seek(-offset, 2)
157 162
158 163 sts = lastBasicHeaderObj.read(fp)
159 164
160 165 fp.close()
161 166
162 167 thisDatetime = lastBasicHeaderObj.datatime
163 168 thisTime_last_block = thisDatetime.time()
164 169
165 170 thisDatetime = firstBasicHeaderObj.datatime
166 171 thisDate = thisDatetime.date()
167 172 thisTime_first_block = thisDatetime.time()
168 173
169 174 #General case
170 175 # o>>>>>>>>>>>>>><<<<<<<<<<<<<<o
171 176 #-----------o----------------------------o-----------
172 177 # startTime endTime
173 178
174 179 if endTime >= startTime:
175 180 if (thisTime_last_block < startTime) or (thisTime_first_block > endTime):
176 181 return None
177 182
178 183 return thisDatetime
179 184
180 185 #If endTime < startTime then endTime belongs to the next day
181 186
182 187
183 188 #<<<<<<<<<<<o o>>>>>>>>>>>
184 189 #-----------o----------------------------o-----------
185 190 # endTime startTime
186 191
187 192 if (thisDate == startDate) and (thisTime_last_block < startTime):
188 193 return None
189 194
190 195 if (thisDate == endDate) and (thisTime_first_block > endTime):
191 196 return None
192 197
193 198 if (thisTime_last_block < startTime) and (thisTime_first_block > endTime):
194 199 return None
195 200
196 201 return thisDatetime
197 202
198 203 def isFolderInDateRange(folder, startDate=None, endDate=None):
199 204 """
200 205 Returns 1 if the data folder falls within the specified date range.
201 206
202 207 Inputs:
203 208 folder : full name of the directory.
204 209 Its format should be "/path_root/?YYYYDDD"
205 210
206 211 where:
207 212 YYYY : year (e.g. 2015)
208 213 DDD : day of the year (e.g. 305)
209 214
210 215 startDate : start date of the selected range, as a datetime.date
211 216
212 217 endDate : end date of the selected range, as a datetime.date
213 218
214 219 Return:
215 220 Boolean : returns True if the folder contains data within the specified
216 221 date range, otherwise returns False.
217 222 Exceptions:
218 223 If the directory does not have the proper format
219 224 """
220 225
221 226 basename = os.path.basename(folder)
222 227
223 228 if not isRadarFolder(basename):
224 229 print "The folder %s has not the rigth format" %folder
225 230 return 0
226 231
227 232 if startDate and endDate:
228 233 thisDate = getDateFromRadarFolder(basename)
229 234
230 235 if thisDate < startDate:
231 236 return 0
232 237
233 238 if thisDate > endDate:
234 239 return 0
235 240
236 241 return 1
237 242
238 243 def isFileInDateRange(filename, startDate=None, endDate=None):
239 244 """
240 245 Returns 1 if the data file falls within the specified date range.
241 246
242 247 Inputs:
243 248 filename : full name of the data file in Jicamarca format (.r)
244 249
245 250 Its format should be "?YYYYDDDsss"
246 251
247 252 where:
248 253 YYYY : year (e.g. 2015)
249 254 DDD : day of the year (e.g. 305)
250 255 sss : set
251 256
252 257 startDate : start date of the selected range, as a datetime.date
253 258
254 259 endDate : end date of the selected range, as a datetime.date
255 260
256 261 Return:
257 262 Boolean : returns True if the data file contains data within the specified
258 263 date range, otherwise returns False.
259 264 Exceptions:
260 265 If the file does not have the proper format
261 266 """
262 267
263 268 basename = os.path.basename(filename)
264 269
265 270 if not isRadarFile(basename):
266 271 print "The filename %s has not the rigth format" %filename
267 272 return 0
268 273
269 274 if startDate and endDate:
270 275 thisDate = getDateFromRadarFile(basename)
271 276
272 277 if thisDate < startDate:
273 278 return 0
274 279
275 280 if thisDate > endDate:
276 281 return 0
277 282
278 283 return 1
279 284
280 285 def getFileFromSet(path, ext, set):
281 286 validFilelist = []
282 287 fileList = os.listdir(path)
283 288
284 289 # 0 1234 567 89A BCDE
285 290 # H YYYY DDD SSS .ext
286 291
287 292 for thisFile in fileList:
288 293 try:
289 294 year = int(thisFile[1:5])
290 295 doy = int(thisFile[5:8])
291 296 except:
292 297 continue
293 298
294 299 if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
295 300 continue
296 301
297 302 validFilelist.append(thisFile)
298 303
299 304 myfile = fnmatch.filter(validFilelist,'*%4.4d%3.3d%3.3d*'%(year,doy,set))
300 305
301 306 if len(myfile)!= 0:
302 307 return myfile[0]
303 308 else:
304 309 filename = '*%4.4d%3.3d%3.3d%s'%(year,doy,set,ext.lower())
305 310 print 'the filename %s does not exist'%filename
306 311 print '...going to the last file: '
307 312
308 313 if validFilelist:
309 314 validFilelist = sorted( validFilelist, key=str.lower )
310 315 return validFilelist[-1]
311 316
312 317 return None
313 318
314 319 def getlastFileFromPath(path, ext):
315 320 """
316 321 Filters the fileList, keeping only the files that match the "PYYYYDDDSSS.ext" format,
317 322 and returns the last file of the list that remains after the filtering.
318 323
319 324 Input:
320 325 fileList : list containing all the files (without path) of a given folder
321 326 ext : extension of the files contained in the folder
322 327
323 328 Return:
324 329 The last file of the given folder; the path is not included.
325 330 """
326 331 validFilelist = []
327 332 fileList = os.listdir(path)
328 333
329 334 # 0 1234 567 89A BCDE
330 335 # H YYYY DDD SSS .ext
331 336
332 337 for thisFile in fileList:
333 338
334 339 year = thisFile[1:5]
335 340 if not isNumber(year):
336 341 continue
337 342
338 343 doy = thisFile[5:8]
339 344 if not isNumber(doy):
340 345 continue
341 346
342 347 year = int(year)
343 348 doy = int(doy)
344 349
345 350 if (os.path.splitext(thisFile)[-1].lower() != ext.lower()):
346 351 continue
347 352
348 353 validFilelist.append(thisFile)
349 354
350 355 if validFilelist:
351 356 validFilelist = sorted( validFilelist, key=str.lower )
352 357 return validFilelist[-1]
353 358
354 359 return None
355 360
356 361 def checkForRealPath(path, foldercounter, year, doy, set, ext):
357 362 """
358 363 Because Linux is case sensitive, checkForRealPath finds the correct name of a path.
359 364 It tries several upper/lower-case name combinations to determine
360 365 the exact path of a given file.
361 366
362 367 Example :
363 368 the correct file name is .../.../D2009307/P2009307367.ext
364 369
365 370 The function then tries the following combinations
366 371 .../.../y2009307367.ext
367 372 .../.../Y2009307367.ext
368 373 .../.../x2009307/y2009307367.ext
369 374 .../.../x2009307/Y2009307367.ext
370 375 .../.../X2009307/y2009307367.ext
371 376 .../.../X2009307/Y2009307367.ext
372 377 where, in this case, the last letter combination is identical to the file being searched for
373 378
374 379 Return:
375 380 If the right combination is found, it returns the full path and the file name;
376 381 otherwise it returns None as the path and the last upper-case name combination
377 382 as the filename
378 383 """
379 384 fullfilename = None
380 385 find_flag = False
381 386 filename = None
382 387
383 388 prefixDirList = [None,'d','D']
384 389 if ext.lower() == ".r": #voltage
385 390 prefixFileList = ['d','D']
386 391 elif ext.lower() == ".pdata": #spectra
387 392 prefixFileList = ['p','P']
388 393 else:
389 394 return None, filename
390 395
391 396 #sweep over the possible combinations
392 397 for prefixDir in prefixDirList:
393 398 thispath = path
394 399 if prefixDir != None:
395 400 #build the directory name xYYYYDDD (x=d or x=D)
396 401 if foldercounter == 0:
397 402 thispath = os.path.join(path, "%s%04d%03d" % ( prefixDir, year, doy ))
398 403 else:
399 404 thispath = os.path.join(path, "%s%04d%03d_%02d" % ( prefixDir, year, doy , foldercounter))
400 405 for prefixFile in prefixFileList: #sweep over the two possible combinations of "D"
401 406 filename = "%s%04d%03d%03d%s" % ( prefixFile, year, doy, set, ext ) #build the file name xYYYYDDDSSS.ext
402 407 fullfilename = os.path.join( thispath, filename ) #build the full path
403 408
404 409 if os.path.exists( fullfilename ): #check that it exists
405 410 find_flag = True
406 411 break
407 412 if find_flag:
408 413 break
409 414
410 415 if not(find_flag):
411 416 return None, filename
412 417
413 418 return fullfilename, filename
414 419
415 420 def isRadarFolder(folder):
416 421 try:
417 422 year = int(folder[1:5])
418 423 doy = int(folder[5:8])
419 424 except:
420 425 return 0
421 426
422 427 return 1
423 428
424 429 def isRadarFile(file):
425 430 try:
426 431 year = int(file[1:5])
427 432 doy = int(file[5:8])
428 433 set = int(file[8:11])
429 434 except:
430 435 return 0
431 436
432 437 return 1
433 438
434 439 def getDateFromRadarFile(file):
435 440 try:
436 441 year = int(file[1:5])
437 442 doy = int(file[5:8])
438 443 set = int(file[8:11])
439 444 except:
440 445 return None
441 446
442 447 thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy-1)
443 448 return thisDate
444 449
445 450 def getDateFromRadarFolder(folder):
446 451 try:
447 452 year = int(folder[1:5])
448 453 doy = int(folder[5:8])
449 454 except:
450 455 return None
451 456
452 457 thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy-1)
453 458 return thisDate
454 459
455 460 class JRODataIO:
456 461
457 462 c = 3E8
458 463
459 464 isConfig = False
460 465
461 466 basicHeaderObj = None
462 467
463 468 systemHeaderObj = None
464 469
465 470 radarControllerHeaderObj = None
466 471
467 472 processingHeaderObj = None
468 473
469 474 dtype = None
470 475
471 476 pathList = []
472 477
473 478 filenameList = []
474 479
475 480 filename = None
476 481
477 482 ext = None
478 483
479 484 flagIsNewFile = 1
480 485
481 486 flagDiscontinuousBlock = 0
482 487
483 488 flagIsNewBlock = 0
484 489
485 490 fp = None
486 491
487 492 firstHeaderSize = 0
488 493
489 494 basicHeaderSize = 24
490 495
491 496 versionFile = 1103
492 497
493 498 fileSize = None
494 499
495 500 # ippSeconds = None
496 501
497 502 fileSizeByHeader = None
498 503
499 504 fileIndex = None
500 505
501 506 profileIndex = None
502 507
503 508 blockIndex = None
504 509
505 510 nTotalBlocks = None
506 511
507 512 maxTimeStep = 30
508 513
509 514 lastUTTime = None
510 515
511 516 datablock = None
512 517
513 518 dataOut = None
514 519
515 520 blocksize = None
516 521
517 522 getByBlock = False
518 523
519 524 def __init__(self):
520 525
521 526 raise NotImplementedError
522 527
523 528 def run(self):
524 529
525 530 raise NotImplementedError
526 531
527 532 def getDtypeWidth(self):
528 533
529 534 dtype_index = get_dtype_index(self.dtype)
530 535 dtype_width = get_dtype_width(dtype_index)
531 536
532 537 return dtype_width
533 538
534 539 class JRODataReader(JRODataIO):
535 540
536 541
537 542 online = 0
538 543
539 544 realtime = 0
540 545
541 546 nReadBlocks = 0
542 547
543 548 delay = 10 #number of seconds to wait for a new file
544 549
545 550 nTries = 3 #number of tries
546 551
547 552 nFiles = 3 #number of files to search
548 553
549 554 path = None
550 555
551 556 foldercounter = 0
552 557
553 558 flagNoMoreFiles = 0
554 559
555 560 datetimeList = []
556 561
557 562 __isFirstTimeOnline = 1
558 563
559 564 __printInfo = True
560 565
561 566 profileIndex = None
562 567
563 568 nTxs = 1
564 569
565 570 txIndex = None
566 571
567 572 def __init__(self):
568 573
569 574 """
570 575 This class is used to find data files
571 576
572 577 Example:
573 578 reader = JRODataReader()
574 579 fileList = reader.findDataFiles()
575 580
576 581 """
577 582 pass
578 583
579 584
580 585 def createObjByDefault(self):
581 586 """
582 587
583 588 """
584 589 raise NotImplementedError
585 590
586 591 def getBlockDimension(self):
587 592
588 593 raise NotImplementedError
589 594
590 595 def __searchFilesOffLine(self,
591 596 path,
592 597 startDate=None,
593 598 endDate=None,
594 599 startTime=datetime.time(0,0,0),
595 600 endTime=datetime.time(23,59,59),
596 601 set=None,
597 602 expLabel='',
598 603 ext='.r',
599 604 walk=True):
600 605
601 606 self.filenameList = []
602 607 self.datetimeList = []
603 608
604 609 pathList = []
605 610
606 611 dateList, pathList = self.findDatafiles(path, startDate, endDate, expLabel, ext, walk, include_path=True)
607 612
608 613 if dateList == []:
609 614 # print "[Reading] Date range selected invalid [%s - %s]: No *%s files in %s)" %(startDate, endDate, ext, path)
610 615 return None, None
611 616
612 617 if len(dateList) > 1:
613 618 print "[Reading] Data found for date range [%s - %s]: total days = %d" %(startDate, endDate, len(dateList))
614 619 else:
615 620 print "[Reading] Data found for date range [%s - %s]: date = %s" %(startDate, endDate, dateList[0])
616 621
617 622 filenameList = []
618 623 datetimeList = []
619 624
620 625 for thisPath in pathList:
621 626 # thisPath = pathList[pathDict[file]]
622 627
623 628 fileList = glob.glob1(thisPath, "*%s" %ext)
624 629 fileList.sort()
625 630
626 631 for file in fileList:
627 632
628 633 filename = os.path.join(thisPath,file)
629 634
630 635 if not isFileInDateRange(filename, startDate, endDate):
631 636 continue
632 637
633 638 thisDatetime = isFileInTimeRange(filename, startDate, endDate, startTime, endTime)
634 639
635 640 if not(thisDatetime):
636 641 continue
637 642
638 643 filenameList.append(filename)
639 644 datetimeList.append(thisDatetime)
640 645
641 646 if not(filenameList):
642 647 print "[Reading] Time range selected invalid [%s - %s]: No *%s files in %s)" %(startTime, endTime, ext, path)
643 648 return None, None
644 649
645 650 print "[Reading] %d file(s) was(were) found in time range: %s - %s" %(len(filenameList), startTime, endTime)
646 651 print
647 652
648 653 for i in range(len(filenameList)):
649 654 print "[Reading] %s -> [%s]" %(filenameList[i], datetimeList[i].ctime())
650 655
651 656 self.filenameList = filenameList
652 657 self.datetimeList = datetimeList
653 658
654 659 return pathList, filenameList
655 660
656 661 def __searchFilesOnLine(self, path, expLabel = "", ext = None, walk=True, set=None):
657 662
658 663 """
659 664 Searches for the latest file in the latest folder (whether or not it is determined by startDateTime) and
660 665 returns the file found, along with other data.
661 666
662 667 Input:
663 668 path : folder that contains the files holding the data
664 669
665 670 expLabel : name of the sub-experiment (subfolder)
666 671
667 672 ext : extension of the files
668 673
669 674 walk : if enabled, the search descends into the day subdirectories (doypath)
670 675
671 676 Return:
672 677 directory : the directory where the found file is located
673 678 filename : the latest file of the given folder
674 679 year : the year
675 680 doy : the day of the year
676 681 set : the set number of the file
677 682
678 683
679 684 """
680 685 if not os.path.isdir(path):
681 686 return None, None, None, None, None, None
682 687
683 688 dirList = []
684 689
685 690 if not walk:
686 691 fullpath = path
687 692 foldercounter = 0
688 693 else:
689 694 #Keep only the directories
690 695 for thisPath in os.listdir(path):
691 696 if not os.path.isdir(os.path.join(path,thisPath)):
692 697 continue
693 698 if not isRadarFolder(thisPath):
694 699 continue
695 700
696 701 dirList.append(thisPath)
697 702
698 703 if not(dirList):
699 704 return None, None, None, None, None, None
700 705
701 706 dirList = sorted( dirList, key=str.lower )
702 707
703 708 doypath = dirList[-1]
704 709 foldercounter = int(doypath.split('_')[1]) if len(doypath.split('_'))>1 else 0
705 710 fullpath = os.path.join(path, doypath, expLabel)
706 711
707 712
708 713 print "[Reading] %s folder was found: " %(fullpath )
709 714
710 715 if set == None:
711 716 filename = getlastFileFromPath(fullpath, ext)
712 717 else:
713 718 filename = getFileFromSet(fullpath, ext, set)
714 719
715 720 if not(filename):
716 721 return None, None, None, None, None, None
717 722
718 723 print "[Reading] %s file was found" %(filename)
719 724
720 725 if not(self.__verifyFile(os.path.join(fullpath, filename))):
721 726 return None, None, None, None, None, None
722 727
723 728 year = int( filename[1:5] )
724 729 doy = int( filename[5:8] )
725 730 set = int( filename[8:11] )
726 731
727 732 return fullpath, foldercounter, filename, year, doy, set
728 733
729 734 def __setNextFileOffline(self):
730 735
731 736 idFile = self.fileIndex
732 737
733 738 while (True):
734 739 idFile += 1
735 740 if not(idFile < len(self.filenameList)):
736 741 self.flagNoMoreFiles = 1
737 742 # print "[Reading] No more Files"
738 743 return 0
739 744
740 745 filename = self.filenameList[idFile]
741 746
742 747 if not(self.__verifyFile(filename)):
743 748 continue
744 749
745 750 fileSize = os.path.getsize(filename)
746 751 fp = open(filename,'rb')
747 752 break
748 753
749 754 self.flagIsNewFile = 1
750 755 self.fileIndex = idFile
751 756 self.filename = filename
752 757 self.fileSize = fileSize
753 758 self.fp = fp
754 759
755 760 # print "[Reading] Setting the file: %s"%self.filename
756 761
757 762 return 1
758 763
759 764 def __setNextFileOnline(self):
760 765 """
761 766 Searches for the next file that has enough data to be read, within a specific folder; if
762 767 no valid file is found it waits a given time and then looks at the next n possible
763 768 files.
764 769
765 770 Affected:
766 771 self.flagIsNewFile
767 772 self.filename
768 773 self.fileSize
769 774 self.fp
770 775 self.set
771 776 self.flagNoMoreFiles
772 777
773 778 Return:
774 779 0 : if, after searching for the next valid file, it could not be found
775 780 1 : if the file was opened successfully and is ready to be read
776 781
777 782 Exceptions:
778 783 If a given file cannot be opened
779 784 """
780 785 nFiles = 0
781 786 fileOk_flag = False
782 787 firstTime_flag = True
783 788
784 789 self.set += 1
785 790
786 791 if self.set > 999:
787 792 self.set = 0
788 793 self.foldercounter += 1
789 794
790 795 #look for the first available file
791 796 fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
792 797 if fullfilename:
793 798 if self.__verifyFile(fullfilename, False):
794 799 fileOk_flag = True
795 800
796 801 #if no file is found, wait and search again
797 802 if not(fileOk_flag):
798 803 for nFiles in range(self.nFiles+1): #search the next self.nFiles+1 possible files
799 804
800 805 if firstTime_flag: #the first time, loop self.nTries times
801 806 tries = self.nTries
802 807 else:
803 808 tries = 1 #after the first time, only try once
804 809
805 810 for nTries in range( tries ):
806 811 if firstTime_flag:
807 812 print "\t[Reading] Waiting %0.2f sec for the next file: \"%s\" , try %03d ..." % ( self.delay, filename, nTries+1 )
808 813 sleep( self.delay )
809 814 else:
810 815 print "\t[Reading] Searching the next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext)
811 816
812 817 fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
813 818 if fullfilename:
814 819 if self.__verifyFile(fullfilename):
815 820 fileOk_flag = True
816 821 break
817 822
818 823 if fileOk_flag:
819 824 break
820 825
821 826 firstTime_flag = False
822 827
823 828 print "\t[Reading] Skipping the file \"%s\" due to this file doesn't exist" % filename
824 829 self.set += 1
825 830
826 831 if nFiles == (self.nFiles-1): #if the file is not found, switch folders and search in the next folder
827 832 self.set = 0
828 833 self.doy += 1
829 834 self.foldercounter = 0
830 835
831 836 if fileOk_flag:
832 837 self.fileSize = os.path.getsize( fullfilename )
833 838 self.filename = fullfilename
834 839 self.flagIsNewFile = 1
835 840 if self.fp != None: self.fp.close()
836 841 self.fp = open(fullfilename, 'rb')
837 842 self.flagNoMoreFiles = 0
838 843 # print '[Reading] Setting the file: %s' % fullfilename
839 844 else:
840 845 self.fileSize = 0
841 846 self.filename = None
842 847 self.flagIsNewFile = 0
843 848 self.fp = None
844 849 self.flagNoMoreFiles = 1
845 850 # print '[Reading] No more files to read'
846 851
847 852 return fileOk_flag
848 853
849 854 def setNextFile(self):
850 855 if self.fp != None:
851 856 self.fp.close()
852 857
853 858 if self.online:
854 859 newFile = self.__setNextFileOnline()
855 860 else:
856 861 newFile = self.__setNextFileOffline()
857 862
858 863 if not(newFile):
859 864 print '[Reading] No more files to read'
860 865 return 0
861 866
862 867 print '[Reading] Setting the file: %s' % self.filename
863 868
864 869 self.__readFirstHeader()
865 870 self.nReadBlocks = 0
866 871 return 1
867 872
868 873 def __waitNewBlock(self):
869 874 """
870 875 Returns 1 if a new data block was found, 0 otherwise.
871 876
872 877 If the reading mode is offline, it always returns 0
873 878 """
874 879 if not self.online:
875 880 return 0
876 881
877 882 if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
878 883 return 0
879 884
880 885 currentPointer = self.fp.tell()
881 886
882 887 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
883 888
884 889 for nTries in range( self.nTries ):
885 890
886 891 self.fp.close()
887 892 self.fp = open( self.filename, 'rb' )
888 893 self.fp.seek( currentPointer )
889 894
890 895 self.fileSize = os.path.getsize( self.filename )
891 896 currentSize = self.fileSize - currentPointer
892 897
893 898 if ( currentSize >= neededSize ):
894 899 self.basicHeaderObj.read(self.fp)
895 900 return 1
896 901
897 902 if self.fileSize == self.fileSizeByHeader:
898 903 # self.flagEoF = True
899 904 return 0
900 905
901 906 print "[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
902 907 sleep( self.delay )
903 908
904 909
905 910 return 0
906 911
907 912 def waitDataBlock(self,pointer_location):
908 913
909 914 currentPointer = pointer_location
910 915
911 916 neededSize = self.processingHeaderObj.blockSize #+ self.basicHeaderSize
912 917
913 918 for nTries in range( self.nTries ):
914 919 self.fp.close()
915 920 self.fp = open( self.filename, 'rb' )
916 921 self.fp.seek( currentPointer )
917 922
918 923 self.fileSize = os.path.getsize( self.filename )
919 924 currentSize = self.fileSize - currentPointer
920 925
921 926 if ( currentSize >= neededSize ):
922 927 return 1
923 928
924 929 print "[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
925 930 sleep( self.delay )
926 931
927 932 return 0
928 933
929 934 def __jumpToLastBlock(self):
930 935
931 936 if not(self.__isFirstTimeOnline):
932 937 return
933 938
934 939 csize = self.fileSize - self.fp.tell()
935 940 blocksize = self.processingHeaderObj.blockSize
936 941
937 942 #skip the first data block
938 943 if csize > self.processingHeaderObj.blockSize:
939 944 self.fp.seek(self.fp.tell() + blocksize)
940 945 else:
941 946 return
942 947
943 948 csize = self.fileSize - self.fp.tell()
944 949 neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
945 950 while True:
946 951
947 952 if self.fp.tell()<self.fileSize:
948 953 self.fp.seek(self.fp.tell() + neededsize)
949 954 else:
950 955 self.fp.seek(self.fp.tell() - neededsize)
951 956 break
952 957
953 958 # csize = self.fileSize - self.fp.tell()
954 959 # neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
955 960 # factor = int(csize/neededsize)
956 961 # if factor > 0:
957 962 # self.fp.seek(self.fp.tell() + factor*neededsize)
958 963
959 964 self.flagIsNewFile = 0
960 965 self.__isFirstTimeOnline = 0
961 966
962 967 def __setNewBlock(self):
963 968
964 969 if self.fp == None:
965 970 return 0
966 971
967 972 # if self.online:
968 973 # self.__jumpToLastBlock()
969 974
970 975 if self.flagIsNewFile:
971 976 self.lastUTTime = self.basicHeaderObj.utc
972 977 return 1
973 978
974 979 if self.realtime:
975 980 self.flagDiscontinuousBlock = 1
976 981 if not(self.setNextFile()):
977 982 return 0
978 983 else:
979 984 return 1
980 985
981 986 currentSize = self.fileSize - self.fp.tell()
982 987 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
983 988
984 989 if (currentSize >= neededSize):
985 990 self.basicHeaderObj.read(self.fp)
986 991 self.lastUTTime = self.basicHeaderObj.utc
987 992 return 1
988 993
989 994 if self.__waitNewBlock():
990 995 self.lastUTTime = self.basicHeaderObj.utc
991 996 return 1
992 997
993 998 if not(self.setNextFile()):
994 999 return 0
995 1000
996 1001 deltaTime = self.basicHeaderObj.utc - self.lastUTTime #
997 1002 self.lastUTTime = self.basicHeaderObj.utc
998 1003
999 1004 self.flagDiscontinuousBlock = 0
1000 1005
1001 1006 if deltaTime > self.maxTimeStep:
1002 1007 self.flagDiscontinuousBlock = 1
1003 1008
1004 1009 return 1
1005 1010
1006 1011 def readNextBlock(self):
1007 1012
1008 1013 #Skip block out of startTime and endTime
1009 1014 while True:
1010 1015 if not(self.__setNewBlock()):
1011 1016 return 0
1012 1017
1013 1018 if not(self.readBlock()):
1014 1019 return 0
1015 1020
1016 1021 self.getBasicHeader()
1017 1022
1018 1023 if not isTimeInRange(self.dataOut.datatime.time(), self.startTime, self.endTime):
1019 1024
1020 1025 print "[Reading] Block No. %d/%d -> %s [Skipping]" %(self.nReadBlocks,
1021 1026 self.processingHeaderObj.dataBlocksPerFile,
1022 1027 self.dataOut.datatime.ctime())
1023 1028 continue
1024 1029
1025 1030 break
1026 1031
1027 1032 print "[Reading] Block No. %d/%d -> %s" %(self.nReadBlocks,
1028 1033 self.processingHeaderObj.dataBlocksPerFile,
1029 1034 self.dataOut.datatime.ctime())
1030 1035 return 1
1031 1036
1032 1037 def __readFirstHeader(self):
1033 1038
1034 1039 self.basicHeaderObj.read(self.fp)
1035 1040 self.systemHeaderObj.read(self.fp)
1036 1041 self.radarControllerHeaderObj.read(self.fp)
1037 1042 self.processingHeaderObj.read(self.fp)
1038 1043
1039 1044 self.firstHeaderSize = self.basicHeaderObj.size
1040 1045
1041 1046 datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
1042 1047 if datatype == 0:
1043 1048 datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
1044 1049 elif datatype == 1:
1045 1050 datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')])
1046 1051 elif datatype == 2:
1047 1052 datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')])
1048 1053 elif datatype == 3:
1049 1054 datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')])
1050 1055 elif datatype == 4:
1051 1056 datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')])
1052 1057 elif datatype == 5:
1053 1058 datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')])
1054 1059 else:
1055 1060 raise ValueError, 'Data type was not defined'
1056 1061
1057 1062 self.dtype = datatype_str
1058 1063 #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
1059 1064 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1)
1060 1065 # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
1061 1066 # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
1062 1067 self.getBlockDimension()
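# Quick reference for the datatype decoding above (assuming the usual PROCFLAG
# bit values DATATYPE_CHAR=0x40 ... DATATYPE_DOUBLE=0x800, DATATYPE_MASK=0xFC0;
# these constants are illustrative, check the PROCFLAG definition):
#
#   int(numpy.log2(0x400 & 0xFC0) - numpy.log2(0x40))   # -> 4
#
# i.e. a file written with the DATATYPE_FLOAT flag maps to the
# ('<f4','<f4') complex-sample dtype selected above.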
1063 1068
1064 1069 def __verifyFile(self, filename, msgFlag=True):
1065 1070
1066 1071 msg = None
1067 1072
1068 1073 try:
1069 1074 fp = open(filename, 'rb')
1070 1075 except IOError:
1071 1076
1072 1077 if msgFlag:
1073 1078 print "[Reading] File %s can't be opened" % (filename)
1074 1079
1075 1080 return False
1076 1081
1077 1082 currentPosition = fp.tell()
1078 1083 neededSize = self.processingHeaderObj.blockSize + self.firstHeaderSize
1079 1084
1080 1085 if neededSize == 0:
1081 1086 basicHeaderObj = BasicHeader(LOCALTIME)
1082 1087 systemHeaderObj = SystemHeader()
1083 1088 radarControllerHeaderObj = RadarControllerHeader()
1084 1089 processingHeaderObj = ProcessingHeader()
1085 1090
1086 1091 if not( basicHeaderObj.read(fp) ):
1087 1092 fp.close()
1088 1093 return False
1089 1094
1090 1095 if not( systemHeaderObj.read(fp) ):
1091 1096 fp.close()
1092 1097 return False
1093 1098
1094 1099 if not( radarControllerHeaderObj.read(fp) ):
1095 1100 fp.close()
1096 1101 return False
1097 1102
1098 1103 if not( processingHeaderObj.read(fp) ):
1099 1104 fp.close()
1100 1105 return False
1101 1106
1102 1107 neededSize = processingHeaderObj.blockSize + basicHeaderObj.size
1103 1108 else:
1104 1109 msg = "[Reading] Skipping file %s because it does not have enough data" %filename
1105 1110
1106 1111 fp.close()
1107 1112
1108 1113 fileSize = os.path.getsize(filename)
1109 1114 currentSize = fileSize - currentPosition
1110 1115
1111 1116 if currentSize < neededSize:
1112 1117 if msgFlag and (msg != None):
1113 1118 print msg
1114 1119 return False
1115 1120
1116 1121 return True
1117 1122
1118 1123 def findDatafiles(self, path, startDate=None, endDate=None, expLabel='', ext='.r', walk=True, include_path=False):
1119 1124
1120 1125 path_empty = True
1121 1126
1122 1127 dateList = []
1123 1128 pathList = []
1124 1129
1125 1130 multi_path = path.split(',')
1126 1131
1127 1132 if not walk:
1128 1133
1129 1134 for single_path in multi_path:
1130 1135
1131 1136 if not os.path.isdir(single_path):
1132 1137 continue
1133 1138
1134 1139 fileList = glob.glob1(single_path, "*"+ext)
1135 1140
1136 1141 if not fileList:
1137 1142 continue
1138 1143
1139 1144 path_empty = False
1140 1145
1141 1146 fileList.sort()
1142 1147
1143 1148 for thisFile in fileList:
1144 1149
1145 1150 if not os.path.isfile(os.path.join(single_path, thisFile)):
1146 1151 continue
1147 1152
1148 1153 if not isRadarFile(thisFile):
1149 1154 continue
1150 1155
1151 1156 if not isFileInDateRange(thisFile, startDate, endDate):
1152 1157 continue
1153 1158
1154 1159 thisDate = getDateFromRadarFile(thisFile)
1155 1160
1156 1161 if thisDate in dateList:
1157 1162 continue
1158 1163
1159 1164 dateList.append(thisDate)
1160 1165 pathList.append(single_path)
1161 1166
1162 1167 else:
1163 1168 for single_path in multi_path:
1164 1169
1165 1170 if not os.path.isdir(single_path):
1166 1171 continue
1167 1172
1168 1173 dirList = []
1169 1174
1170 1175 for thisPath in os.listdir(single_path):
1171 1176
1172 1177 if not os.path.isdir(os.path.join(single_path,thisPath)):
1173 1178 continue
1174 1179
1175 1180 if not isRadarFolder(thisPath):
1176 1181 continue
1177 1182
1178 1183 if not isFolderInDateRange(thisPath, startDate, endDate):
1179 1184 continue
1180 1185
1181 1186 dirList.append(thisPath)
1182 1187
1183 1188 if not dirList:
1184 1189 continue
1185 1190
1186 1191 dirList.sort()
1187 1192
1188 1193 for thisDir in dirList:
1189 1194
1190 1195 datapath = os.path.join(single_path, thisDir, expLabel)
1191 1196 fileList = glob.glob1(datapath, "*"+ext)
1192 1197
1193 1198 if not fileList:
1194 1199 continue
1195 1200
1196 1201 path_empty = False
1197 1202
1198 1203 thisDate = getDateFromRadarFolder(thisDir)
1199 1204
1200 1205 pathList.append(datapath)
1201 1206 dateList.append(thisDate)
1202 1207
1203 1208 dateList.sort()
1204 1209
1205 1210 if walk:
1206 1211 pattern_path = os.path.join(multi_path[0], "[dYYYYDDD]", expLabel)
1207 1212 else:
1208 1213 pattern_path = multi_path[0]
1209 1214
1210 1215 if path_empty:
1211 1216 print "[Reading] No *%s files in %s from %s to %s" %(ext, pattern_path, startDate, endDate)
1212 1217 else:
1213 1218 if not dateList:
1214 1219 print "[Reading] Invalid date range selected [%s - %s]: No *%s files in %s" %(startDate, endDate, ext, path)
1215 1220
1216 1221 if include_path:
1217 1222 return dateList, pathList
1218 1223
1219 1224 return dateList
1220 1225
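# Minimal usage sketch (hypothetical path and dates, for illustration only):
#
#   dates, paths = reader.findDatafiles('/data/rawdata',
#                                       startDate=datetime.date(2014, 1, 1),
#                                       endDate=datetime.date(2014, 1, 31),
#                                       ext='.r', walk=True, include_path=True)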
1221 1226 def setup(self,
1222 1227 path=None,
1223 1228 startDate=None,
1224 1229 endDate=None,
1225 1230 startTime=datetime.time(0,0,0),
1226 1231 endTime=datetime.time(23,59,59),
1227 1232 set=None,
1228 1233 expLabel = "",
1229 1234 ext = None,
1230 1235 online = False,
1231 1236 delay = 60,
1232 1237 walk = True,
1233 1238 getblock = False,
1234 1239 nTxs = 1,
1235 1240 realtime=False):
1236 1241
1237 1242 if path == None:
1238 1243 raise ValueError, "[Reading] The path is not valid"
1239 1244
1240 1245 if ext == None:
1241 1246 ext = self.ext
1242 1247
1243 1248 if online:
1244 1249 print "[Reading] Searching files in online mode..."
1245 1250
1246 1251 for nTries in range( self.nTries ):
1247 1252 fullpath, foldercounter, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext, walk=walk, set=set)
1248 1253
1249 1254 if fullpath:
1250 1255 break
1251 1256
1252 1257 print '[Reading] Waiting %0.2f sec for a valid file in %s: try %02d ...' % (self.delay, path, nTries+1)
1253 1258 sleep( self.delay )
1254 1259
1255 1260 if not(fullpath):
1256 1261 print "[Reading] There isn't any valid file in %s" % path
1257 1262 return
1258 1263
1259 1264 self.year = year
1260 1265 self.doy = doy
1261 1266 self.set = set - 1
1262 1267 self.path = path
1263 1268 self.foldercounter = foldercounter
1264 1269 last_set = None
1265 1270
1266 1271 else:
1267 1272 print "[Reading] Searching files in offline mode ..."
1268 1273 pathList, filenameList = self.__searchFilesOffLine(path, startDate=startDate, endDate=endDate,
1269 1274 startTime=startTime, endTime=endTime,
1270 1275 set=set, expLabel=expLabel, ext=ext,
1271 1276 walk=walk)
1272 1277
1273 1278 if not(pathList):
1274 1279 # print "[Reading] No *%s files in %s (%s - %s)"%(ext, path,
1275 1280 # datetime.datetime.combine(startDate,startTime).ctime(),
1276 1281 # datetime.datetime.combine(endDate,endTime).ctime())
1277 1282
1278 1283 # sys.exit(-1)
1279 1284
1280 1285 self.fileIndex = -1
1281 1286 self.pathList = []
1282 1287 self.filenameList = []
1283 1288 return
1284 1289
1285 1290 self.fileIndex = -1
1286 1291 self.pathList = pathList
1287 1292 self.filenameList = filenameList
1288 1293 file_name = os.path.basename(filenameList[-1])
1289 1294 basename, ext = os.path.splitext(file_name)
1290 1295 last_set = int(basename[-3:])
1291 1296
1292 1297 self.online = online
1293 1298 self.realtime = realtime
1294 1299 self.delay = delay
1295 1300 ext = ext.lower()
1296 1301 self.ext = ext
1297 1302 self.getByBlock = getblock
1298 1303 self.nTxs = nTxs
1299 1304 self.startTime = startTime
1300 1305 self.endTime = endTime
1301 1306
1302 1307 if not(self.setNextFile()):
1303 1308 if (startDate!=None) and (endDate!=None):
1304 1309 print "[Reading] No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
1305 1310 elif startDate != None:
1306 1311 print "[Reading] No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
1307 1312 else:
1308 1313 print "[Reading] No files"
1309 1314
1310 1315 self.fileIndex = -1
1311 1316 self.pathList = []
1312 1317 self.filenameList = []
1313 1318 return
1314 1319
1315 1320 # self.getBasicHeader()
1316 1321
1317 1322 if last_set != None:
1318 1323 self.dataOut.last_block = last_set * self.processingHeaderObj.dataBlocksPerFile + self.basicHeaderObj.dataBlock
1319 1324 return
1320 1325
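# Minimal offline-mode usage sketch (hypothetical path and dates; a concrete
# reader subclass is assumed to provide readBlock/getData):
#
#   reader.setup(path='/data/rawdata',
#                startDate=datetime.date(2014, 1, 1),
#                endDate=datetime.date(2014, 1, 2),
#                startTime=datetime.time(6, 0, 0),
#                endTime=datetime.time(18, 0, 0),
#                ext='.r', walk=True)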
1321 1326 def getBasicHeader(self):
1322 1327
1323 1328 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000. + self.profileIndex * self.radarControllerHeaderObj.ippSeconds
1324 1329
1325 1330 self.dataOut.flagDiscontinuousBlock = self.flagDiscontinuousBlock
1326 1331
1327 1332 self.dataOut.timeZone = self.basicHeaderObj.timeZone
1328 1333
1329 1334 self.dataOut.dstFlag = self.basicHeaderObj.dstFlag
1330 1335
1331 1336 self.dataOut.errorCount = self.basicHeaderObj.errorCount
1332 1337
1333 1338 self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime
1334 1339
1335 1340 self.dataOut.ippSeconds = self.radarControllerHeaderObj.ippSeconds/self.nTxs
1336 1341
1337 1342 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock*self.nTxs
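# When each stored profile holds nTxs transmissions, the effective IPP is
# divided and the profile count multiplied by the same factor, so the block
# duration (nProfiles * ippSeconds) is unchanged.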
1338 1343
1339 1344
1340 1345 def getFirstHeader(self):
1341 1346
1342 1347 raise NotImplementedError
1343 1348
1344 1349 def getData(self):
1345 1350
1346 1351 raise NotImplementedError
1347 1352
1348 1353 def hasNotDataInBuffer(self):
1349 1354
1350 1355 raise NotImplementedError
1351 1356
1352 1357 def readBlock(self):
1353 1358
1354 1359 raise NotImplementedError
1355 1360
1356 1361 def isEndProcess(self):
1357 1362
1358 1363 return self.flagNoMoreFiles
1359 1364
1360 1365 def printReadBlocks(self):
1361 1366
1362 1367 print "[Reading] Number of read blocks per file %04d" %self.nReadBlocks
1363 1368
1364 1369 def printTotalBlocks(self):
1365 1370
1366 1371 print "[Reading] Total number of read blocks %04d" %self.nTotalBlocks
1367 1372
1368 1373 def printNumberOfBlock(self):
1369 1374
1370 1375 if self.flagIsNewBlock:
1371 1376 print "[Reading] Block No. %d/%d -> %s" %(self.nReadBlocks,
1372 1377 self.processingHeaderObj.dataBlocksPerFile,
1373 1378 self.dataOut.datatime.ctime())
1374 1379
1375 1380 def printInfo(self):
1376 1381
1377 1382 if self.__printInfo == False:
1378 1383 return
1379 1384
1380 1385 self.basicHeaderObj.printInfo()
1381 1386 self.systemHeaderObj.printInfo()
1382 1387 self.radarControllerHeaderObj.printInfo()
1383 1388 self.processingHeaderObj.printInfo()
1384 1389
1385 1390 self.__printInfo = False
1386 1391
1387 1392
1388 1393 def run(self, **kwargs):
1389 1394
1390 1395 if not(self.isConfig):
1391 1396
1392 1397 # self.dataOut = dataOut
1393 1398 self.setup(**kwargs)
1394 1399 self.isConfig = True
1395 1400
1396 1401 self.getData()
1397 1402
1398 1403 class JRODataWriter(JRODataIO):
1399 1404
1400 1405 """
1401 1406 This class writes data to processed files (.r or .pdata). The data are
1402 1407 always written block by block.
1403 1408 """
1404 1409
1405 1410 blockIndex = 0
1406 1411
1407 1412 path = None
1408 1413
1409 1414 setFile = None
1410 1415
1411 1416 profilesPerBlock = None
1412 1417
1413 1418 blocksPerFile = None
1414 1419
1415 1420 nWriteBlocks = 0
1416 1421
1417 1422 fileDate = None
1418 1423
1419 1424 def __init__(self, dataOut=None):
1420 1425 raise NotImplementedError
1421 1426
1422 1427
1423 1428 def hasAllDataInBuffer(self):
1424 1429 raise NotImplementedError
1425 1430
1426 1431
1427 1432 def setBlockDimension(self):
1428 1433 raise NotImplementedError
1429 1434
1430 1435
1431 1436 def writeBlock(self):
1432 1437 raise NotImplementedError
1433 1438
1434 1439
1435 1440 def putData(self):
1436 1441 raise NotImplementedError
1437 1442
1438 1443
1439 1444 def getProcessFlags(self):
1440 1445
1441 1446 processFlags = 0
1442 1447
1443 1448 dtype_index = get_dtype_index(self.dtype)
1444 1449 procflag_dtype = get_procflag_dtype(dtype_index)
1445 1450
1446 1451 processFlags += procflag_dtype
1447 1452
1448 1453 if self.dataOut.flagDecodeData:
1449 1454 processFlags += PROCFLAG.DECODE_DATA
1450 1455
1451 1456 if self.dataOut.flagDeflipData:
1452 1457 processFlags += PROCFLAG.DEFLIP_DATA
1453 1458
1454 1459 if self.dataOut.code is not None:
1455 1460 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
1456 1461
1457 1462 if self.dataOut.nCohInt > 1:
1458 1463 processFlags += PROCFLAG.COHERENT_INTEGRATION
1459 1464
1460 1465 if self.dataOut.type == "Spectra":
1461 1466 if self.dataOut.nIncohInt > 1:
1462 1467 processFlags += PROCFLAG.INCOHERENT_INTEGRATION
1463 1468
1464 1469 if self.dataOut.data_dc is not None:
1465 1470 processFlags += PROCFLAG.SAVE_CHANNELS_DC
1466 1471
1467 1472 if self.dataOut.flagShiftFFT:
1468 1473 processFlags += PROCFLAG.SHIFT_FFT_DATA
1469 1474
1470 1475 return processFlags
1471 1476
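# Since each PROCFLAG constant is a distinct single-bit mask and is added at
# most once, the additions above are equivalent to bitwise ORs; a reader can
# test them individually, e.g. (sketch):
#
#   if processFlags & PROCFLAG.DECODE_DATA:
#       pass   # the data were decoded before being written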
1472 1477 def setBasicHeader(self):
1473 1478
1474 1479 self.basicHeaderObj.size = self.basicHeaderSize #bytes
1475 1480 self.basicHeaderObj.version = self.versionFile
1476 1481 self.basicHeaderObj.dataBlock = self.nTotalBlocks
1477 1482
1478 1483 utc = numpy.floor(self.dataOut.utctime)
1479 1484 milisecond = (self.dataOut.utctime - utc)* 1000.0
1480 1485
1481 1486 self.basicHeaderObj.utc = utc
1482 1487 self.basicHeaderObj.miliSecond = milisecond
1483 1488 self.basicHeaderObj.timeZone = self.dataOut.timeZone
1484 1489 self.basicHeaderObj.dstFlag = self.dataOut.dstFlag
1485 1490 self.basicHeaderObj.errorCount = self.dataOut.errorCount
1486 1491
1487 1492 def setFirstHeader(self):
1488 1493 """
1489 1494 Gets a copy of the First Header
1490 1495
1491 1496 Affected:
1492 1497
1493 1498 self.basicHeaderObj
1494 1499 self.systemHeaderObj
1495 1500 self.radarControllerHeaderObj
1496 1501 self.processingHeaderObj
1497 1502
1498 1503 Return:
1499 1504 None
1500 1505 """
1501 1506
1502 1507 raise NotImplementedError
1503 1508
1504 1509 def __writeFirstHeader(self):
1505 1510 """
1506 1511 Writes the first header of the file, i.e. the Basic Header and the Long Header (SystemHeader, RadarControllerHeader, ProcessingHeader)
1507 1512
1508 1513 Affected:
1509 1514 __dataType
1510 1515
1511 1516 Return:
1512 1517 None
1513 1518 """
1514 1519
1515 1520 # COMPUTE PARAMETERS
1516 1521
1517 1522 sizeLongHeader = self.systemHeaderObj.size + self.radarControllerHeaderObj.size + self.processingHeaderObj.size
1518 1523 self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader
1519 1524
1520 1525 self.basicHeaderObj.write(self.fp)
1521 1526 self.systemHeaderObj.write(self.fp)
1522 1527 self.radarControllerHeaderObj.write(self.fp)
1523 1528 self.processingHeaderObj.write(self.fp)
1524 1529
1525 1530 def __setNewBlock(self):
1526 1531 """
1527 1532 If this is a new file it writes the First Header, otherwise it writes only the Basic Header
1528 1533
1529 1534 Return:
1530 1535 0 : if nothing could be written
1531 1536 1 : if the Basic Header or the First Header was written
1532 1537 """
1533 1538 if self.fp == None:
1534 1539 self.setNextFile()
1535 1540
1536 1541 if self.flagIsNewFile:
1537 1542 return 1
1538 1543
1539 1544 if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
1540 1545 self.basicHeaderObj.write(self.fp)
1541 1546 return 1
1542 1547
1543 1548 if not( self.setNextFile() ):
1544 1549 return 0
1545 1550
1546 1551 return 1
1547 1552
1548 1553
1549 1554 def writeNextBlock(self):
1550 1555 """
1551 1556 Selects the next data block and writes it to a file
1552 1557
1553 1558 Return:
1554 1559 0 : if the data block could not be written
1555 1560 1 : if the data block was written
1556 1561 """
1557 1562 if not( self.__setNewBlock() ):
1558 1563 return 0
1559 1564
1560 1565 self.writeBlock()
1561 1566
1562 1567 print "[Writing] Block No. %d/%d" %(self.blockIndex,
1563 1568 self.processingHeaderObj.dataBlocksPerFile)
1564 1569
1565 1570 return 1
1566 1571
1567 1572 def setNextFile(self):
1568 1573 """
1569 1574 Determines the next file to be written
1570 1575
1571 1576 Affected:
1572 1577 self.filename
1573 1578 self.subfolder
1574 1579 self.fp
1575 1580 self.setFile
1576 1581 self.flagIsNewFile
1577 1582
1578 1583 Return:
1579 1584 0 : if the file cannot be written
1580 1585 1 : if the file is ready to be written
1581 1586 """
1582 1587 ext = self.ext
1583 1588 path = self.path
1584 1589
1585 1590 if self.fp != None:
1586 1591 self.fp.close()
1587 1592
1588 1593 timeTuple = time.localtime( self.dataOut.utctime)
1589 1594 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
1590 1595
1591 1596 fullpath = os.path.join( path, subfolder )
1592 1597 setFile = self.setFile
1593 1598
1594 1599 if not( os.path.exists(fullpath) ):
1595 1600 os.mkdir(fullpath)
1596 1601 setFile = -1 #initialize the file-set counter
1597 1602 else:
1598 1603 filesList = os.listdir( fullpath )
1599 1604 if len( filesList ) > 0:
1600 1605 filesList = sorted( filesList, key=str.lower )
1601 1606 filen = filesList[-1]
1602 1607 # the filename should have the following format
1603 1608 # 0 1234 567 89A BCDE (hex)
1604 1609 # x YYYY DDD SSS .ext
1605 1610 if isNumber( filen[8:11] ):
1606 1611 setFile = int( filen[8:11] ) #initialize the file-set counter with the set number of the last file
1607 1612 else:
1608 1613 setFile = -1
1609 1614 else:
1610 1615 setFile = -1 #initialize the file-set counter
1611 1616
1612 1617 setFile += 1
1613 1618
1614 1619 #If this is a new day it resets some values
1615 1620 if self.dataOut.datatime.date() > self.fileDate:
1616 1621 setFile = 0
1617 1622 self.nTotalBlocks = 0
1618 1623
1619 1624 filen = '%s%4.4d%3.3d%3.3d%s' % (self.optchar, timeTuple.tm_year, timeTuple.tm_yday, setFile, ext )
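# e.g. optchar 'D', year 2014, doy 045, set 003 and ext '.pdata' (hypothetical
# values) would produce 'D2014045003.pdata'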
1620 1625
1621 1626 filename = os.path.join( path, subfolder, filen )
1622 1627
1623 1628 fp = open( filename,'wb' )
1624 1629
1625 1630 self.blockIndex = 0
1626 1631
1627 1632 #saving attributes
1628 1633 self.filename = filename
1629 1634 self.subfolder = subfolder
1630 1635 self.fp = fp
1631 1636 self.setFile = setFile
1632 1637 self.flagIsNewFile = 1
1633 1638 self.fileDate = self.dataOut.datatime.date()
1634 1639
1635 1640 self.setFirstHeader()
1636 1641
1637 1642 print '[Writing] Opening file: %s'%self.filename
1638 1643
1639 1644 self.__writeFirstHeader()
1640 1645
1641 1646 return 1
1642 1647
1643 1648 def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=64, set=None, ext=None, datatype=4):
1644 1649 """
1645 1650 Sets the format in which the data will be saved and writes the First Header
1646 1651
1647 1652 Inputs:
1648 1653 path : directory where data will be saved
1649 1654 profilesPerBlock : number of profiles per block
1650 1655 set : initial file set
1651 1656 datatype : An integer number that defines data type:
1652 1657 0 : int8 (1 byte)
1653 1658 1 : int16 (2 bytes)
1654 1659 2 : int32 (4 bytes)
1655 1660 3 : int64 (8 bytes)
1656 1661 4 : float32 (4 bytes)
1657 1662 5 : double64 (8 bytes)
1658 1663
1659 1664 Return:
1660 1665 0 : if the setup failed
1661 1666 1 : if the setup succeeded
1662 1667 """
1663 1668
1664 1669 if ext == None:
1665 1670 ext = self.ext
1666 1671
1667 1672 self.ext = ext.lower()
1668 1673
1669 1674 self.path = path
1670 1675
1671 1676 if set is None:
1672 1677 self.setFile = -1
1673 1678 else:
1674 1679 self.setFile = set - 1
1675 1680
1676 1681 self.blocksPerFile = blocksPerFile
1677 1682
1678 1683 self.profilesPerBlock = profilesPerBlock
1679 1684
1680 1685 self.dataOut = dataOut
1681 1686 self.fileDate = self.dataOut.datatime.date()
1682 1687 #By default
1683 1688 self.dtype = self.dataOut.dtype
1684 1689
1685 1690 if datatype is not None:
1686 1691 self.dtype = get_numpy_dtype(datatype)
1687 1692
1688 1693 if not(self.setNextFile()):
1689 1694 print "[Writing] There isn't a next file"
1690 1695 return 0
1691 1696
1692 1697 self.setBlockDimension()
1693 1698
1694 1699 return 1
1695 1700
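# Minimal usage sketch for a concrete writer subclass (hypothetical path and
# values, shown only to illustrate the parameters documented above):
#
#   writer.setup(dataOut, path='/data/output',
#                blocksPerFile=100, profilesPerBlock=64,
#                set=None, ext='.r', datatype=4)   # datatype 4 -> float32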
1696 1701 def run(self, dataOut, **kwargs):
1697 1702
1698 1703 if not(self.isConfig):
1699 1704
1700 1705 self.setup(dataOut, **kwargs)
1701 1706 self.isConfig = True
1702 1707
1703 1708 self.putData()
1704 1709