Add the FitsReader method and the FitsExp.py test
Daniel Valdez -
r353:f28c6f8a3e79
@@ -0,0 +1,45
1 import os, sys
2
3 path = os.path.split(os.getcwd())[0]
4 sys.path.append(path)
5
6 from controller import *
7
8 desc = "FITS Test"
9 filename = "fitsexp.xml"
10
11 controllerObj = Project()
12
13 controllerObj.setup(id = '191', name='test01', description=desc)
14
15 readUnitConfObj = controllerObj.addReadUnit(datatype='Fits',
16 path='/Users/dsuarez/Remote/d2013043',
17 startDate='2013/02/06',
18 endDate='2013/12/31',
19 startTime='00:30:00',
20 endTime='17:40:59',
21 online=0,
22 delay=3,
23 walk=0)
24
25 #procUnitConfObj0 = controllerObj.addProcUnit(datatype='Voltage', inputId=readUnitConfObj.getId())
26
27 #procUnitConfObj1 = controllerObj.addProcUnit(datatype='SpectraHeis', inputId=procUnitConfObj0.getId())
28 #procUnitConfObj1.addParameter(name='timeInterval', value='5', format='int')
29
30 #opObj11 = procUnitConfObj1.addOperation(name='IncohInt4SpectraHeis', optype='other')
31 #opObj11.addParameter(name='timeInterval', value='1', format='float')
32
33
34
35
36 print "Writing the XML file"
37 controllerObj.writeXml(filename)
38 print "Reading the XML file"
39 controllerObj.readXml(filename)
40
41 controllerObj.createObjects()
42 controllerObj.connectObjects()
43 controllerObj.run()
44
45 No newline at end of file
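Note on running the new test: the first lines of FitsExp.py push the parent folder onto sys.path before importing the controller module, so the script is meant to be launched from its own folder (a test/ directory next to controller.py is an assumption about the layout) with a plain "python FitsExp.py"; it then writes fitsexp.xml, reads it back and runs the project.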
@@ -1,574 +1,624
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JROData.py 173 2012-11-20 15:06:21Z murco $
5 5 '''
6 6
7 7 import os, sys
8 8 import copy
9 9 import numpy
10 10 import datetime
11 11
12 12 from jroheaderIO import SystemHeader, RadarControllerHeader
13 13
14 14 def hildebrand_sekhon(data, navg):
15 15 """
16 16 This method is for the objective determination of the noise level in Doppler spectra. The
17 17 implementation is based on the fact that, for white Gaussian noise, the standard deviation of the
18 18 spectral densities is equal to their mean.
19 19
20 20 Inputs:
21 21 data : spectral densities (numpy array)
22 22 navg : number of averages
23 23
24 24 Return:
25 25 -1 : on any error
26 26 anoise : noise level
27 27 """
28 28
29 29 dataflat = data.copy().reshape(-1)
30 30 dataflat.sort()
31 31 npts = dataflat.size #numbers of points of the data
32 32 npts_noise = 0.2*npts
33 33
34 34 if npts < 32:
35 35 print "error in noise - requires at least 32 points"
36 36 return -1.0
37 37
38 38 dataflat2 = numpy.power(dataflat,2)
39 39
40 40 cs = numpy.cumsum(dataflat)
41 41 cs2 = numpy.cumsum(dataflat2)
42 42
43 43 # data sorted in ascending order
44 44 nmin = int((npts + 7.)/8)
45 45
46 46 for i in range(nmin, npts):
47 47 s = cs[i]
48 48 s2 = cs2[i]
49 49 p = s / float(i);
50 50 p2 = p**2;
51 51 q = s2 / float(i) - p2;
52 52 leftc = p2;
53 53 rightc = q * float(navg);
54 54 R2 = leftc/rightc
55 55
56 56 # Signal detect: R2 < 1 (R2 = leftc/rightc)
57 57 if R2 < 1:
58 58 npts_noise = i
59 59 break
60 60
61 61
62 62 anoise = numpy.average(dataflat[0:npts_noise])
63 63
64 64 return anoise;
65 65
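Note: a minimal sketch of exercising hildebrand_sekhon on synthetic white noise (the exponential draw below is only an assumption used to mimic single-look spectral densities; it is not part of the module):

    import numpy
    spc = numpy.random.exponential(scale=1.0, size=(64, 100))  # fake spectral densities, true noise level = 1.0
    print hildebrand_sekhon(spc, navg=1)                        # noise estimate, expected to be of order 1.0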
66 66 def sorting_bruce(data, navg):
67 67
68 68 data = data.copy()
69 69
70 70 sortdata = numpy.sort(data)
71 71 lenOfData = len(data)
72 72 nums_min = lenOfData/10
73 73
74 74 if (lenOfData/10) > 0:
75 75 nums_min = lenOfData/10
76 76 else:
77 77 nums_min = 0
78 78
79 79 rtest = 1.0 + 1.0/navg
80 80
81 81 sum = 0.
82 82
83 83 sumq = 0.
84 84
85 85 j = 0
86 86
87 87 cont = 1
88 88
89 89 while((cont==1)and(j<lenOfData)):
90 90
91 91 sum += sortdata[j]
92 92
93 93 sumq += sortdata[j]**2
94 94
95 95 j += 1
96 96
97 97 if j > nums_min:
98 98 if ((sumq*j) <= (rtest*sum**2)):
99 99 lnoise = sum / j
100 100 else:
101 101 j = j - 1
102 102 sum = sum - sortdata[j]
103 103 sumq = sumq - sortdata[j]**2
104 104 cont = 0
105 105
106 106 if j == nums_min:
107 107 lnoise = sum /j
108 108
109 109 return lnoise
110 110
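Note: for reference, the acceptance test in the loop above, sumq*j <= rtest*sum**2 with rtest = 1 + 1/navg, is equivalent to

    sumq/j - (sum/j)**2  <=  (sum/j)**2 / navg

i.e. the variance of the points accepted so far must not exceed mean**2/navg, the same white-noise criterion that hildebrand_sekhon applies through its R2 ratio; when the test fails, the last point is removed (this is where the sortdata typo fixed above mattered) and the running mean is returned as the noise level.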
111 111 class JROData:
112 112
113 113 # m_BasicHeader = BasicHeader()
114 114 # m_ProcessingHeader = ProcessingHeader()
115 115
116 116 systemHeaderObj = SystemHeader()
117 117
118 118 radarControllerHeaderObj = RadarControllerHeader()
119 119
120 120 # data = None
121 121
122 122 type = None
123 123
124 124 dtype = None
125 125
126 126 # nChannels = None
127 127
128 128 # nHeights = None
129 129
130 130 nProfiles = None
131 131
132 132 heightList = None
133 133
134 134 channelList = None
135 135
136 136 flagNoData = True
137 137
138 138 flagTimeBlock = False
139 139
140 140 useLocalTime = False
141 141
142 142 utctime = None
143 143
144 144 timeZone = None
145 145
146 146 dstFlag = None
147 147
148 148 errorCount = None
149 149
150 150 blocksize = None
151 151
152 152 nCode = None
153 153
154 154 nBaud = None
155 155
156 156 code = None
157 157
158 158 flagDecodeData = False #assume the data has not been decoded
159 159
160 160 flagDeflipData = False #assume the data has not been deflipped
161 161
162 162 flagShiftFFT = False
163 163
164 164 ippSeconds = None
165 165
166 166 timeInterval = None
167 167
168 168 nCohInt = None
169 169
170 170 noise = None
171 171
172 172 windowOfFilter = 1
173 173
174 174 #Speed of light
175 175 C = 3e8
176 176
177 177 frequency = 49.92e6
178 178
179 179 def __init__(self):
180 180
181 181 raise ValueError, "This class has not been implemented"
182 182
183 183 def copy(self, inputObj=None):
184 184
185 185 if inputObj == None:
186 186 return copy.deepcopy(self)
187 187
188 188 for key in inputObj.__dict__.keys():
189 189 self.__dict__[key] = inputObj.__dict__[key]
190 190
191 191 def deepcopy(self):
192 192
193 193 return copy.deepcopy(self)
194 194
195 195 def isEmpty(self):
196 196
197 197 return self.flagNoData
198 198
199 199 def getNoise(self):
200 200
201 201 raise ValueError, "Not implemented"
202 202
203 203 def getNChannels(self):
204 204
205 205 return len(self.channelList)
206 206
207 207 def getChannelIndexList(self):
208 208
209 209 return range(self.nChannels)
210 210
211 211 def getNHeights(self):
212 212
213 213 return len(self.heightList)
214 214
215 215 def getHeiRange(self, extrapoints=0):
216 216
217 217 heis = self.heightList
218 218 # deltah = self.heightList[1] - self.heightList[0]
219 219 #
220 220 # heis.append(self.heightList[-1])
221 221
222 222 return heis
223 223
224 224 def getltctime(self):
225 225
226 226 if self.useLocalTime:
227 227 return self.utctime - self.timeZone*60
228 228
229 229 return self.utctime
230 230
231 231 def getDatatime(self):
232 232
233 233 datatime = datetime.datetime.utcfromtimestamp(self.ltctime)
234 234 return datatime
235 235
236 236 def getTimeRange(self):
237 237
238 238 datatime = []
239 239
240 240 datatime.append(self.ltctime)
241 241 datatime.append(self.ltctime + self.timeInterval)
242 242
243 243 datatime = numpy.array(datatime)
244 244
245 245 return datatime
246 246
247 247 def getFmax(self):
248 248
249 249 PRF = 1./(self.ippSeconds * self.nCohInt)
250 250
251 251 fmax = PRF/2.
252 252
253 253 return fmax
254 254
255 255 def getVmax(self):
256 256
257 257 _lambda = self.C/self.frequency
258 258
259 259 vmax = self.getFmax() * _lambda
260 260
261 261 return vmax
262 262
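Note: getFmax/getVmax compute fmax = 1/(2*ippSeconds*nCohInt) and vmax = fmax*(C/frequency). A purely illustrative check (ippSeconds = 1e-3 and nCohInt = 1 are assumed values, not taken from this file):

    PRF  = 1./(1e-3 * 1)          # 1000 Hz
    fmax = PRF/2.                 # 500 Hz
    vmax = fmax * (3e8/49.92e6)   # ~3005 m/s, i.e. fmax times the ~6.01 m wavelength at 49.92 MHz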
263 263 nChannels = property(getNChannels, "I'm the 'nChannel' property.")
264 264 channelIndexList = property(getChannelIndexList, "I'm the 'channelIndexList' property.")
265 265 nHeights = property(getNHeights, "I'm the 'nHeights' property.")
266 266 noise = property(getNoise, "I'm the 'noise' property.")
267 267 datatime = property(getDatatime, "I'm the 'datatime' property")
268 268 ltctime = property(getltctime, "I'm the 'ltctime' property")
269 269
270 270 class Voltage(JROData):
271 271
272 272 #data is a 2-dimensional numpy array (channels, heights)
273 273 data = None
274 274
275 275 def __init__(self):
276 276 '''
277 277 Constructor
278 278 '''
279 279
280 280 self.radarControllerHeaderObj = RadarControllerHeader()
281 281
282 282 self.systemHeaderObj = SystemHeader()
283 283
284 284 self.type = "Voltage"
285 285
286 286 self.data = None
287 287
288 288 self.dtype = None
289 289
290 290 # self.nChannels = 0
291 291
292 292 # self.nHeights = 0
293 293
294 294 self.nProfiles = None
295 295
296 296 self.heightList = None
297 297
298 298 self.channelList = None
299 299
300 300 # self.channelIndexList = None
301 301
302 302 self.flagNoData = True
303 303
304 304 self.flagTimeBlock = False
305 305
306 306 self.utctime = None
307 307
308 308 self.timeZone = None
309 309
310 310 self.dstFlag = None
311 311
312 312 self.errorCount = None
313 313
314 314 self.nCohInt = None
315 315
316 316 self.blocksize = None
317 317
318 318 self.flagDecodeData = False #assume the data has not been decoded
319 319
320 320 self.flagDeflipData = False #assume the data has not been deflipped
321 321
322 322 self.flagShiftFFT = False
323 323
324 324
325 325 def getNoisebyHildebrand(self):
326 326 """
327 327 Determine the noise level using the Hildebrand-Sekhon method
328 328
329 329 Return:
330 330 noiselevel
331 331 """
332 332
333 333 for channel in range(self.nChannels):
334 334 daux = self.data_spc[channel,:,:]
335 335 self.noise[channel] = hildebrand_sekhon(daux, self.nCohInt)
336 336
337 337 return self.noise
338 338
339 339 def getNoise(self, type = 1):
340 340
341 341 self.noise = numpy.zeros(self.nChannels)
342 342
343 343 if type == 1:
344 344 noise = self.getNoisebyHildebrand()
345 345
346 346 return 10*numpy.log10(noise)
347 347
348 348 class Spectra(JROData):
349 349
350 350 #data_spc is a 3-dimensional numpy array (channels, profiles, heights)
351 351 data_spc = None
352 352
353 353 #data_cspc is a 3-dimensional numpy array (channels, pairs, heights)
354 354 data_cspc = None
355 355
356 356 #data_dc is a 2-dimensional numpy array (channels, heights)
357 357 data_dc = None
358 358
359 359 nFFTPoints = None
360 360
361 361 nPairs = None
362 362
363 363 pairsList = None
364 364
365 365 nIncohInt = None
366 366
367 367 wavelength = None #needed to compute the velocity range from the frequency range
368 368
369 369 nCohInt = None #required to determine the value of timeInterval
370 370
371 371 def __init__(self):
372 372 '''
373 373 Constructor
374 374 '''
375 375
376 376 self.radarControllerHeaderObj = RadarControllerHeader()
377 377
378 378 self.systemHeaderObj = SystemHeader()
379 379
380 380 self.type = "Spectra"
381 381
382 382 # self.data = None
383 383
384 384 self.dtype = None
385 385
386 386 # self.nChannels = 0
387 387
388 388 # self.nHeights = 0
389 389
390 390 self.nProfiles = None
391 391
392 392 self.heightList = None
393 393
394 394 self.channelList = None
395 395
396 396 # self.channelIndexList = None
397 397
398 398 self.flagNoData = True
399 399
400 400 self.flagTimeBlock = False
401 401
402 402 self.utctime = None
403 403
404 404 self.nCohInt = None
405 405
406 406 self.nIncohInt = None
407 407
408 408 self.blocksize = None
409 409
410 410 self.nFFTPoints = None
411 411
412 412 self.wavelength = None
413 413
414 414 self.flagDecodeData = False #assume the data has not been decoded
415 415
416 416 self.flagDeflipData = False #assume the data has not been deflipped
417 417
418 418 self.flagShiftFFT = False
419 419
420 420 def getNoisebyHildebrand(self):
421 421 """
422 422 Determine the noise level using the Hildebrand-Sekhon method
423 423
424 424 Return:
425 425 noiselevel
426 426 """
427 427
428 428 for channel in range(self.nChannels):
429 429 daux = self.data_spc[channel,:,:]
430 430 self.noise[channel] = hildebrand_sekhon(daux, self.nIncohInt)
431 431
432 432 return self.noise
433 433
434 434 def getNoisebyWindow(self, heiIndexMin=0, heiIndexMax=-1, freqIndexMin=0, freqIndexMax=-1):
435 435 """
436 436 Determine the noise of each channel using the window given by the coordinates
437 437 (heiIndexMin, freqIndexMin) to (heiIndexMax, freqIndexMax)
438 438
439 439 Inputs:
440 440 heiIndexMin: lower limit of the height axis
441 441 heiIndexMax: upper limit of the height axis
442 442 freqIndexMin: lower limit of the frequency axis
443 443 freqIndexMax: upper limit of the frequency axis
444 444 """
445 445
446 446 data = self.data_spc[:, heiIndexMin:heiIndexMax, freqIndexMin:freqIndexMax]
447 447
448 448 for channel in range(self.nChannels):
449 449 daux = data[channel,:,:]
450 450 self.noise[channel] = numpy.average(daux)
451 451
452 452 return self.noise
453 453
454 454 def getNoisebySort(self):
455 455
456 456 for channel in range(self.nChannels):
457 457 daux = self.data_spc[channel,:,:]
458 458 self.noise[channel] = sorting_bruce(daux, self.nIncohInt)
459 459
460 460 return self.noise
461 461
462 462 def getNoise(self, type = 1):
463 463
464 464 self.noise = numpy.zeros(self.nChannels)
465 465
466 466 if type == 1:
467 467 noise = self.getNoisebyHildebrand()
468 468
469 469 if type == 2:
470 470 noise = self.getNoisebySort()
471 471
472 472 if type == 3:
473 473 noise = self.getNoisebyWindow()
474 474
475 475 return noise
476 476
477 477
478 478 def getFreqRange(self, extrapoints=0):
479 479
480 480 deltafreq = self.getFmax() / self.nFFTPoints
481 481 freqrange = deltafreq*(numpy.arange(self.nFFTPoints+extrapoints)-self.nFFTPoints/2.) - deltafreq/2
482 482
483 483 return freqrange
484 484
485 485 def getVelRange(self, extrapoints=0):
486 486
487 487 deltav = self.getVmax() / self.nFFTPoints
488 488 velrange = deltav*(numpy.arange(self.nFFTPoints+extrapoints)-self.nFFTPoints/2.) - deltav/2
489 489
490 490 return velrange
491 491
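Note: a small sketch of how these axes are typically consumed when plotting a spectrum (spectraObj is a hypothetical, fully initialised Spectra instance; getFmax/getVmax require ippSeconds, nCohInt and frequency to be set):

    freqrange = spectraObj.getFreqRange()   # nFFTPoints bins centred around 0 Hz
    velrange  = spectraObj.getVelRange()    # the same bins expressed as velocities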
492 492 def getNPairs(self):
493 493
494 494 return len(self.pairsList)
495 495
496 496 def getPairsIndexList(self):
497 497
498 498 return range(self.nPairs)
499 499
500 500 def getNormFactor(self):
501 501 pwcode = 1
502 502 if self.flagDecodeData:
503 503 pwcode = numpy.sum(self.code[0]**2)
504 504 normFactor = min(self.nFFTPoints,self.nProfiles)*self.nIncohInt*self.nCohInt*pwcode
505 505
506 506 return normFactor
507 507
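Note: the normalization factor above is min(nFFTPoints, nProfiles) * nIncohInt * nCohInt * pwcode, where pwcode = sum(code[0]**2) only when flagDecodeData is set. With the assumed values nFFTPoints = nProfiles = 128, nIncohInt = 10, nCohInt = 1 and no decoding, normFactor = 128 * 10 * 1 * 1 = 1280.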
508 508 def getFlagCspc(self):
509 509
510 510 if self.data_cspc == None:
511 511 return True
512 512
513 513 return False
514 514
515 515 def getFlagDc(self):
516 516
517 517 if self.data_dc == None:
518 518 return True
519 519
520 520 return False
521 521
522 522 nPairs = property(getNPairs, "I'm the 'nPairs' property.")
523 523 pairsIndexList = property(getPairsIndexList, "I'm the 'pairsIndexList' property.")
524 524 normFactor = property(getNormFactor, "I'm the 'getNormFactor' property.")
525 525 flag_cspc = property(getFlagCspc)
526 526 flag_dc = property(getFlagDc)
527 527
528 528 class SpectraHeis(JROData):
529 529
530 530 data_spc = None
531 531
532 532 data_cspc = None
533 533
534 534 data_dc = None
535 535
536 536 nFFTPoints = None
537 537
538 538 nPairs = None
539 539
540 540 pairsList = None
541 541
542 542 nIncohInt = None
543 543
544 544 def __init__(self):
545 545
546 546 self.radarControllerHeaderObj = RadarControllerHeader()
547 547
548 548 self.systemHeaderObj = SystemHeader()
549 549
550 550 self.type = "SpectraHeis"
551 551
552 552 self.dtype = None
553 553
554 554 # self.nChannels = 0
555 555
556 556 # self.nHeights = 0
557 557
558 558 self.nProfiles = None
559 559
560 560 self.heightList = None
561 561
562 562 self.channelList = None
563 563
564 564 # self.channelIndexList = None
565 565
566 566 self.flagNoData = True
567 567
568 568 self.flagTimeBlock = False
569 569
570 570 self.nPairs = 0
571 571
572 572 self.utctime = None
573 573
574 574 self.blocksize = None
575
576 class Fits:
577
578 def __init__(self):
579 self.useLocalTime = False
580 self.utctime = None
581 self.timeZone = None
582 self.ltctime = None
583 self.timeInterval = None
584 self.header = None
585 self.data_header = None
586 self.data = None
587 self.datatime = None
588 self.flagNoData = False
589 self.expName = ''
590 self.nChannels = None
591 self.nSamples = None
592 self.dataBlocksPerFile = None
593 self.comments = ''
594
595
596 def getltctime(self):
597
598 if self.useLocalTime:
599 return self.utctime - self.timeZone*60
600
601 return self.utctime
602
603 def getDatatime(self):
604
605 datatime = datetime.datetime.utcfromtimestamp(self.ltctime)
606 return datatime
607
608 def getTimeRange(self):
609
610 datatime = []
611
612 datatime.append(self.ltctime)
613 datatime.append(self.ltctime + self.timeInterval)
614
615 datatime = numpy.array(datatime)
616
617 return datatime
618
619 def isEmpty(self):
620
621 return self.flagNoData
622
623 datatime = property(getDatatime, "I'm the 'datatime' property")
624 ltctime = property(getltctime, "I'm the 'ltctime' property") No newline at end of file
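Note: a minimal sketch of the new Fits container added in this commit (the numeric values are assumptions chosen only to show how the time properties behave):

    fitsObj = Fits()
    fitsObj.utctime = 1360108800          # 2013/02/06 00:00:00 UTC, assumed
    fitsObj.timeZone = 300                # minutes west of UTC, assumed
    fitsObj.timeInterval = 60
    fitsObj.useLocalTime = True
    print fitsObj.ltctime                 # utctime - timeZone*60
    print fitsObj.datatime                # datetime built from ltctime
    print fitsObj.getTimeRange()          # numpy array [ltctime, ltctime + timeInterval]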
@@ -1,2921 +1,3313
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JRODataIO.py 169 2012-11-19 21:57:03Z murco $
5 5 '''
6 6
7 7 import os, sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import time, datetime
13 13 from xml.etree.ElementTree import Element, SubElement, ElementTree
14 14 try:
15 15 import pyfits
16 16 except:
17 17 print "pyfits module could not be imported, it must be installed to save files in FITS format"
18 18
19 19 from jrodata import *
20 20 from jroheaderIO import *
21 21 from jroprocessing import *
22 22
23 23 LOCALTIME = True #-18000
24 24
25 25 def isNumber(str):
26 26 """
27 27 Checks whether the characters of a string can be converted to a number.
28 28
29 29 Exceptions:
30 30 If the given string cannot be converted to a number
31 31 Input:
32 32 str, the string to be analyzed to determine whether it can be converted to a number
33 33
34 34 Return:
35 35 True : if the string is numeric
36 36 False : if it is not a numeric string
37 37 """
38 38 try:
39 39 float( str )
40 40 return True
41 41 except:
42 42 return False
43 43
44 44 def isThisFileinRange(filename, startUTSeconds, endUTSeconds):
45 45 """
46 46 This function determines whether or not a data file falls inside the specified date range.
47 47
48 48 Inputs:
49 49 filename : full name of the data file in Jicamarca format (.r)
50 50
51 51 startUTSeconds : start date of the selected range, given in
52 52 seconds counted from 01/01/1970.
53 53 endUTSeconds : end date of the selected range, given in
54 54 seconds counted from 01/01/1970.
55 55
56 56 Return:
57 57 Boolean : returns True if the data file contains data inside the specified
58 58 date range, otherwise returns False.
59 59
60 60 Exceptions:
61 61 If the file does not exist or cannot be opened
62 62 If the header cannot be read.
63 63
64 64 """
65 65 basicHeaderObj = BasicHeader(LOCALTIME)
66 66
67 67 try:
68 68 fp = open(filename,'rb')
69 69 except:
70 70 raise IOError, "The file %s can't be opened" %(filename)
71 71
72 72 sts = basicHeaderObj.read(fp)
73 73 fp.close()
74 74
75 75 if not(sts):
76 76 print "Skipping the file %s because it does not have a valid header" %(filename)
77 77 return 0
78 78
79 79 if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
80 80 return 0
81 81
82 82 return 1
83 83
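Note: a minimal sketch of calling isThisFileinRange (the path and dates are hypothetical; calendar.timegm is just one way to build the epoch-second bounds):

    import calendar, datetime
    start = calendar.timegm(datetime.datetime(2013, 2, 6, 0, 30, 0).timetuple())
    end   = calendar.timegm(datetime.datetime(2013, 2, 6, 17, 40, 59).timetuple())
    if isThisFileinRange('/data/d2013043/D2013043001.r', start, end):
        print "file is inside the requested range"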
84 84 def isFileinThisTime(filename, startTime, endTime):
85 85 """
86 86 Returns the datetime of the file's first block if the file falls inside the specified time range.
87 87
88 88 Inputs:
89 89 filename : full name of the data file in Jicamarca format (.r)
90 90
91 91 startTime : start time of the selected range, as datetime.time
92 92
93 93 endTime : end time of the selected range, as datetime.time
94 94
95 95 Return:
96 96 thisDatetime : the datetime of the file if it contains data inside the
97 97 specified time range, otherwise None.
98 98
99 99 Exceptions:
100 100 If the file does not exist or cannot be opened
101 101 If the header cannot be read.
102 102
103 103 """
104 104
105 105
106 106 try:
107 107 fp = open(filename,'rb')
108 108 except:
109 109 raise IOError, "The file %s can't be opened" %(filename)
110 110
111 111 basicHeaderObj = BasicHeader(LOCALTIME)
112 112 sts = basicHeaderObj.read(fp)
113 113 fp.close()
114 114
115 115 thisDatetime = basicHeaderObj.datatime
116 116 thisTime = basicHeaderObj.datatime.time()
117 117
118 118 if not(sts):
119 119 print "Skipping the file %s because it does not have a valid header" %(filename)
120 120 return None
121 121
122 122 if not ((startTime <= thisTime) and (endTime > thisTime)):
123 123 return None
124 124
125 125 return thisDatetime
126 126
127 127 def getlastFileFromPath(path, ext):
128 128 """
129 129 Filters the file list, keeping only the files that match the "PYYYYDDDSSS.ext" format,
130 130 and returns the last file of the remaining list.
131 131
132 132 Input:
133 133 fileList : list containing all the files (without path) of a given folder
134 134 ext : extension of the files contained in the folder
135 135
136 136 Return:
137 137 The last file of the given folder, without the path.
138 138 """
139 139 validFilelist = []
140 140 fileList = os.listdir(path)
141 141
142 142 # 0 1234 567 89A BCDE
143 143 # H YYYY DDD SSS .ext
144 144
145 145 for file in fileList:
146 146 try:
147 147 year = int(file[1:5])
148 148 doy = int(file[5:8])
149 149
150 150
151 151 except:
152 152 continue
153 153
154 154 if (os.path.splitext(file)[-1].lower() != ext.lower()):
155 155 continue
156 156
157 157 validFilelist.append(file)
158 158
159 159 if validFilelist:
160 160 validFilelist = sorted( validFilelist, key=str.lower )
161 161 return validFilelist[-1]
162 162
163 163 return None
164 164
165 165 def checkForRealPath(path, foldercounter, year, doy, set, ext):
166 166 """
167 167 Since Linux is case sensitive, checkForRealPath finds the correct name of a path by
168 168 trying several uppercase/lowercase name combinations in order to determine
169 169 the exact path of a given file.
170 170
171 171 Example :
172 172 the correct file name is .../.../D2009307/P2009307367.ext
173 173
174 174 The function then tries the following combinations
175 175 .../.../y2009307367.ext
.../.../Y2009307367.ext
.../.../x2009307/y2009307367.ext
.../.../x2009307/Y2009307367.ext
.../.../X2009307/y2009307367.ext
.../.../X2009307/Y2009307367.ext
181 181 the last combination being, in this case, identical to the file being searched for
182 182
183 183 Return:
184 184 If the right combination is found, it returns the full path and the file name;
185 185 otherwise it returns None as the path and the last uppercase name combination
186 186 as the filename
187 187 """
188 188 fullfilename = None
189 189 find_flag = False
190 190 filename = None
191 191
192 192 prefixDirList = [None,'d','D']
193 193 if ext.lower() == ".r": #voltage
194 194 prefixFileList = ['d','D']
195 195 elif ext.lower() == ".pdata": #spectra
196 196 prefixFileList = ['p','P']
197 197 else:
198 198 return None, filename
199 199
200 200 #loop over the possible combinations
201 201 for prefixDir in prefixDirList:
202 202 thispath = path
203 203 if prefixDir != None:
204 204 #build the directory name xYYYYDDD (x=d or x=D)
205 205 if foldercounter == 0:
206 206 thispath = os.path.join(path, "%s%04d%03d" % ( prefixDir, year, doy ))
207 207 else:
208 208 thispath = os.path.join(path, "%s%04d%03d_%02d" % ( prefixDir, year, doy , foldercounter))
209 209 for prefixFile in prefixFileList: #loop over the two possible "D" combinations
210 210 filename = "%s%04d%03d%03d%s" % ( prefixFile, year, doy, set, ext ) #build the file name xYYYYDDDSSS.ext
211 211 fullfilename = os.path.join( thispath, filename ) #build the full path
212 212
213 213 if os.path.exists( fullfilename ): #check that it exists
214 214 find_flag = True
215 215 break
216 216 if find_flag:
217 217 break
218 218
219 219 if not(find_flag):
220 220 return None, filename
221 221
222 222 return fullfilename, filename
223 223
224 224 def isDoyFolder(folder):
225 225 try:
226 226 year = int(folder[1:5])
227 227 except:
228 228 return 0
229 229
230 230 try:
231 231 doy = int(folder[5:8])
232 232 except:
233 233 return 0
234 234
235 235 return 1
236 236
237 237 class JRODataIO:
238 238
239 239 c = 3E8
240 240
241 241 isConfig = False
242 242
243 243 basicHeaderObj = BasicHeader(LOCALTIME)
244 244
245 245 systemHeaderObj = SystemHeader()
246 246
247 247 radarControllerHeaderObj = RadarControllerHeader()
248 248
249 249 processingHeaderObj = ProcessingHeader()
250 250
251 251 online = 0
252 252
253 253 dtype = None
254 254
255 255 pathList = []
256 256
257 257 filenameList = []
258 258
259 259 filename = None
260 260
261 261 ext = None
262 262
263 263 flagIsNewFile = 1
264 264
265 265 flagTimeBlock = 0
266 266
267 267 flagIsNewBlock = 0
268 268
269 269 fp = None
270 270
271 271 firstHeaderSize = 0
272 272
273 273 basicHeaderSize = 24
274 274
275 275 versionFile = 1103
276 276
277 277 fileSize = None
278 278
279 279 ippSeconds = None
280 280
281 281 fileSizeByHeader = None
282 282
283 283 fileIndex = None
284 284
285 285 profileIndex = None
286 286
287 287 blockIndex = None
288 288
289 289 nTotalBlocks = None
290 290
291 291 maxTimeStep = 30
292 292
293 293 lastUTTime = None
294 294
295 295 datablock = None
296 296
297 297 dataOut = None
298 298
299 299 blocksize = None
300 300
301 301 def __init__(self):
302 302
303 303 raise ValueError, "Not implemented"
304 304
305 305 def run(self):
306 306
307 307 raise ValueError, "Not implemented"
308 308
309 309 def getOutput(self):
310 310
311 311 return self.dataOut
312 312
313 313 class JRODataReader(JRODataIO, ProcessingUnit):
314 314
315 315 nReadBlocks = 0
316 316
317 317 delay = 10 #number of seconds to wait for a new file
318 318
319 319 nTries = 3 #number of retries
320 320
321 321 nFiles = 3 #number of files for searching
322 322
323 323 path = None
324 324
325 325 foldercounter = 0
326 326
327 327 flagNoMoreFiles = 0
328 328
329 329 datetimeList = []
330 330
331 331 __isFirstTimeOnline = 1
332 332
333 333 __printInfo = True
334 334
335 335 profileIndex = None
336 336
337 337 def __init__(self):
338 338
339 339 """
340 340
341 341 """
342 342
343 343 raise ValueError, "This method has not been implemented"
344 344
345 345
346 346 def createObjByDefault(self):
347 347 """
348 348
349 349 """
350 350 raise ValueError, "This method has not been implemented"
351 351
352 352 def getBlockDimension(self):
353 353
354 354 raise ValueError, "No implemented"
355 355
356 356 def __searchFilesOffLine(self,
357 357 path,
358 358 startDate,
359 359 endDate,
360 360 startTime=datetime.time(0,0,0),
361 361 endTime=datetime.time(23,59,59),
362 362 set=None,
363 363 expLabel='',
364 364 ext='.r',
365 365 walk=True):
366 366
367 367 pathList = []
368 368
369 369 if not walk:
370 370 pathList.append(path)
371 371
372 372 else:
373 373 dirList = []
374 374 for thisPath in os.listdir(path):
375 375 if not os.path.isdir(os.path.join(path,thisPath)):
376 376 continue
377 377 if not isDoyFolder(thisPath):
378 378 continue
379 379
380 380 dirList.append(thisPath)
381 381
382 382 if not(dirList):
383 383 return None, None
384 384
385 385 thisDate = startDate
386 386
387 387 while(thisDate <= endDate):
388 388 year = thisDate.timetuple().tm_year
389 389 doy = thisDate.timetuple().tm_yday
390 390
391 391 matchlist = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy) + '*')
392 392 if len(matchlist) == 0:
393 393 thisDate += datetime.timedelta(1)
394 394 continue
395 395 for match in matchlist:
396 396 pathList.append(os.path.join(path,match,expLabel))
397 397
398 398 thisDate += datetime.timedelta(1)
399 399
400 400 if pathList == []:
401 401 print "No folders were found for the date range: %s-%s" %(startDate, endDate)
402 402 return None, None
403 403
404 404 print "%d folder(s) found for the date range: %s - %s" %(len(pathList), startDate, endDate)
405 405
406 406 filenameList = []
407 407 datetimeList = []
408 408
409 409 for i in range(len(pathList)):
410 410
411 411 thisPath = pathList[i]
412 412
413 413 fileList = glob.glob1(thisPath, "*%s" %ext)
414 414 fileList.sort()
415 415
416 416 for file in fileList:
417 417
418 418 filename = os.path.join(thisPath,file)
419 419 thisDatetime = isFileinThisTime(filename, startTime, endTime)
420 420
421 421 if not(thisDatetime):
422 422 continue
423 423
424 424 filenameList.append(filename)
425 425 datetimeList.append(thisDatetime)
426 426
427 427 if not(filenameList):
428 428 print "No files were found for the time range %s - %s" %(startTime, endTime)
429 429 return None, None
430 430
431 431 print "%d file(s) found for the time range: %s - %s" %(len(filenameList), startTime, endTime)
432 432 print
433 433
434 434 for i in range(len(filenameList)):
435 435 print "%s -> [%s]" %(filenameList[i], datetimeList[i].ctime())
436 436
437 437 self.filenameList = filenameList
438 438 self.datetimeList = datetimeList
439 439
440 440 return pathList, filenameList
441 441
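Note: the offline search walks the requested dates and matches doy folders with a pattern of the form '?YYYYDDD*'; a standalone sketch of that matching step (the folder names are hypothetical):

    import fnmatch, datetime
    dirList = ['d2013043', 'D2013044', 'notes']
    thisDate = datetime.date(2013, 2, 12)     # day-of-year 043 of 2013
    year, doy = thisDate.timetuple().tm_year, thisDate.timetuple().tm_yday
    print fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year, doy) + '*')   # ['d2013043']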
442 442 def __searchFilesOnLine(self, path, expLabel = "", ext = None, walk=True):
443 443
444 444 """
445 445 Searches for the last file of the last folder (whether or not determined by startDateTime) and
446 446 returns the file found along with other data.
447 447
448 448 Input:
449 449 path : folder containing the files that hold the data
450 450
451 451 expLabel : name of the sub-experiment (subfolder)
452 452
453 453 ext : extension of the files
454 454
455 455 walk : when enabled, the search descends into the doy subfolders (doypath)
456 456
457 457 Return:
458 458 directory : the directory where the found file is located
459 459 filename : the last file of the given folder
460 460 year : the year
461 461 doy : the day of the year
462 462 set : the set number of the file
463 463
464 464
465 465 """
466 466 dirList = []
467 467
468 468 if not walk:
469 469 fullpath = path
470 470
471 471 else:
472 472 #keep only the directories
473 473 for thisPath in os.listdir(path):
474 474 if not os.path.isdir(os.path.join(path,thisPath)):
475 475 continue
476 476 if not isDoyFolder(thisPath):
477 477 continue
478 478
479 479 dirList.append(thisPath)
480 480
481 481 if not(dirList):
482 482 return None, None, None, None, None
483 483
484 484 dirList = sorted( dirList, key=str.lower )
485 485
486 486 doypath = dirList[-1]
487 487 foldercounter = int(doypath.split('_')[1]) if len(doypath.split('_'))>1 else 0
488 488 fullpath = os.path.join(path, doypath, expLabel)
489 489
490 490
491 491 print "%s folder was found: " %(fullpath )
492 492
493 493 filename = getlastFileFromPath(fullpath, ext)
494 494
495 495 if not(filename):
496 496 return None, None, None, None, None
497 497
498 498 print "%s file was found" %(filename)
499 499
500 500 if not(self.__verifyFile(os.path.join(fullpath, filename))):
501 501 return None, None, None, None, None
502 502
503 503 year = int( filename[1:5] )
504 504 doy = int( filename[5:8] )
505 505 set = int( filename[8:11] )
506 506
507 507 return fullpath, foldercounter, filename, year, doy, set
508 508
509 509 def __setNextFileOffline(self):
510 510
511 511 idFile = self.fileIndex
512 512
513 513 while (True):
514 514 idFile += 1
515 515 if not(idFile < len(self.filenameList)):
516 516 self.flagNoMoreFiles = 1
517 517 print "No more Files"
518 518 return 0
519 519
520 520 filename = self.filenameList[idFile]
521 521
522 522 if not(self.__verifyFile(filename)):
523 523 continue
524 524
525 525 fileSize = os.path.getsize(filename)
526 526 fp = open(filename,'rb')
527 527 break
528 528
529 529 self.flagIsNewFile = 1
530 530 self.fileIndex = idFile
531 531 self.filename = filename
532 532 self.fileSize = fileSize
533 533 self.fp = fp
534 534
535 535 print "Setting the file: %s"%self.filename
536 536
537 537 return 1
538 538
539 539 def __setNextFileOnline(self):
540 540 """
541 541 Searches, within a specific folder, for the next file that has enough data to be read; if
542 542 no valid file is found it waits for a given time and then searches among the next n possible
543 543 files.
544 544
545 545 Affected:
546 546 self.flagIsNewFile
547 547 self.filename
548 548 self.fileSize
549 549 self.fp
550 550 self.set
551 551 self.flagNoMoreFiles
552 552
553 553 Return:
554 554 0 : if, after searching for the next valid file, none could be found
555 555 1 : if the file was opened successfully and is ready to be read
556 556
557 557 Exceptions:
558 558 If a given file cannot be opened
559 559 """
560 560 nFiles = 0
561 561 fileOk_flag = False
562 562 firstTime_flag = True
563 563
564 564 self.set += 1
565 565
566 566 if self.set > 999:
567 567 self.set = 0
568 568 self.foldercounter += 1
569 569
570 570 #look for the first available file
571 571 fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
572 572 if fullfilename:
573 573 if self.__verifyFile(fullfilename, False):
574 574 fileOk_flag = True
575 575
576 576 #if no file is found, wait and then search again
577 577 if not(fileOk_flag):
578 578 for nFiles in range(self.nFiles+1): #search among the next self.nFiles+1 possible files
579 579
580 580 if firstTime_flag: #on the first pass, retry self.nTries times
581 581 tries = self.nTries
582 582 else:
583 583 tries = 1 #after the first pass, try only once
584 584
585 585 for nTries in range( tries ):
586 586 if firstTime_flag:
587 587 print "\tWaiting %0.2f sec for the file \"%s\" , try %03d ..." % ( self.delay, filename, nTries+1 )
588 588 time.sleep( self.delay )
589 589 else:
590 590 print "\tSearching next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext)
591 591
592 592 fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
593 593 if fullfilename:
594 594 if self.__verifyFile(fullfilename):
595 595 fileOk_flag = True
596 596 break
597 597
598 598 if fileOk_flag:
599 599 break
600 600
601 601 firstTime_flag = False
602 602
603 603 print "\tSkipping the file \"%s\" because the file doesn't exist" % filename
604 604 self.set += 1
605 605
606 606 if nFiles == (self.nFiles-1): #if the searched file is not found, move on and search in the next folder
607 607 self.set = 0
608 608 self.doy += 1
609 609 self.foldercounter = 0
610 610
611 611 if fileOk_flag:
612 612 self.fileSize = os.path.getsize( fullfilename )
613 613 self.filename = fullfilename
614 614 self.flagIsNewFile = 1
615 615 if self.fp != None: self.fp.close()
616 616 self.fp = open(fullfilename, 'rb')
617 617 self.flagNoMoreFiles = 0
618 618 print 'Setting the file: %s' % fullfilename
619 619 else:
620 620 self.fileSize = 0
621 621 self.filename = None
622 622 self.flagIsNewFile = 0
623 623 self.fp = None
624 624 self.flagNoMoreFiles = 1
625 625 print 'No more Files'
626 626
627 627 return fileOk_flag
628 628
629 629
630 630 def setNextFile(self):
631 631 if self.fp != None:
632 632 self.fp.close()
633 633
634 634 if self.online:
635 635 newFile = self.__setNextFileOnline()
636 636 else:
637 637 newFile = self.__setNextFileOffline()
638 638
639 639 if not(newFile):
640 640 return 0
641 641
642 642 self.__readFirstHeader()
643 643 self.nReadBlocks = 0
644 644 return 1
645 645
646 646 def __waitNewBlock(self):
647 647 """
648 648 Returns 1 if a new data block was found, 0 otherwise.
649 649
650 650 If the reading mode is offline it always returns 0
651 651 """
652 652 if not self.online:
653 653 return 0
654 654
655 655 if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
656 656 return 0
657 657
658 658 currentPointer = self.fp.tell()
659 659
660 660 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
661 661
662 662 for nTries in range( self.nTries ):
663 663
664 664 self.fp.close()
665 665 self.fp = open( self.filename, 'rb' )
666 666 self.fp.seek( currentPointer )
667 667
668 668 self.fileSize = os.path.getsize( self.filename )
669 669 currentSize = self.fileSize - currentPointer
670 670
671 671 if ( currentSize >= neededSize ):
672 672 self.__rdBasicHeader()
673 673 return 1
674 674
675 675 print "\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
676 676 time.sleep( self.delay )
677 677
678 678
679 679 return 0
680 680
681 681 def __jumpToLastBlock(self):
682 682
683 683 if not(self.__isFirstTimeOnline):
684 684 return
685 685
686 686 csize = self.fileSize - self.fp.tell()
687 687
688 688 #skip the first data block
689 689 if csize > self.processingHeaderObj.blockSize:
690 690 self.fp.seek(self.fp.tell() + self.processingHeaderObj.blockSize)
691 691 else:
692 692 return
693 693
694 694 csize = self.fileSize - self.fp.tell()
695 695 neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize
696 696 factor = int(csize/neededsize)
697 697 if factor > 0:
698 698 self.fp.seek(self.fp.tell() + factor*neededsize)
699 699
700 700 self.flagIsNewFile = 0
701 701 self.__isFirstTimeOnline = 0
702 702
703 703
704 704 def __setNewBlock(self):
705 705
706 706 if self.fp == None:
707 707 return 0
708 708
709 709 if self.online:
710 710 self.__jumpToLastBlock()
711 711
712 712 if self.flagIsNewFile:
713 713 return 1
714 714
715 715 self.lastUTTime = self.basicHeaderObj.utc
716 716 currentSize = self.fileSize - self.fp.tell()
717 717 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
718 718
719 719 if (currentSize >= neededSize):
720 720 self.__rdBasicHeader()
721 721 return 1
722 722
723 723 if self.__waitNewBlock():
724 724 return 1
725 725
726 726 if not(self.setNextFile()):
727 727 return 0
728 728
729 729 deltaTime = self.basicHeaderObj.utc - self.lastUTTime #
730 730
731 731 self.flagTimeBlock = 0
732 732
733 733 if deltaTime > self.maxTimeStep:
734 734 self.flagTimeBlock = 1
735 735
736 736 return 1
737 737
738 738
739 739 def readNextBlock(self):
740 740 if not(self.__setNewBlock()):
741 741 return 0
742 742
743 743 if not(self.readBlock()):
744 744 return 0
745 745
746 746 return 1
747 747
748 748 def __rdProcessingHeader(self, fp=None):
749 749 if fp == None:
750 750 fp = self.fp
751 751
752 752 self.processingHeaderObj.read(fp)
753 753
754 754 def __rdRadarControllerHeader(self, fp=None):
755 755 if fp == None:
756 756 fp = self.fp
757 757
758 758 self.radarControllerHeaderObj.read(fp)
759 759
760 760 def __rdSystemHeader(self, fp=None):
761 761 if fp == None:
762 762 fp = self.fp
763 763
764 764 self.systemHeaderObj.read(fp)
765 765
766 766 def __rdBasicHeader(self, fp=None):
767 767 if fp == None:
768 768 fp = self.fp
769 769
770 770 self.basicHeaderObj.read(fp)
771 771
772 772
773 773 def __readFirstHeader(self):
774 774 self.__rdBasicHeader()
775 775 self.__rdSystemHeader()
776 776 self.__rdRadarControllerHeader()
777 777 self.__rdProcessingHeader()
778 778
779 779 self.firstHeaderSize = self.basicHeaderObj.size
780 780
781 781 datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
782 782 if datatype == 0:
783 783 datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
784 784 elif datatype == 1:
785 785 datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')])
786 786 elif datatype == 2:
787 787 datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')])
788 788 elif datatype == 3:
789 789 datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')])
790 790 elif datatype == 4:
791 791 datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')])
792 792 elif datatype == 5:
793 793 datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')])
794 794 else:
795 795 raise ValueError, 'Data type was not defined'
796 796
797 797 self.dtype = datatype_str
798 798 self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
799 799 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1)
800 800 # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
801 801 # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
802 802 self.getBlockDimension()
803 803
804 804
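Note: the datatype index decoded from processFlags above selects the complex sample width as follows (a summary of the branch ladder in __readFirstHeader, not new behaviour): 0 -> int8, 1 -> int16, 2 -> int32, 3 -> int64, 4 -> float32, 5 -> float64, each stored as a ('real', 'imag') pair; ippSeconds is then derived as 2 * 1000 * ipp / c, which is consistent with the IPP being stored in kilometres (round trip, converted to metres, divided by the speed of light).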
805 805 def __verifyFile(self, filename, msgFlag=True):
806 806 msg = None
807 807 try:
808 808 fp = open(filename, 'rb')
809 809 currentPosition = fp.tell()
810 810 except:
811 811 if msgFlag:
812 812 print "The file %s can't be opened" % (filename)
813 813 return False
814 814
815 815 neededSize = self.processingHeaderObj.blockSize + self.firstHeaderSize
816 816
817 817 if neededSize == 0:
818 818 basicHeaderObj = BasicHeader(LOCALTIME)
819 819 systemHeaderObj = SystemHeader()
820 820 radarControllerHeaderObj = RadarControllerHeader()
821 821 processingHeaderObj = ProcessingHeader()
822 822
823 823 try:
824 824 if not( basicHeaderObj.read(fp) ): raise IOError
825 825 if not( systemHeaderObj.read(fp) ): raise IOError
826 826 if not( radarControllerHeaderObj.read(fp) ): raise IOError
827 827 if not( processingHeaderObj.read(fp) ): raise IOError
828 828 data_type = int(numpy.log2((processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
829 829
830 830 neededSize = processingHeaderObj.blockSize + basicHeaderObj.size
831 831
832 832 except:
833 833 if msgFlag:
834 834 print "\tThe file %s is empty or doesn't have enough data" % filename
835 835
836 836 fp.close()
837 837 return False
838 838 else:
839 839 msg = "\tSkipping the file %s because it doesn't have enough data" %filename
840 840
841 841 fp.close()
842 842 fileSize = os.path.getsize(filename)
843 843 currentSize = fileSize - currentPosition
844 844 if currentSize < neededSize:
845 845 if msgFlag and (msg != None):
846 846 print msg #print"\tSkipping the file %s due to it hasn't enough data" %filename
847 847 return False
848 848
849 849 return True
850 850
851 851 def setup(self,
852 852 path=None,
853 853 startDate=None,
854 854 endDate=None,
855 855 startTime=datetime.time(0,0,0),
856 856 endTime=datetime.time(23,59,59),
857 857 set=0,
858 858 expLabel = "",
859 859 ext = None,
860 860 online = False,
861 861 delay = 60,
862 862 walk = True):
863 863
864 864 if path == None:
865 865 raise ValueError, "The path is not valid"
866 866
867 867 if ext == None:
868 868 ext = self.ext
869 869
870 870 if online:
871 871 print "Searching files in online mode..."
872 872
873 873 for nTries in range( self.nTries ):
874 874 fullpath, foldercounter, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext, walk=walk)
875 875
876 876 if fullpath:
877 877 break
878 878
879 879 print '\tWaiting %0.2f sec for a valid file in %s: try %02d ...' % (self.delay, path, nTries+1)
880 880 time.sleep( self.delay )
881 881
882 882 if not(fullpath):
883 883 print "There are no valid files in %s" % path
884 884 return None
885 885
886 886 self.year = year
887 887 self.doy = doy
888 888 self.set = set - 1
889 889 self.path = path
890 890 self.foldercounter = foldercounter
891 891
892 892 else:
893 893 print "Searching files in offline mode ..."
894 894 pathList, filenameList = self.__searchFilesOffLine(path, startDate=startDate, endDate=endDate,
895 895 startTime=startTime, endTime=endTime,
896 896 set=set, expLabel=expLabel, ext=ext,
897 897 walk=walk)
898 898
899 899 if not(pathList):
900 900 print "No *%s files into the folder %s \nfor the range: %s - %s"%(ext, path,
901 901 datetime.datetime.combine(startDate,startTime).ctime(),
902 902 datetime.datetime.combine(endDate,endTime).ctime())
903 903
904 904 sys.exit(-1)
905 905
906 906
907 907 self.fileIndex = -1
908 908 self.pathList = pathList
909 909 self.filenameList = filenameList
910 910
911 911 self.online = online
912 912 self.delay = delay
913 913 ext = ext.lower()
914 914 self.ext = ext
915 915
916 916 if not(self.setNextFile()):
917 917 if (startDate!=None) and (endDate!=None):
918 918 print "No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
919 919 elif startDate != None:
920 920 print "No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
921 921 else:
922 922 print "No files"
923 923
924 924 sys.exit(-1)
925 925
926 926 # self.updateDataHeader()
927 927
928 928 return self.dataOut
929 929
930 930 def getBasicHeader(self):
931 931
932 932 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000. + self.profileIndex * self.ippSeconds
933 933
934 934 self.dataOut.flagTimeBlock = self.flagTimeBlock
935 935
936 936 self.dataOut.timeZone = self.basicHeaderObj.timeZone
937 937
938 938 self.dataOut.dstFlag = self.basicHeaderObj.dstFlag
939 939
940 940 self.dataOut.errorCount = self.basicHeaderObj.errorCount
941 941
942 942 self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime
943 943
944 944 def getFirstHeader(self):
945 945
946 946 raise ValueError, "This method has not been implemented"
947 947
948 948 def getData():
949 949
950 950 raise ValueError, "This method has not been implemented"
951 951
952 952 def hasNotDataInBuffer():
953 953
954 954 raise ValueError, "This method has not been implemented"
955 955
956 956 def readBlock():
957 957
958 958 raise ValueError, "This method has not been implemented"
959 959
960 960 def isEndProcess(self):
961 961
962 962 return self.flagNoMoreFiles
963 963
964 964 def printReadBlocks(self):
965 965
966 966 print "Number of read blocks per file %04d" %self.nReadBlocks
967 967
968 968 def printTotalBlocks(self):
969 969
970 970 print "Number of read blocks %04d" %self.nTotalBlocks
971 971
972 972 def printNumberOfBlock(self):
973 973
974 974 if self.flagIsNewBlock:
975 975 print "Block No. %04d, Total blocks %04d -> %s" %(self.basicHeaderObj.dataBlock, self.nTotalBlocks, self.dataOut.datatime.ctime())
976 976
977 977 def printInfo(self):
978 978
979 979 if self.__printInfo == False:
980 980 return
981 981
982 982 self.basicHeaderObj.printInfo()
983 983 self.systemHeaderObj.printInfo()
984 984 self.radarControllerHeaderObj.printInfo()
985 985 self.processingHeaderObj.printInfo()
986 986
987 987 self.__printInfo = False
988 988
989 989
990 990 def run(self, **kwargs):
991 991
992 992 if not(self.isConfig):
993 993
994 994 # self.dataOut = dataOut
995 995 self.setup(**kwargs)
996 996 self.isConfig = True
997 997
998 998 self.getData()
999 999
1000 1000 class JRODataWriter(JRODataIO, Operation):
1001 1001
1002 1002 """
1003 1003 This class allows writing data to processed files (.r or .pdata). The data
1004 1004 is always written in blocks.
1005 1005 """
1006 1006
1007 1007 blockIndex = 0
1008 1008
1009 1009 path = None
1010 1010
1011 1011 setFile = None
1012 1012
1013 1013 profilesPerBlock = None
1014 1014
1015 1015 blocksPerFile = None
1016 1016
1017 1017 nWriteBlocks = 0
1018 1018
1019 1019 def __init__(self, dataOut=None):
1020 1020 raise ValueError, "Not implemented"
1021 1021
1022 1022
1023 1023 def hasAllDataInBuffer(self):
1024 1024 raise ValueError, "Not implemented"
1025 1025
1026 1026
1027 1027 def setBlockDimension(self):
1028 1028 raise ValueError, "Not implemented"
1029 1029
1030 1030
1031 1031 def writeBlock(self):
1032 1032 raise ValueError, "No implemented"
1033 1033
1034 1034
1035 1035 def putData(self):
1036 1036 raise ValueError, "No implemented"
1037 1037
1038 1038
1039 1039 def setBasicHeader(self):
1040 1040
1041 1041 self.basicHeaderObj.size = self.basicHeaderSize #bytes
1042 1042 self.basicHeaderObj.version = self.versionFile
1043 1043 self.basicHeaderObj.dataBlock = self.nTotalBlocks
1044 1044
1045 1045 utc = numpy.floor(self.dataOut.utctime)
1046 1046 milisecond = (self.dataOut.utctime - utc)* 1000.0
1047 1047
1048 1048 self.basicHeaderObj.utc = utc
1049 1049 self.basicHeaderObj.miliSecond = milisecond
1050 1050 self.basicHeaderObj.timeZone = self.dataOut.timeZone
1051 1051 self.basicHeaderObj.dstFlag = self.dataOut.dstFlag
1052 1052 self.basicHeaderObj.errorCount = self.dataOut.errorCount
1053 1053
1054 1054 def setFirstHeader(self):
1055 1055 """
1056 1056 Gets a copy of the First Header
1057 1057
1058 1058 Affected:
1059 1059
1060 1060 self.basicHeaderObj
1061 1061 self.systemHeaderObj
1062 1062 self.radarControllerHeaderObj
1063 1063 self.processingHeaderObj
1064 1064
1065 1065 Return:
1066 1066 None
1067 1067 """
1068 1068
1069 1069 raise ValueError, "No implemented"
1070 1070
1071 1071 def __writeFirstHeader(self):
1072 1072 """
1073 1073 Writes the first header of the file, i.e. the Basic header and the Long header (SystemHeader, RadarControllerHeader, ProcessingHeader)
1074 1074
1075 1075 Affected:
1076 1076 __dataType
1077 1077
1078 1078 Return:
1079 1079 None
1080 1080 """
1081 1081
1082 1082 # COMPUTE PARAMETERS
1083 1083
1084 1084 sizeLongHeader = self.systemHeaderObj.size + self.radarControllerHeaderObj.size + self.processingHeaderObj.size
1085 1085 self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader
1086 1086
1087 1087 self.basicHeaderObj.write(self.fp)
1088 1088 self.systemHeaderObj.write(self.fp)
1089 1089 self.radarControllerHeaderObj.write(self.fp)
1090 1090 self.processingHeaderObj.write(self.fp)
1091 1091
1092 1092 self.dtype = self.dataOut.dtype
1093 1093
1094 1094 def __setNewBlock(self):
1095 1095 """
1096 1096 If it is a new file, writes the First Header; otherwise writes only the Basic Header
1097 1097
1098 1098 Return:
1099 1099 0 : if nothing could be written
1100 1100 1 : if the Basic or the First Header was written
1101 1101 """
1102 1102 if self.fp == None:
1103 1103 self.setNextFile()
1104 1104
1105 1105 if self.flagIsNewFile:
1106 1106 return 1
1107 1107
1108 1108 if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
1109 1109 self.basicHeaderObj.write(self.fp)
1110 1110 return 1
1111 1111
1112 1112 if not( self.setNextFile() ):
1113 1113 return 0
1114 1114
1115 1115 return 1
1116 1116
1117 1117
1118 1118 def writeNextBlock(self):
1119 1119 """
1120 1120 Selects the next data block and writes it to a file
1121 1121
1122 1122 Return:
1123 1123 0 : if the data block could not be written
1124 1124 1 : if the data block was written
1125 1125 """
1126 1126 if not( self.__setNewBlock() ):
1127 1127 return 0
1128 1128
1129 1129 self.writeBlock()
1130 1130
1131 1131 return 1
1132 1132
1133 1133 def setNextFile(self):
1134 1134 """
1135 1135 Determines the next file to be written
1136 1136
1137 1137 Affected:
1138 1138 self.filename
1139 1139 self.subfolder
1140 1140 self.fp
1141 1141 self.setFile
1142 1142 self.flagIsNewFile
1143 1143
1144 1144 Return:
1145 1145 0 : if the file cannot be written
1146 1146 1 : if the file is ready to be written
1147 1147 """
1148 1148 ext = self.ext
1149 1149 path = self.path
1150 1150
1151 1151 if self.fp != None:
1152 1152 self.fp.close()
1153 1153
1154 1154 timeTuple = time.localtime( self.dataOut.utctime)
1155 1155 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
1156 1156
1157 1157 fullpath = os.path.join( path, subfolder )
1158 1158 if not( os.path.exists(fullpath) ):
1159 1159 os.mkdir(fullpath)
1160 1160 self.setFile = -1 #initialize the set counter
1161 1161 else:
1162 1162 filesList = os.listdir( fullpath )
1163 1163 if len( filesList ) > 0:
1164 1164 filesList = sorted( filesList, key=str.lower )
1165 1165 filen = filesList[-1]
1166 1166 # the filename must have the following format
1167 1167 # 0 1234 567 89A BCDE (hex)
1168 1168 # x YYYY DDD SSS .ext
1169 1169 if isNumber( filen[8:11] ):
1170 1170 self.setFile = int( filen[8:11] ) #initialize the set counter from the set of the last file
1171 1171 else:
1172 1172 self.setFile = -1
1173 1173 else:
1174 1174 self.setFile = -1 #initialize the set counter
1175 1175
1176 1176 setFile = self.setFile
1177 1177 setFile += 1
1178 1178
1179 1179 file = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
1180 1180 timeTuple.tm_year,
1181 1181 timeTuple.tm_yday,
1182 1182 setFile,
1183 1183 ext )
1184 1184
1185 1185 filename = os.path.join( path, subfolder, file )
1186 1186
1187 1187 fp = open( filename,'wb' )
1188 1188
1189 1189 self.blockIndex = 0
1190 1190
1191 1191 #save attributes
1192 1192 self.filename = filename
1193 1193 self.subfolder = subfolder
1194 1194 self.fp = fp
1195 1195 self.setFile = setFile
1196 1196 self.flagIsNewFile = 1
1197 1197
1198 1198 self.setFirstHeader()
1199 1199
1200 1200 print 'Writing the file: %s'%self.filename
1201 1201
1202 1202 self.__writeFirstHeader()
1203 1203
1204 1204 return 1
1205 1205
1206 1206 def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=None, set=0, ext=None):
1207 1207 """
1208 1208 Sets the format in which the data will be saved and writes the First Header
1209 1209
1210 1210 Inputs:
1211 1211 path : destination path where the files to be created will be written
1212 1212 format : format in which a file will be saved
1213 1213 set : the set number of the file
1214 1214
1215 1215 Return:
1216 1216 0 : if the setup was not successful
1217 1217 1 : if the setup was successful
1218 1218 """
1219 1219
1220 1220 if ext == None:
1221 1221 ext = self.ext
1222 1222
1223 1223 ext = ext.lower()
1224 1224
1225 1225 self.ext = ext
1226 1226
1227 1227 self.path = path
1228 1228
1229 1229 self.setFile = set - 1
1230 1230
1231 1231 self.blocksPerFile = blocksPerFile
1232 1232
1233 1233 self.profilesPerBlock = profilesPerBlock
1234 1234
1235 1235 self.dataOut = dataOut
1236 1236
1237 1237 if not(self.setNextFile()):
1238 1238 print "There is no next file"
1239 1239 return 0
1240 1240
1241 1241 self.setBlockDimension()
1242 1242
1243 1243 return 1
1244 1244
1245 1245 def run(self, dataOut, **kwargs):
1246 1246
1247 1247 if not(self.isConfig):
1248 1248
1249 1249 self.setup(dataOut, **kwargs)
1250 1250 self.isConfig = True
1251 1251
1252 1252 self.putData()
1253 1253
1254 1254 class VoltageReader(JRODataReader):
1255 1255 """
1256 1256 This class allows reading voltage data from files in rawdata format (.r). The data is
1257 1257 always read in blocks. The data read (a 3-dimensional array:
1258 1258 profiles*heights*channels) is stored in the "buffer" variable.
1259 1259
1260 1260 profiles * heights * channels
1261 1261
1262 1262 This class contains instances (objects) of the BasicHeader, SystemHeader,
1263 1263 RadarControllerHeader and Voltage classes. The first three are used to store data
1264 1264 header information (metadata), and the fourth (Voltage) to obtain and store one data
1265 1265 profile from the "buffer" each time the "getData" method is executed.
1266 1266
1267 1267 Example:
1268 1268
1269 1269 dpath = "/home/myuser/data"
1270 1270
1271 1271 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
1272 1272
1273 1273 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
1274 1274
1275 1275 readerObj = VoltageReader()
1276 1276
1277 1277 readerObj.setup(dpath, startTime, endTime)
1278 1278
1279 1279 while(True):
1280 1280
1281 1281 #to get one profile
1282 1282 profile = readerObj.getData()
1283 1283
1284 1284 #print the profile
1285 1285 print profile
1286 1286
1287 1287 #If you want to see all datablock
1288 1288 print readerObj.datablock
1289 1289
1290 1290 if readerObj.flagNoMoreFiles:
1291 1291 break
1292 1292
1293 1293 """
1294 1294
1295 1295 ext = ".r"
1296 1296
1297 1297 optchar = "D"
1298 1298 dataOut = None
1299 1299
1300 1300
1301 1301 def __init__(self):
1302 1302 """
1303 1303 Initializer of the VoltageReader class for reading voltage data.
1304 1304
1305 1305 Input:
1306 1306 dataOut : Object of the Voltage class. This object will be used to
1307 1307 store one data profile each time a request is made
1308 1308 (getData). The profile will be obtained from the data buffer;
1309 1309 if the buffer is empty, a new data block read will be
1310 1310 performed.
1311 1311 If this parameter is not passed, one is created internally.
1312 1312
1313 1313 Affected variables:
1314 1314 self.dataOut
1315 1315
1316 1316 Return:
1317 1317 None
1318 1318 """
1319 1319
1320 1320 self.isConfig = False
1321 1321
1322 1322 self.datablock = None
1323 1323
1324 1324 self.utc = 0
1325 1325
1326 1326 self.ext = ".r"
1327 1327
1328 1328 self.optchar = "D"
1329 1329
1330 1330 self.basicHeaderObj = BasicHeader(LOCALTIME)
1331 1331
1332 1332 self.systemHeaderObj = SystemHeader()
1333 1333
1334 1334 self.radarControllerHeaderObj = RadarControllerHeader()
1335 1335
1336 1336 self.processingHeaderObj = ProcessingHeader()
1337 1337
1338 1338 self.online = 0
1339 1339
1340 1340 self.fp = None
1341 1341
1342 1342 self.idFile = None
1343 1343
1344 1344 self.dtype = None
1345 1345
1346 1346 self.fileSizeByHeader = None
1347 1347
1348 1348 self.filenameList = []
1349 1349
1350 1350 self.filename = None
1351 1351
1352 1352 self.fileSize = None
1353 1353
1354 1354 self.firstHeaderSize = 0
1355 1355
1356 1356 self.basicHeaderSize = 24
1357 1357
1358 1358 self.pathList = []
1359 1359
1360 1360 self.filenameList = []
1361 1361
1362 1362 self.lastUTTime = 0
1363 1363
1364 1364 self.maxTimeStep = 30
1365 1365
1366 1366 self.flagNoMoreFiles = 0
1367 1367
1368 1368 self.set = 0
1369 1369
1370 1370 self.path = None
1371 1371
1372 1372 self.profileIndex = 2**32-1
1373 1373
1374 1374 self.delay = 3 #seconds
1375 1375
1376 1376         self.nTries = 3  #number of tries
1377 1377
1378 1378 self.nFiles = 3 #number of files for searching
1379 1379
1380 1380 self.nReadBlocks = 0
1381 1381
1382 1382 self.flagIsNewFile = 1
1383 1383
1384 1384 self.__isFirstTimeOnline = 1
1385 1385
1386 1386 self.ippSeconds = 0
1387 1387
1388 1388 self.flagTimeBlock = 0
1389 1389
1390 1390 self.flagIsNewBlock = 0
1391 1391
1392 1392 self.nTotalBlocks = 0
1393 1393
1394 1394 self.blocksize = 0
1395 1395
1396 1396 self.dataOut = self.createObjByDefault()
1397 1397
1398 1398 def createObjByDefault(self):
1399 1399
1400 1400 dataObj = Voltage()
1401 1401
1402 1402 return dataObj
1403 1403
1404 1404 def __hasNotDataInBuffer(self):
1405 1405 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
1406 1406 return 1
1407 1407 return 0
1408 1408
1409 1409
1410 1410 def getBlockDimension(self):
1411 1411 """
1412 1412         Computes the number of points to read for each data block
1413 1413
1414 1414 Affected:
1415 1415 self.blocksize
1416 1416
1417 1417 Return:
1418 1418 None
1419 1419 """
1420 1420 pts2read = self.processingHeaderObj.profilesPerBlock * self.processingHeaderObj.nHeights * self.systemHeaderObj.nChannels
1421 1421 self.blocksize = pts2read
1422 1422
1423 1423
1424 1424 def readBlock(self):
1425 1425 """
1426 1426         readBlock reads one data block starting at the current file pointer position
1427 1427         (self.fp) and updates every parameter related to that block
1428 1428         (metadata + data). The data read is stored in the buffer and the buffer
1429 1429         counter is reset to 0.
1430 1430
1431 1431 Inputs:
1432 1432 None
1433 1433
1434 1434 Return:
1435 1435 None
1436 1436
1437 1437 Affected:
1438 1438 self.profileIndex
1439 1439 self.datablock
1440 1440 self.flagIsNewFile
1441 1441 self.flagIsNewBlock
1442 1442 self.nTotalBlocks
1443 1443
1444 1444 Exceptions:
1445 1445             If the block read is not a valid block
1446 1446 """
1447 1447
1448 1448 junk = numpy.fromfile( self.fp, self.dtype, self.blocksize )
1449 1449
1450 1450 try:
1451 1451 junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
1452 1452 except:
1453 1453             print "The block read (%3d) does not have enough data" %self.nReadBlocks
1454 1454 return 0
1455 1455
1456 1456 junk = numpy.transpose(junk, (2,0,1))
1457 1457 self.datablock = junk['real'] + junk['imag']*1j
1458 1458
1459 1459 self.profileIndex = 0
1460 1460
1461 1461 self.flagIsNewFile = 0
1462 1462 self.flagIsNewBlock = 1
1463 1463
1464 1464 self.nTotalBlocks += 1
1465 1465 self.nReadBlocks += 1
1466 1466
1467 1467 return 1
1468 1468
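    # Note (sketch, not part of the original reader): the raw block is read as a
    # structured array with 'real' and 'imag' fields, then reshaped and transposed
    # before being converted to a complex datablock. Assuming a short-int sample
    # dtype, the steps performed by readBlock are equivalent to:
    #
    #     raw = numpy.fromfile(fp, numpy.dtype([('real','<i2'),('imag','<i2')]), blocksize)
    #     raw = raw.reshape(profilesPerBlock, nHeights, nChannels)
    #     raw = numpy.transpose(raw, (2,0,1))            # -> (channels, profiles, heights)
    #     datablock = raw['real'] + raw['imag']*1j       # complex voltages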
1469 1469 def getFirstHeader(self):
1470 1470
1471 1471 self.dataOut.dtype = self.dtype
1472 1472
1473 1473 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
1474 1474
1475 1475 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
1476 1476
1477 1477 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
1478 1478
1479 1479 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
1480 1480
1481 1481 self.dataOut.ippSeconds = self.ippSeconds
1482 1482
1483 1483 self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt
1484 1484
1485 1485 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
1486 1486
1487 1487 self.dataOut.flagShiftFFT = False
1488 1488
1489 1489         if self.radarControllerHeaderObj.code is not None:
1490 1490
1491 1491 self.dataOut.nCode = self.radarControllerHeaderObj.nCode
1492 1492
1493 1493 self.dataOut.nBaud = self.radarControllerHeaderObj.nBaud
1494 1494
1495 1495 self.dataOut.code = self.radarControllerHeaderObj.code
1496 1496
1497 1497 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
1498 1498
1499 1499 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
1500 1500
1501 1501         self.dataOut.flagDecodeData = False #assume the data has not been decoded
1502 1502 
1503 1503         self.dataOut.flagDeflipData = False #assume the data has not been deflipped
1504 1504
1505 1505 self.dataOut.flagShiftFFT = False
1506 1506
1507 1507 def getData(self):
1508 1508 """
1509 1509         getData takes one unit of data from the read buffer and copies it into the "Voltage"
1510 1510         class together with all of its associated parameters (metadata). When there is no data
1511 1511         left in the read buffer, a new data block has to be read using "readNextBlock".
1512 1512 
1513 1513         It also increments the buffer counter by 1.
1514 1514 
1515 1515         Return:
1516 1516             data    :    returns one profile of voltages (heights * channels) copied from the
1517 1517                          buffer. Returns None if there are no more files to read.
1518 1518 
1523 1523 Affected:
1524 1524 self.dataOut
1525 1525 self.profileIndex
1526 1526 self.flagTimeBlock
1527 1527 self.flagIsNewBlock
1528 1528 """
1529 1529
1530 1530 if self.flagNoMoreFiles:
1531 1531 self.dataOut.flagNoData = True
1532 1532 print 'Process finished'
1533 1533 return 0
1534 1534
1535 1535 self.flagTimeBlock = 0
1536 1536 self.flagIsNewBlock = 0
1537 1537
1538 1538 if self.__hasNotDataInBuffer():
1539 1539
1540 1540 if not( self.readNextBlock() ):
1541 1541 return 0
1542 1542
1543 1543 self.getFirstHeader()
1544 1544
1545 1545         if self.datablock is None:
1546 1546 self.dataOut.flagNoData = True
1547 1547 return 0
1548 1548
1549 1549 self.dataOut.data = self.datablock[:,self.profileIndex,:]
1550 1550
1551 1551 self.dataOut.flagNoData = False
1552 1552
1553 1553 self.getBasicHeader()
1554 1554
1555 1555 self.profileIndex += 1
1556 1556
1557 1557 return self.dataOut.data
1558 1558
1559 1559
1560 1560 class VoltageWriter(JRODataWriter):
1561 1561 """
1562 1562     This class writes voltage data to processed files (.r). Data are always written
1563 1563     block by block.
1564 1564 """
1565 1565
1566 1566 ext = ".r"
1567 1567
1568 1568 optchar = "D"
1569 1569
1570 1570 shapeBuffer = None
1571 1571
1572 1572
1573 1573 def __init__(self):
1574 1574 """
1575 1575         Constructor of the VoltageWriter class for writing voltage data.
1576 1576
1577 1577 Affected:
1578 1578 self.dataOut
1579 1579
1580 1580 Return: None
1581 1581 """
1582 1582
1583 1583 self.nTotalBlocks = 0
1584 1584
1585 1585 self.profileIndex = 0
1586 1586
1587 1587 self.isConfig = False
1588 1588
1589 1589 self.fp = None
1590 1590
1591 1591 self.flagIsNewFile = 1
1592 1592
1593 1593 self.nTotalBlocks = 0
1594 1594
1595 1595 self.flagIsNewBlock = 0
1596 1596
1597 1597 self.setFile = None
1598 1598
1599 1599 self.dtype = None
1600 1600
1601 1601 self.path = None
1602 1602
1603 1603 self.filename = None
1604 1604
1605 1605 self.basicHeaderObj = BasicHeader(LOCALTIME)
1606 1606
1607 1607 self.systemHeaderObj = SystemHeader()
1608 1608
1609 1609 self.radarControllerHeaderObj = RadarControllerHeader()
1610 1610
1611 1611 self.processingHeaderObj = ProcessingHeader()
1612 1612
1613 1613 def hasAllDataInBuffer(self):
1614 1614 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
1615 1615 return 1
1616 1616 return 0
1617 1617
1618 1618
1619 1619 def setBlockDimension(self):
1620 1620 """
1621 1621         Computes the dimensional shapes of the data sub-blocks that make up a block
1622 1622 
1623 1623         Affected:
1624 1624             self.shapeBuffer
1625 1625             self.datablock
1627 1627
1628 1628 Return: None
1629 1629 """
1630 1630 self.shapeBuffer = (self.processingHeaderObj.profilesPerBlock,
1631 1631 self.processingHeaderObj.nHeights,
1632 1632 self.systemHeaderObj.nChannels)
1633 1633
1634 1634 self.datablock = numpy.zeros((self.systemHeaderObj.nChannels,
1635 1635 self.processingHeaderObj.profilesPerBlock,
1636 1636 self.processingHeaderObj.nHeights),
1637 1637 dtype=numpy.dtype('complex64'))
1638 1638
1639 1639
1640 1640 def writeBlock(self):
1641 1641 """
1642 1642         Writes the buffer to the designated file
1643 1643
1644 1644 Affected:
1645 1645 self.profileIndex
1646 1646 self.flagIsNewFile
1647 1647 self.flagIsNewBlock
1648 1648 self.nTotalBlocks
1649 1649 self.blockIndex
1650 1650
1651 1651 Return: None
1652 1652 """
1653 1653 data = numpy.zeros( self.shapeBuffer, self.dtype )
1654 1654
1655 1655 junk = numpy.transpose(self.datablock, (1,2,0))
1656 1656
1657 1657 data['real'] = junk.real
1658 1658 data['imag'] = junk.imag
1659 1659
1660 1660 data = data.reshape( (-1) )
1661 1661
1662 1662 data.tofile( self.fp )
1663 1663
1664 1664 self.datablock.fill(0)
1665 1665
1666 1666 self.profileIndex = 0
1667 1667 self.flagIsNewFile = 0
1668 1668 self.flagIsNewBlock = 1
1669 1669
1670 1670 self.blockIndex += 1
1671 1671 self.nTotalBlocks += 1
1672 1672
1673 1673 def putData(self):
1674 1674 """
1675 1675         Fills a data block and then writes it to a file
1676 1676 
1677 1677         Affected:
1678 1678             self.flagIsNewBlock
1679 1679             self.profileIndex
1680 1680 
1681 1681         Return:
1682 1682             0    :    if there is no data or no more files can be written
1683 1683             1    :    if a block of data was written to a file
1684 1684 """
1685 1685 if self.dataOut.flagNoData:
1686 1686 return 0
1687 1687
1688 1688 self.flagIsNewBlock = 0
1689 1689
1690 1690 if self.dataOut.flagTimeBlock:
1691 1691
1692 1692 self.datablock.fill(0)
1693 1693 self.profileIndex = 0
1694 1694 self.setNextFile()
1695 1695
1696 1696 if self.profileIndex == 0:
1697 1697 self.setBasicHeader()
1698 1698
1699 1699 self.datablock[:,self.profileIndex,:] = self.dataOut.data
1700 1700
1701 1701 self.profileIndex += 1
1702 1702
1703 1703 if self.hasAllDataInBuffer():
1704 1704 #if self.flagIsNewFile:
1705 1705 self.writeNextBlock()
1706 1706 # self.setFirstHeader()
1707 1707
1708 1708 return 1
1709 1709
1710 1710 def __getProcessFlags(self):
1711 1711
1712 1712 processFlags = 0
1713 1713
1714 1714 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
1715 1715 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
1716 1716 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
1717 1717 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
1718 1718 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
1719 1719 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
1720 1720
1721 1721 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
1722 1722
1723 1723
1724 1724
1725 1725 datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
1726 1726 PROCFLAG.DATATYPE_SHORT,
1727 1727 PROCFLAG.DATATYPE_LONG,
1728 1728 PROCFLAG.DATATYPE_INT64,
1729 1729 PROCFLAG.DATATYPE_FLOAT,
1730 1730 PROCFLAG.DATATYPE_DOUBLE]
1731 1731
1732 1732
1733 1733 for index in range(len(dtypeList)):
1734 1734 if self.dataOut.dtype == dtypeList[index]:
1735 1735 dtypeValue = datatypeValueList[index]
1736 1736 break
1737 1737
1738 1738 processFlags += dtypeValue
1739 1739
1740 1740 if self.dataOut.flagDecodeData:
1741 1741 processFlags += PROCFLAG.DECODE_DATA
1742 1742
1743 1743 if self.dataOut.flagDeflipData:
1744 1744 processFlags += PROCFLAG.DEFLIP_DATA
1745 1745
1746 1746         if self.dataOut.code is not None:
1747 1747 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
1748 1748
1749 1749 if self.dataOut.nCohInt > 1:
1750 1750 processFlags += PROCFLAG.COHERENT_INTEGRATION
1751 1751
1752 1752 return processFlags
1753 1753
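    # Worked example (hypothetical configuration): for short-int samples with decoding
    # applied, no process code attached and coherent integration enabled (nCohInt > 1),
    # __getProcessFlags returns the sum
    # PROCFLAG.DATATYPE_SHORT + PROCFLAG.DECODE_DATA + PROCFLAG.COHERENT_INTEGRATION.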
1754 1754
1755 1755 def __getBlockSize(self):
1756 1756 '''
1757 1757         This method determines the number of bytes of a Voltage-type data block
1758 1758 '''
1759 1759
1760 1760 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
1761 1761 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
1762 1762 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
1763 1763 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
1764 1764 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
1765 1765 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
1766 1766
1767 1767 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
1768 1768 datatypeValueList = [1,2,4,8,4,8]
1769 1769 for index in range(len(dtypeList)):
1770 1770 if self.dataOut.dtype == dtypeList[index]:
1771 1771 datatypeValue = datatypeValueList[index]
1772 1772 break
1773 1773
1774 1774 blocksize = int(self.dataOut.nHeights * self.dataOut.nChannels * self.dataOut.nProfiles * datatypeValue * 2)
1775 1775
1776 1776 return blocksize
1777 1777
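    # Worked example (hypothetical values): with nHeights=1000, nChannels=2,
    # nProfiles=128 and short-int samples (datatypeValue=2), __getBlockSize returns
    # 1000 * 2 * 128 * 2 * 2 = 1024000 bytes; the final factor of 2 accounts for the
    # real and imaginary parts of each sample.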
1778 1778 def setFirstHeader(self):
1779 1779
1780 1780 """
1781 1781         Gets a copy of the First Header
1782 1782
1783 1783 Affected:
1784 1784 self.systemHeaderObj
1785 1785 self.radarControllerHeaderObj
1786 1786 self.dtype
1787 1787
1788 1788 Return:
1789 1789 None
1790 1790 """
1791 1791
1792 1792 self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
1793 1793 self.systemHeaderObj.nChannels = self.dataOut.nChannels
1794 1794 self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()
1795 1795
1796 1796 self.setBasicHeader()
1797 1797
1798 1798 processingHeaderSize = 40 # bytes
1799 1799 self.processingHeaderObj.dtype = 0 # Voltage
1800 1800 self.processingHeaderObj.blockSize = self.__getBlockSize()
1801 1801 self.processingHeaderObj.profilesPerBlock = self.profilesPerBlock
1802 1802 self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
1803 1803         self.processingHeaderObj.nWindows = 1 #could be 1 or self.dataOut.processingHeaderObj.nWindows
1804 1804 self.processingHeaderObj.processFlags = self.__getProcessFlags()
1805 1805 self.processingHeaderObj.nCohInt = self.dataOut.nCohInt
1806 1806         self.processingHeaderObj.nIncohInt = 1 # when the source data is of type Voltage
1807 1807         self.processingHeaderObj.totalSpectra = 0 # when the source data is of type Voltage
1808 1808
1809 1809         if self.dataOut.code is not None:
1810 1810 self.processingHeaderObj.code = self.dataOut.code
1811 1811 self.processingHeaderObj.nCode = self.dataOut.nCode
1812 1812 self.processingHeaderObj.nBaud = self.dataOut.nBaud
1813 1813 codesize = int(8 + 4 * self.dataOut.nCode * self.dataOut.nBaud)
1814 1814 processingHeaderSize += codesize
1815 1815
1816 1816 if self.processingHeaderObj.nWindows != 0:
1817 1817 self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
1818 1818 self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
1819 1819 self.processingHeaderObj.nHeights = self.dataOut.nHeights
1820 1820 self.processingHeaderObj.samplesWin = self.dataOut.nHeights
1821 1821 processingHeaderSize += 12
1822 1822
1823 1823 self.processingHeaderObj.size = processingHeaderSize
1824 1824
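    # Worked example (hypothetical values) of the processing header size computed above:
    # the base header takes 40 bytes; with a code of nCode=1 and nBaud=16 the code block
    # adds 8 + 4*1*16 = 72 bytes, and a single sampling window adds 12 bytes, giving
    # processingHeaderObj.size = 40 + 72 + 12 = 124 bytes.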
1825 1825 class SpectraReader(JRODataReader):
1826 1826 """
1827 1827     This class reads spectra data from processed files (.pdata). Data are always read
1828 1828     block by block. The data read (3-dimensional arrays) are stored in three buffers:
1829 1829     one for the Self Spectra, one for the Cross Spectra and one for the DC Channels.
1830 1830 
1831 1831                equalChannelPairs * heights * profiles         (Self Spectra)
1832 1832                differentChannelPairs * heights * profiles     (Cross Spectra)
1833 1833                channels * heights                              (DC Channels)
1834 1834 
1835 1835     This class holds instances (objects) of the BasicHeader, SystemHeader,
1836 1836     RadarControllerHeader and Spectra classes. The first three store the data header
1837 1837     information (metadata), and the fourth (Spectra) receives and stores one block of
1838 1838     data from the "buffer" every time the "getData" method is called.
1839 1839
1840 1840 Example:
1841 1841 dpath = "/home/myuser/data"
1842 1842
1843 1843 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
1844 1844
1845 1845 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
1846 1846
1847 1847 readerObj = SpectraReader()
1848 1848
1849 1849 readerObj.setup(dpath, startTime, endTime)
1850 1850
1851 1851 while(True):
1852 1852
1853 1853 readerObj.getData()
1854 1854
1855 1855 print readerObj.data_spc
1856 1856
1857 1857 print readerObj.data_cspc
1858 1858
1859 1859 print readerObj.data_dc
1860 1860
1861 1861 if readerObj.flagNoMoreFiles:
1862 1862 break
1863 1863
1864 1864 """
1865 1865
1866 1866 pts2read_SelfSpectra = 0
1867 1867
1868 1868 pts2read_CrossSpectra = 0
1869 1869
1870 1870 pts2read_DCchannels = 0
1871 1871
1872 1872 ext = ".pdata"
1873 1873
1874 1874 optchar = "P"
1875 1875
1876 1876 dataOut = None
1877 1877
1878 1878 nRdChannels = None
1879 1879
1880 1880 nRdPairs = None
1881 1881
1882 1882 rdPairList = []
1883 1883
1884 1884 def __init__(self):
1885 1885 """
1886 1886         Constructor of the SpectraReader class for reading spectra data.
1887 1887 
1888 1888         Inputs:
1889 1889             dataOut    :    Object of the Spectra class. This object is used to
1890 1890                             store one block of data every time a request is made
1891 1891                             (getData). The block is taken from the data buffer; if
1892 1892                             the buffer is empty, a new data block is read from the
1893 1893                             file.
1894 1894                             If this parameter is not given, one is created internally.
1895 1895
1896 1896 Affected:
1897 1897 self.dataOut
1898 1898
1899 1899 Return : None
1900 1900 """
1901 1901
1902 1902 self.isConfig = False
1903 1903
1904 1904 self.pts2read_SelfSpectra = 0
1905 1905
1906 1906 self.pts2read_CrossSpectra = 0
1907 1907
1908 1908 self.pts2read_DCchannels = 0
1909 1909
1910 1910 self.datablock = None
1911 1911
1912 1912 self.utc = None
1913 1913
1914 1914 self.ext = ".pdata"
1915 1915
1916 1916 self.optchar = "P"
1917 1917
1918 1918 self.basicHeaderObj = BasicHeader(LOCALTIME)
1919 1919
1920 1920 self.systemHeaderObj = SystemHeader()
1921 1921
1922 1922 self.radarControllerHeaderObj = RadarControllerHeader()
1923 1923
1924 1924 self.processingHeaderObj = ProcessingHeader()
1925 1925
1926 1926 self.online = 0
1927 1927
1928 1928 self.fp = None
1929 1929
1930 1930 self.idFile = None
1931 1931
1932 1932 self.dtype = None
1933 1933
1934 1934 self.fileSizeByHeader = None
1935 1935
1936 1936 self.filenameList = []
1937 1937
1938 1938 self.filename = None
1939 1939
1940 1940 self.fileSize = None
1941 1941
1942 1942 self.firstHeaderSize = 0
1943 1943
1944 1944 self.basicHeaderSize = 24
1945 1945
1946 1946 self.pathList = []
1947 1947
1948 1948 self.lastUTTime = 0
1949 1949
1950 1950 self.maxTimeStep = 30
1951 1951
1952 1952 self.flagNoMoreFiles = 0
1953 1953
1954 1954 self.set = 0
1955 1955
1956 1956 self.path = None
1957 1957
1958 1958 self.delay = 60 #seconds
1959 1959
1960 1960         self.nTries = 3  #number of tries
1961 1961
1962 1962 self.nFiles = 3 #number of files for searching
1963 1963
1964 1964 self.nReadBlocks = 0
1965 1965
1966 1966 self.flagIsNewFile = 1
1967 1967
1968 1968 self.__isFirstTimeOnline = 1
1969 1969
1970 1970 self.ippSeconds = 0
1971 1971
1972 1972 self.flagTimeBlock = 0
1973 1973
1974 1974 self.flagIsNewBlock = 0
1975 1975
1976 1976 self.nTotalBlocks = 0
1977 1977
1978 1978 self.blocksize = 0
1979 1979
1980 1980 self.dataOut = self.createObjByDefault()
1981 1981
1982 1982 self.profileIndex = 1 #Always
1983 1983
1984 1984
1985 1985 def createObjByDefault(self):
1986 1986
1987 1987 dataObj = Spectra()
1988 1988
1989 1989 return dataObj
1990 1990
1991 1991 def __hasNotDataInBuffer(self):
1992 1992 return 1
1993 1993
1994 1994
1995 1995 def getBlockDimension(self):
1996 1996 """
1997 1997         Computes the number of points to read for each data block
1998 1998
1999 1999 Affected:
2000 2000 self.nRdChannels
2001 2001 self.nRdPairs
2002 2002 self.pts2read_SelfSpectra
2003 2003 self.pts2read_CrossSpectra
2004 2004 self.pts2read_DCchannels
2005 2005 self.blocksize
2006 2006 self.dataOut.nChannels
2007 2007 self.dataOut.nPairs
2008 2008
2009 2009 Return:
2010 2010 None
2011 2011 """
2012 2012 self.nRdChannels = 0
2013 2013 self.nRdPairs = 0
2014 2014 self.rdPairList = []
2015 2015
2016 2016 for i in range(0, self.processingHeaderObj.totalSpectra*2, 2):
2017 2017 if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i+1]:
2018 2018                 self.nRdChannels = self.nRdChannels + 1 #pair of equal channels (self spectra)
2019 2019             else:
2020 2020                 self.nRdPairs = self.nRdPairs + 1 #pair of different channels (cross spectra)
2021 2021 self.rdPairList.append((self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1]))
2022 2022
2023 2023 pts2read = self.processingHeaderObj.nHeights * self.processingHeaderObj.profilesPerBlock
2024 2024
2025 2025 self.pts2read_SelfSpectra = int(self.nRdChannels * pts2read)
2026 2026 self.blocksize = self.pts2read_SelfSpectra
2027 2027
2028 2028 if self.processingHeaderObj.flag_cspc:
2029 2029 self.pts2read_CrossSpectra = int(self.nRdPairs * pts2read)
2030 2030 self.blocksize += self.pts2read_CrossSpectra
2031 2031
2032 2032 if self.processingHeaderObj.flag_dc:
2033 2033 self.pts2read_DCchannels = int(self.systemHeaderObj.nChannels * self.processingHeaderObj.nHeights)
2034 2034 self.blocksize += self.pts2read_DCchannels
2035 2035
2036 2036 # self.blocksize = self.pts2read_SelfSpectra + self.pts2read_CrossSpectra + self.pts2read_DCchannels
2037 2037
2038 2038
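    # Worked example (hypothetical header): with 3 channels combined as 3 self-spectra
    # plus 1 cross-spectra pair, nHeights=500 and profilesPerBlock=128, getBlockDimension
    # gives pts2read = 500*128 = 64000, so pts2read_SelfSpectra = 3*64000 = 192000 and,
    # if flag_cspc is set, pts2read_CrossSpectra = 1*64000 = 64000; with flag_dc set,
    # pts2read_DCchannels = 3*500 = 1500, and blocksize is the sum of the enabled parts.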
2039 2039 def readBlock(self):
2040 2040 """
2041 2041         Reads one data block starting at the current file pointer position
2042 2042         (self.fp) and updates every parameter related to that block
2043 2043         (metadata + data). The data read is stored in the buffer and the buffer
2044 2044         counter is reset to 0.
2045 2045
2046 2046 Return: None
2047 2047
2048 2048         Affected variables:
2049 2049
2050 2050 self.flagIsNewFile
2051 2051 self.flagIsNewBlock
2052 2052 self.nTotalBlocks
2053 2053 self.data_spc
2054 2054 self.data_cspc
2055 2055 self.data_dc
2056 2056
2057 2057 Exceptions:
2058 2058             If the block read is not a valid block
2059 2059 """
2060 2060 blockOk_flag = False
2061 2061 fpointer = self.fp.tell()
2062 2062
2063 2063 spc = numpy.fromfile( self.fp, self.dtype[0], self.pts2read_SelfSpectra )
2064 2064         spc = spc.reshape( (self.nRdChannels, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #reshape into a 3D array
2065 2065
2066 2066 if self.processingHeaderObj.flag_cspc:
2067 2067 cspc = numpy.fromfile( self.fp, self.dtype, self.pts2read_CrossSpectra )
2068 2068             cspc = cspc.reshape( (self.nRdPairs, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #reshape into a 3D array
2069 2069
2070 2070 if self.processingHeaderObj.flag_dc:
2071 2071 dc = numpy.fromfile( self.fp, self.dtype, self.pts2read_DCchannels ) #int(self.processingHeaderObj.nHeights*self.systemHeaderObj.nChannels) )
2072 2072             dc = dc.reshape( (self.systemHeaderObj.nChannels, self.processingHeaderObj.nHeights) ) #reshape into a 2D array
2073 2073
2074 2074
2075 2075 if not(self.processingHeaderObj.shif_fft):
2076 2076             #shift right along axis 2 by the given number of positions
2077 2077 shift = int(self.processingHeaderObj.profilesPerBlock/2)
2078 2078 spc = numpy.roll( spc, shift , axis=2 )
2079 2079
2080 2080 if self.processingHeaderObj.flag_cspc:
2081 2081                 #shift right along axis 2 by the given number of positions
2082 2082 cspc = numpy.roll( cspc, shift, axis=2 )
2083 2083
2084 2084 # self.processingHeaderObj.shif_fft = True
2085 2085
2086 2086 spc = numpy.transpose( spc, (0,2,1) )
2087 2087 self.data_spc = spc
2088 2088
2089 2089 if self.processingHeaderObj.flag_cspc:
2090 2090 cspc = numpy.transpose( cspc, (0,2,1) )
2091 2091 self.data_cspc = cspc['real'] + cspc['imag']*1j
2092 2092 else:
2093 2093 self.data_cspc = None
2094 2094
2095 2095 if self.processingHeaderObj.flag_dc:
2096 2096 self.data_dc = dc['real'] + dc['imag']*1j
2097 2097 else:
2098 2098 self.data_dc = None
2099 2099
2100 2100 self.flagIsNewFile = 0
2101 2101 self.flagIsNewBlock = 1
2102 2102
2103 2103 self.nTotalBlocks += 1
2104 2104 self.nReadBlocks += 1
2105 2105
2106 2106 return 1
2107 2107
2108 2108 def getFirstHeader(self):
2109 2109
2110 2110 self.dataOut.dtype = self.dtype
2111 2111
2112 2112 self.dataOut.nPairs = self.nRdPairs
2113 2113
2114 2114 self.dataOut.pairsList = self.rdPairList
2115 2115
2116 2116 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
2117 2117
2118 2118 self.dataOut.nFFTPoints = self.processingHeaderObj.profilesPerBlock
2119 2119
2120 2120 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
2121 2121
2122 2122 self.dataOut.nIncohInt = self.processingHeaderObj.nIncohInt
2123 2123
2124 2124 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
2125 2125
2126 2126 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
2127 2127
2128 2128 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
2129 2129
2130 2130 self.dataOut.ippSeconds = self.ippSeconds
2131 2131
2132 2132 self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt * self.processingHeaderObj.nIncohInt * self.dataOut.nFFTPoints
2133 2133
2134 2134 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
2135 2135
2136 2136 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
2137 2137
2138 2138 self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft
2139 2139
2140 2140         self.dataOut.flagDecodeData = False #assume the data has not been decoded
2141 2141 
2142 2142         self.dataOut.flagDeflipData = True #assume the data has not been deflipped
2143 2143
2144 2144         if self.processingHeaderObj.code is not None:
2145 2145
2146 2146 self.dataOut.nCode = self.processingHeaderObj.nCode
2147 2147
2148 2148 self.dataOut.nBaud = self.processingHeaderObj.nBaud
2149 2149
2150 2150 self.dataOut.code = self.processingHeaderObj.code
2151 2151
2152 2152 self.dataOut.flagDecodeData = True
2153 2153
2154 2154 def getData(self):
2155 2155 """
2156 2156         Copies the read buffer into the "Spectra" class,
2157 2157         together with all of its associated parameters (metadata). When there is no data
2158 2158         left in the read buffer, a new data block has to be read using "readNextBlock".
2159 2159 
2160 2160         Return:
2161 2161             0    :    if there are no more files available
2162 2162             1    :    if the buffer was copied successfully
2163 2163
2164 2164 Affected:
2165 2165 self.dataOut
2166 2166
2167 2167 self.flagTimeBlock
2168 2168 self.flagIsNewBlock
2169 2169 """
2170 2170
2171 2171 if self.flagNoMoreFiles:
2172 2172 self.dataOut.flagNoData = True
2173 2173 print 'Process finished'
2174 2174 return 0
2175 2175
2176 2176 self.flagTimeBlock = 0
2177 2177 self.flagIsNewBlock = 0
2178 2178
2179 2179 if self.__hasNotDataInBuffer():
2180 2180
2181 2181 if not( self.readNextBlock() ):
2182 2182 self.dataOut.flagNoData = True
2183 2183 return 0
2184 2184
2185 2185         #data is a 3-dimensional numpy array (profiles, heights and channels)
2186 2186 
2187 2187         if self.data_dc is None:
2188 2188 self.dataOut.flagNoData = True
2189 2189 return 0
2190 2190
2191 2191 self.getBasicHeader()
2192 2192
2193 2193 self.getFirstHeader()
2194 2194
2195 2195 self.dataOut.data_spc = self.data_spc
2196 2196
2197 2197 self.dataOut.data_cspc = self.data_cspc
2198 2198
2199 2199 self.dataOut.data_dc = self.data_dc
2200 2200
2201 2201 self.dataOut.flagNoData = False
2202 2202
2203 2203 return self.dataOut.data_spc
2204 2204
2205 2205
2206 2206 class SpectraWriter(JRODataWriter):
2207 2207
2208 2208 """
2209 2209     This class writes spectra data to processed files (.pdata). Data are always written
2210 2210     block by block.
2211 2211 """
2212 2212
2213 2213 ext = ".pdata"
2214 2214
2215 2215 optchar = "P"
2216 2216
2217 2217 shape_spc_Buffer = None
2218 2218
2219 2219 shape_cspc_Buffer = None
2220 2220
2221 2221 shape_dc_Buffer = None
2222 2222
2223 2223 data_spc = None
2224 2224
2225 2225 data_cspc = None
2226 2226
2227 2227 data_dc = None
2228 2228
2229 2229 # dataOut = None
2230 2230
2231 2231 def __init__(self):
2232 2232 """
2233 2233         Constructor of the SpectraWriter class for writing spectra data.
2234 2234
2235 2235 Affected:
2236 2236 self.dataOut
2237 2237 self.basicHeaderObj
2238 2238 self.systemHeaderObj
2239 2239 self.radarControllerHeaderObj
2240 2240 self.processingHeaderObj
2241 2241
2242 2242 Return: None
2243 2243 """
2244 2244
2245 2245 self.isConfig = False
2246 2246
2247 2247 self.nTotalBlocks = 0
2248 2248
2249 2249 self.data_spc = None
2250 2250
2251 2251 self.data_cspc = None
2252 2252
2253 2253 self.data_dc = None
2254 2254
2255 2255 self.fp = None
2256 2256
2257 2257 self.flagIsNewFile = 1
2258 2258
2259 2259 self.nTotalBlocks = 0
2260 2260
2261 2261 self.flagIsNewBlock = 0
2262 2262
2263 2263 self.setFile = None
2264 2264
2265 2265 self.dtype = None
2266 2266
2267 2267 self.path = None
2268 2268
2269 2269 self.noMoreFiles = 0
2270 2270
2271 2271 self.filename = None
2272 2272
2273 2273 self.basicHeaderObj = BasicHeader(LOCALTIME)
2274 2274
2275 2275 self.systemHeaderObj = SystemHeader()
2276 2276
2277 2277 self.radarControllerHeaderObj = RadarControllerHeader()
2278 2278
2279 2279 self.processingHeaderObj = ProcessingHeader()
2280 2280
2281 2281
2282 2282 def hasAllDataInBuffer(self):
2283 2283 return 1
2284 2284
2285 2285
2286 2286 def setBlockDimension(self):
2287 2287 """
2288 2288         Computes the dimensional shapes of the data sub-blocks that make up a block
2289 2289
2290 2290 Affected:
2291 2291 self.shape_spc_Buffer
2292 2292 self.shape_cspc_Buffer
2293 2293 self.shape_dc_Buffer
2294 2294
2295 2295 Return: None
2296 2296 """
2297 2297 self.shape_spc_Buffer = (self.dataOut.nChannels,
2298 2298 self.processingHeaderObj.nHeights,
2299 2299 self.processingHeaderObj.profilesPerBlock)
2300 2300
2301 2301 self.shape_cspc_Buffer = (self.dataOut.nPairs,
2302 2302 self.processingHeaderObj.nHeights,
2303 2303 self.processingHeaderObj.profilesPerBlock)
2304 2304
2305 2305 self.shape_dc_Buffer = (self.dataOut.nChannels,
2306 2306 self.processingHeaderObj.nHeights)
2307 2307
2308 2308
2309 2309 def writeBlock(self):
2310 2310 """
2311 2311         Writes the buffer to the designated file
2312 2312
2313 2313 Affected:
2314 2314 self.data_spc
2315 2315 self.data_cspc
2316 2316 self.data_dc
2317 2317 self.flagIsNewFile
2318 2318 self.flagIsNewBlock
2319 2319 self.nTotalBlocks
2320 2320 self.nWriteBlocks
2321 2321
2322 2322 Return: None
2323 2323 """
2324 2324
2325 2325 spc = numpy.transpose( self.data_spc, (0,2,1) )
2326 2326 if not( self.processingHeaderObj.shif_fft ):
2327 2327             spc = numpy.roll( spc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #shift right along axis 2 by the given number of positions
2328 2328 data = spc.reshape((-1))
2329 2329 data = data.astype(self.dtype[0])
2330 2330 data.tofile(self.fp)
2331 2331
2332 2332         if self.data_cspc is not None:
2333 2333 data = numpy.zeros( self.shape_cspc_Buffer, self.dtype )
2334 2334 cspc = numpy.transpose( self.data_cspc, (0,2,1) )
2335 2335 if not( self.processingHeaderObj.shif_fft ):
2336 2336                 cspc = numpy.roll( cspc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #shift right along axis 2 by the given number of positions
2337 2337 data['real'] = cspc.real
2338 2338 data['imag'] = cspc.imag
2339 2339 data = data.reshape((-1))
2340 2340 data.tofile(self.fp)
2341 2341
2342 2342         if self.data_dc is not None:
2343 2343 data = numpy.zeros( self.shape_dc_Buffer, self.dtype )
2344 2344 dc = self.data_dc
2345 2345 data['real'] = dc.real
2346 2346 data['imag'] = dc.imag
2347 2347 data = data.reshape((-1))
2348 2348 data.tofile(self.fp)
2349 2349
2350 2350 self.data_spc.fill(0)
2351 2351 self.data_dc.fill(0)
2352 2352         if self.data_cspc is not None:
2353 2353 self.data_cspc.fill(0)
2354 2354
2355 2355 self.flagIsNewFile = 0
2356 2356 self.flagIsNewBlock = 1
2357 2357 self.nTotalBlocks += 1
2358 2358 self.nWriteBlocks += 1
2359 2359 self.blockIndex += 1
2360 2360
2361 2361
2362 2362 def putData(self):
2363 2363 """
2364 2364         Fills a data block and then writes it to a file
2365 2365
2366 2366 Affected:
2367 2367 self.data_spc
2368 2368 self.data_cspc
2369 2369 self.data_dc
2370 2370
2371 2371 Return:
2372 2372             0    :    if there is no data or no more files can be written
2373 2373             1    :    if a block of data was written to a file
2374 2374 """
2375 2375
2376 2376 if self.dataOut.flagNoData:
2377 2377 return 0
2378 2378
2379 2379 self.flagIsNewBlock = 0
2380 2380
2381 2381 if self.dataOut.flagTimeBlock:
2382 2382 self.data_spc.fill(0)
2383 2383 self.data_cspc.fill(0)
2384 2384 self.data_dc.fill(0)
2385 2385 self.setNextFile()
2386 2386
2387 2387 if self.flagIsNewFile == 0:
2388 2388 self.setBasicHeader()
2389 2389
2390 2390 self.data_spc = self.dataOut.data_spc.copy()
2391 2391 self.data_cspc = self.dataOut.data_cspc.copy()
2392 2392 self.data_dc = self.dataOut.data_dc.copy()
2393 2393
2394 2394 # #self.processingHeaderObj.dataBlocksPerFile)
2395 2395 if self.hasAllDataInBuffer():
2396 2396 # self.setFirstHeader()
2397 2397 self.writeNextBlock()
2398 2398
2399 2399 return 1
2400 2400
2401 2401
2402 2402 def __getProcessFlags(self):
2403 2403
2404 2404 processFlags = 0
2405 2405
2406 2406 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
2407 2407 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
2408 2408 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
2409 2409 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
2410 2410 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
2411 2411 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
2412 2412
2413 2413 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
2414 2414
2415 2415
2416 2416
2417 2417 datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
2418 2418 PROCFLAG.DATATYPE_SHORT,
2419 2419 PROCFLAG.DATATYPE_LONG,
2420 2420 PROCFLAG.DATATYPE_INT64,
2421 2421 PROCFLAG.DATATYPE_FLOAT,
2422 2422 PROCFLAG.DATATYPE_DOUBLE]
2423 2423
2424 2424
2425 2425 for index in range(len(dtypeList)):
2426 2426 if self.dataOut.dtype == dtypeList[index]:
2427 2427 dtypeValue = datatypeValueList[index]
2428 2428 break
2429 2429
2430 2430 processFlags += dtypeValue
2431 2431
2432 2432 if self.dataOut.flagDecodeData:
2433 2433 processFlags += PROCFLAG.DECODE_DATA
2434 2434
2435 2435 if self.dataOut.flagDeflipData:
2436 2436 processFlags += PROCFLAG.DEFLIP_DATA
2437 2437
2438 2438         if self.dataOut.code is not None:
2439 2439 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
2440 2440
2441 2441 if self.dataOut.nIncohInt > 1:
2442 2442 processFlags += PROCFLAG.INCOHERENT_INTEGRATION
2443 2443
2444 2444         if self.dataOut.data_dc is not None:
2445 2445 processFlags += PROCFLAG.SAVE_CHANNELS_DC
2446 2446
2447 2447 return processFlags
2448 2448
2449 2449
2450 2450 def __getBlockSize(self):
2451 2451 '''
2452 2452         This method determines the number of bytes of a Spectra-type data block
2453 2453 '''
2454 2454
2455 2455 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
2456 2456 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
2457 2457 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
2458 2458 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
2459 2459 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
2460 2460 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
2461 2461
2462 2462 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
2463 2463 datatypeValueList = [1,2,4,8,4,8]
2464 2464 for index in range(len(dtypeList)):
2465 2465 if self.dataOut.dtype == dtypeList[index]:
2466 2466 datatypeValue = datatypeValueList[index]
2467 2467 break
2468 2468
2469 2469
2470 2470 pts2write = self.dataOut.nHeights * self.dataOut.nFFTPoints
2471 2471
2472 2472 pts2write_SelfSpectra = int(self.dataOut.nChannels * pts2write)
2473 2473 blocksize = (pts2write_SelfSpectra*datatypeValue)
2474 2474
2475 2475         if self.dataOut.data_cspc is not None:
2476 2476 pts2write_CrossSpectra = int(self.dataOut.nPairs * pts2write)
2477 2477 blocksize += (pts2write_CrossSpectra*datatypeValue*2)
2478 2478
2479 2479         if self.dataOut.data_dc is not None:
2480 2480 pts2write_DCchannels = int(self.dataOut.nChannels * self.dataOut.nHeights)
2481 2481 blocksize += (pts2write_DCchannels*datatypeValue*2)
2482 2482
2483 2483         blocksize = blocksize #* datatypeValue * 2 #FIX THIS
2484 2484
2485 2485 return blocksize
2486 2486
2487 2487 def setFirstHeader(self):
2488 2488
2489 2489 """
2490 2490         Gets a copy of the First Header
2491 2491
2492 2492 Affected:
2493 2493 self.systemHeaderObj
2494 2494 self.radarControllerHeaderObj
2495 2495 self.dtype
2496 2496
2497 2497 Return:
2498 2498 None
2499 2499 """
2500 2500
2501 2501 self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
2502 2502 self.systemHeaderObj.nChannels = self.dataOut.nChannels
2503 2503 self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()
2504 2504
2505 2505 self.setBasicHeader()
2506 2506
2507 2507 processingHeaderSize = 40 # bytes
2508 2508 self.processingHeaderObj.dtype = 1 # Spectra
2509 2509 self.processingHeaderObj.blockSize = self.__getBlockSize()
2510 2510 self.processingHeaderObj.profilesPerBlock = self.dataOut.nFFTPoints
2511 2511 self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
2512 2512         self.processingHeaderObj.nWindows = 1 #could be 1 or self.dataOut.processingHeaderObj.nWindows
2513 2513 self.processingHeaderObj.processFlags = self.__getProcessFlags()
2514 2514         self.processingHeaderObj.nCohInt = self.dataOut.nCohInt # required to determine the value of timeInterval
2515 2515 self.processingHeaderObj.nIncohInt = self.dataOut.nIncohInt
2516 2516 self.processingHeaderObj.totalSpectra = self.dataOut.nPairs + self.dataOut.nChannels
2517 2517 self.processingHeaderObj.shif_fft = self.dataOut.flagShiftFFT
2518 2518
2519 2519 if self.processingHeaderObj.totalSpectra > 0:
2520 2520 channelList = []
2521 2521 for channel in range(self.dataOut.nChannels):
2522 2522 channelList.append(channel)
2523 2523 channelList.append(channel)
2524 2524
2525 2525 pairsList = []
2526 2526 for pair in self.dataOut.pairsList:
2527 2527 pairsList.append(pair[0])
2528 2528 pairsList.append(pair[1])
2529 2529 spectraComb = channelList + pairsList
2530 2530 spectraComb = numpy.array(spectraComb,dtype="u1")
2531 2531 self.processingHeaderObj.spectraComb = spectraComb
2532 2532 sizeOfSpcComb = len(spectraComb)
2533 2533 processingHeaderSize += sizeOfSpcComb
2534 2534
2535 2535 # The processing header should not have information about code
2536 2536 # if self.dataOut.code != None:
2537 2537 # self.processingHeaderObj.code = self.dataOut.code
2538 2538 # self.processingHeaderObj.nCode = self.dataOut.nCode
2539 2539 # self.processingHeaderObj.nBaud = self.dataOut.nBaud
2540 2540 # nCodeSize = 4 # bytes
2541 2541 # nBaudSize = 4 # bytes
2542 2542 # codeSize = 4 # bytes
2543 2543 # sizeOfCode = int(nCodeSize + nBaudSize + codeSize * self.dataOut.nCode * self.dataOut.nBaud)
2544 2544 # processingHeaderSize += sizeOfCode
2545 2545
2546 2546 if self.processingHeaderObj.nWindows != 0:
2547 2547 self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
2548 2548 self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
2549 2549 self.processingHeaderObj.nHeights = self.dataOut.nHeights
2550 2550 self.processingHeaderObj.samplesWin = self.dataOut.nHeights
2551 2551 sizeOfFirstHeight = 4
2552 2552 sizeOfdeltaHeight = 4
2553 2553 sizeOfnHeights = 4
2554 2554 sizeOfWindows = (sizeOfFirstHeight + sizeOfdeltaHeight + sizeOfnHeights)*self.processingHeaderObj.nWindows
2555 2555 processingHeaderSize += sizeOfWindows
2556 2556
2557 2557 self.processingHeaderObj.size = processingHeaderSize
2558 2558
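# Worked example (hypothetical case) of the spectraComb layout built in setFirstHeader:
# with nChannels=2 and pairsList=[(0,1)], channelList becomes [0,0,1,1] and the pairs are
# flattened to [0,1], so spectraComb = [0, 0, 1, 1, 0, 1]; equal consecutive values denote
# self-spectra and different values denote cross-spectra pairs, which is how
# SpectraReader.getBlockDimension recovers nRdChannels and nRdPairs when reading back.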
2559 2559 class SpectraHeisWriter(Operation):
2560 2560 # set = None
2561 2561 setFile = None
2562 2562 idblock = None
2563 2563 doypath = None
2564 2564 subfolder = None
2565 2565
2566 2566 def __init__(self):
2567 2567 self.wrObj = FITS()
2568 2568 # self.dataOut = dataOut
2569 2569 self.nTotalBlocks=0
2570 2570 # self.set = None
2571 2571 self.setFile = None
2572 2572 self.idblock = 0
2573 2573 self.wrpath = None
2574 2574 self.doypath = None
2575 2575 self.subfolder = None
2576 2576 self.isConfig = False
2577 2577
2578 2578 def isNumber(str):
2579 2579 """
2580 2580         Checks whether the characters of a string can be converted to a number.
2581 2581 
2582 2582         Exceptions:
2583 2583             If a given string cannot be converted to a number
2584 2584         Input:
2585 2585             str, string to analyze in order to determine whether it can be converted to a number or not
2586 2586 
2587 2587         Return:
2588 2588             True  : if the string is numeric
2589 2589             False : if the string is not numeric
2590 2590 """
2591 2591 try:
2592 2592 float( str )
2593 2593 return True
2594 2594 except:
2595 2595 return False
2596 2596
2597 2597 def setup(self, dataOut, wrpath):
2598 2598
2599 2599 if not(os.path.exists(wrpath)):
2600 2600 os.mkdir(wrpath)
2601 2601
2602 2602 self.wrpath = wrpath
2603 2603 # self.setFile = 0
2604 2604 self.dataOut = dataOut
2605 2605
2606 2606 def putData(self):
2607 2607 name= time.localtime( self.dataOut.utctime)
2608 2608 ext=".fits"
2609 2609
2610 2610 if self.doypath == None:
2611 2611 self.subfolder = 'F%4.4d%3.3d_%d' % (name.tm_year,name.tm_yday,time.mktime(datetime.datetime.now().timetuple()))
2612 2612 self.doypath = os.path.join( self.wrpath, self.subfolder )
2613 2613 os.mkdir(self.doypath)
2614 2614
2615 2615 if self.setFile == None:
2616 2616 # self.set = self.dataOut.set
2617 2617 self.setFile = 0
2618 2618 # if self.set != self.dataOut.set:
2619 2619 ## self.set = self.dataOut.set
2620 2620 # self.setFile = 0
2621 2621
2622 2622 #make the filename
2623 2623 file = 'D%4.4d%3.3d_%3.3d%s' % (name.tm_year,name.tm_yday,self.setFile,ext)
2624 2624
2625 2625 filename = os.path.join(self.wrpath,self.subfolder, file)
2626 2626
2627 2627 idblock = numpy.array([self.idblock],dtype="int64")
2628 2628 header=self.wrObj.cFImage(idblock=idblock,
2629 2629 year=time.gmtime(self.dataOut.utctime).tm_year,
2630 2630 month=time.gmtime(self.dataOut.utctime).tm_mon,
2631 2631 day=time.gmtime(self.dataOut.utctime).tm_mday,
2632 2632 hour=time.gmtime(self.dataOut.utctime).tm_hour,
2633 2633 minute=time.gmtime(self.dataOut.utctime).tm_min,
2634 2634 second=time.gmtime(self.dataOut.utctime).tm_sec)
2635 2635
2636 2636 c=3E8
2637 2637 deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
2638 2638 freq=numpy.arange(-1*self.dataOut.nHeights/2.,self.dataOut.nHeights/2.)*(c/(2*deltaHeight*1000))
2639 2639
2640 2640 colList = []
2641 2641
2642 2642 colFreq=self.wrObj.setColF(name="freq", format=str(self.dataOut.nFFTPoints)+'E', array=freq)
2643 2643
2644 2644 colList.append(colFreq)
2645 2645
2646 2646 nchannel=self.dataOut.nChannels
2647 2647
2648 2648 for i in range(nchannel):
2649 2649 col = self.wrObj.writeData(name="PCh"+str(i+1),
2650 2650 format=str(self.dataOut.nFFTPoints)+'E',
2651 2651 data=10*numpy.log10(self.dataOut.data_spc[i,:]))
2652 2652
2653 2653 colList.append(col)
2654 2654
2655 2655 data=self.wrObj.Ctable(colList=colList)
2656 2656
2657 2657 self.wrObj.CFile(header,data)
2658 2658
2659 2659 self.wrObj.wFile(filename)
2660 2660
2661 2661 #update the setFile
2662 2662 self.setFile += 1
2663 2663 self.idblock += 1
2664 2664
2665 2665 return 1
2666 2666
2667 2667 def run(self, dataOut, **kwargs):
2668 2668
2669 2669 if not(self.isConfig):
2670 2670
2671 2671 self.setup(dataOut, **kwargs)
2672 2672 self.isConfig = True
2673 2673
2674 2674 self.putData()
2675 2675
2676 2676
2677 2677 class FITS:
2678 2678 name=None
2679 2679 format=None
2680 2680 array =None
2681 2681 data =None
2682 2682 thdulist=None
2683 2683 prihdr=None
2684 2684 hdu=None
2685 2685
2686 2686 def __init__(self):
2687 2687
2688 2688 pass
2689 2689
2690 2690 def setColF(self,name,format,array):
2691 2691 self.name=name
2692 2692 self.format=format
2693 2693 self.array=array
2694 2694 a1=numpy.array([self.array],dtype=numpy.float32)
2695 2695 self.col1 = pyfits.Column(name=self.name, format=self.format, array=a1)
2696 2696 return self.col1
2697 2697
2698 2698 # def setColP(self,name,format,data):
2699 2699 # self.name=name
2700 2700 # self.format=format
2701 2701 # self.data=data
2702 2702 # a2=numpy.array([self.data],dtype=numpy.float32)
2703 2703 # self.col2 = pyfits.Column(name=self.name, format=self.format, array=a2)
2704 2704 # return self.col2
2705 2705
2706 2706
2707 2707 def writeData(self,name,format,data):
2708 2708 self.name=name
2709 2709 self.format=format
2710 2710 self.data=data
2711 2711 a2=numpy.array([self.data],dtype=numpy.float32)
2712 2712 self.col2 = pyfits.Column(name=self.name, format=self.format, array=a2)
2713 2713 return self.col2
2714 2714
2715 2715 def cFImage(self,idblock,year,month,day,hour,minute,second):
2716 2716 self.hdu= pyfits.PrimaryHDU(idblock)
2717 2717 self.hdu.header.set("Year",year)
2718 2718 self.hdu.header.set("Month",month)
2719 2719 self.hdu.header.set("Day",day)
2720 2720 self.hdu.header.set("Hour",hour)
2721 2721 self.hdu.header.set("Minute",minute)
2722 2722 self.hdu.header.set("Second",second)
2723 2723 return self.hdu
2724 2724
2725 2725
2726 2726 def Ctable(self,colList):
2727 2727 self.cols=pyfits.ColDefs(colList)
2728 2728 self.tbhdu = pyfits.new_table(self.cols)
2729 2729 return self.tbhdu
2730 2730
2731 2731
2732 2732 def CFile(self,hdu,tbhdu):
2733 2733 self.thdulist=pyfits.HDUList([hdu,tbhdu])
2734 2734
2735 2735 def wFile(self,filename):
2736 2736 if os.path.isfile(filename):
2737 2737 os.remove(filename)
2738 2738 self.thdulist.writeto(filename)
2739 2739
2740 2740
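# Minimal usage sketch of the FITS helper above (assumed to mirror what
# SpectraHeisWriter.putData does; column names, formats and the output filename are
# illustrative only):
#
#     wrObj = FITS()
#     header = wrObj.cFImage(idblock=numpy.array([0], dtype="int64"),
#                            year=2013, month=2, day=6, hour=0, minute=30, second=0)
#     colFreq = wrObj.setColF(name="freq", format="1024E", array=freq_array)
#     colPow  = wrObj.writeData(name="PCh1", format="1024E", data=power_array)
#     data = wrObj.Ctable(colList=[colFreq, colPow])
#     wrObj.CFile(header, data)
#     wrObj.wFile("D2013037_000.fits")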
2741 2741 class ParameterConf:
2742 2742 ELEMENTNAME = 'Parameter'
2743 2743 def __init__(self):
2744 2744 self.name = ''
2745 2745 self.value = ''
2746 2746
2747 2747 def readXml(self, parmElement):
2748 2748 self.name = parmElement.get('name')
2749 2749 self.value = parmElement.get('value')
2750 2750
2751 2751 def getElementName(self):
2752 2752 return self.ELEMENTNAME
2753 2753
2754 2754 class Metadata:
2755 2755
2756 2756 def __init__(self, filename):
2757 2757 self.parmConfObjList = []
2758 2758 self.readXml(filename)
2759 2759
2760 2760 def readXml(self, filename):
2761 2761 self.projectElement = None
2762 2762 self.procUnitConfObjDict = {}
2763 2763 self.projectElement = ElementTree().parse(filename)
2764 2764 self.project = self.projectElement.tag
2765 2765
2766 2766 parmElementList = self.projectElement.getiterator(ParameterConf().getElementName())
2767 2767
2768 2768 for parmElement in parmElementList:
2769 2769 parmConfObj = ParameterConf()
2770 2770 parmConfObj.readXml(parmElement)
2771 2771 self.parmConfObjList.append(parmConfObj)
2772 2772
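# A minimal sketch (hypothetical file) of the metadata XML that Metadata/FitsWriter expect:
# every <Parameter> element provides a FITS header keyword (the root tag name is not
# checked), and the special values 'fromdatadatetime', 'fromdataheights', 'fromdatachannel'
# and 'fromdatasamples' are resolved from dataOut inside FitsWriter.setFitsHeader:
#
#     <Metadata>
#         <Parameter name='EXPNAME'  value='FITS Test' />
#         <Parameter name='DATETIME' value='fromdatadatetime' />
#         <Parameter name='NCHANNEL' value='fromdatachannel' />
#         <Parameter name='NSAMPLE'  value='fromdatasamples' />
#         <Parameter name='NHEIGHT'  value='fromdataheights' />
#         <Parameter name='COMMENT'  value='Test experiment' />
#     </Metadata>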
2773 2773 class FitsWriter(Operation):
2774 2774
2775 2775 def __init__(self):
2776 2776 self.isConfig = False
2777 2777 self.dataBlocksPerFile = None
2778 2778 self.blockIndex = 0
2779 2779 self.flagIsNewFile = 1
2780 2780 self.fitsObj = None
2781 2781 self.optchar = 'P'
2782 2782 self.ext = '.fits'
2783 2783 self.setFile = 0
2784 2784
2785 2785 def setFitsHeader(self, dataOut, metadatafile):
2786 2786
2787 2787 header_data = pyfits.PrimaryHDU()
2788 2788
2789 2789 metadata4fits = Metadata(metadatafile)
2790 2790 for parameter in metadata4fits.parmConfObjList:
2791 2791 parm_name = parameter.name
2792 2792 parm_value = parameter.value
2793 2793
2794 2794 if parm_value == 'fromdatadatetime':
2795 2795 value = time.strftime("%b %d %Y %H:%M:%S", dataOut.datatime.timetuple())
2796 2796 elif parm_value == 'fromdataheights':
2797 2797 value = dataOut.nHeights
2798 2798 elif parm_value == 'fromdatachannel':
2799 2799 value = dataOut.nChannels
2800 2800 elif parm_value == 'fromdatasamples':
2801 2801 value = dataOut.nFFTPoints
2802 2802 else:
2803 2803 value = parm_value
2804 2804
2805 2805 header_data.header[parm_name] = value
2806 2806
2807 2807 header_data.header['NBLOCK'] = self.blockIndex
2808 2808
2809 2809 header_data.writeto(self.filename)
2810 2810
2811 2811
2812 2812 def setup(self, dataOut, path, dataBlocksPerFile, metadatafile):
2813 2813
2814 2814 self.path = path
2815 2815 self.dataOut = dataOut
2816 2816 self.metadatafile = metadatafile
2817 2817 self.dataBlocksPerFile = dataBlocksPerFile
2818 2818
2819 2819 def open(self):
2820 2820 self.fitsObj = pyfits.open(self.filename, mode='update')
2821 2821
2822 2822
2823 2823 def addData(self, data):
2824 2824 self.open()
2825 2825 extension = pyfits.ImageHDU(data=data, name=self.fitsObj[0].header['DATA'])
2826 2826 extension.header['UTCTIME'] = self.dataOut.utctime
2827 2827 self.fitsObj.append(extension)
2828 2828 self.blockIndex += 1
2829 2829 self.fitsObj[0].header['NBLOCK'] = self.blockIndex
2830 2830
2831 2831 self.write()
2832 2832
2833 2833 def write(self):
2834 2834
2835 2835 self.fitsObj.flush(verbose=True)
2836 2836 self.fitsObj.close()
2837 2837
2838 2838
2839 2839 def setNextFile(self):
2840 2840
2841 2841 ext = self.ext
2842 2842 path = self.path
2843 2843
2844 2844 timeTuple = time.localtime( self.dataOut.utctime)
2845 2845 subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
2846 2846
2847 2847 fullpath = os.path.join( path, subfolder )
2848 2848 if not( os.path.exists(fullpath) ):
2849 2849 os.mkdir(fullpath)
2850 2850             self.setFile = -1 #initialize the file set counter
2851 2851 else:
2852 2852 filesList = os.listdir( fullpath )
2853 2853 if len( filesList ) > 0:
2854 2854 filesList = sorted( filesList, key=str.lower )
2855 2855 filen = filesList[-1]
2856 2856
2857 2857 if isNumber( filen[8:11] ):
2858 2858                     self.setFile = int( filen[8:11] ) #initialize the set counter to the set number of the last file
2859 2859 else:
2860 2860 self.setFile = -1
2861 2861 else:
2862 2862                 self.setFile = -1 #initialize the file set counter
2863 2863
2864 2864 setFile = self.setFile
2865 2865 setFile += 1
2866 2866
2867 2867 file = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
2868 2868 timeTuple.tm_year,
2869 2869 timeTuple.tm_yday,
2870 2870 setFile,
2871 2871 ext )
2872 2872
2873 2873 filename = os.path.join( path, subfolder, file )
2874 2874
2875 2875 self.blockIndex = 0
2876 2876 self.filename = filename
2877 2877 self.setFile = setFile
2878 2878 self.flagIsNewFile = 1
2879 2879
2880 2880 print 'Writing the file: %s'%self.filename
2881 2881
2882 2882 self.setFitsHeader(self.dataOut, self.metadatafile)
2883 2883
2884 2884 return 1
2885 2885
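    # Example (illustrative) of the file naming scheme used by setNextFile: with
    # optchar='P', utctime falling on day-of-year 37 of 2013 and set number 0, the output
    # file is written to <path>/d2013037/P2013037000.fits; the set number is taken from
    # the last existing file in that folder and incremented by one.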
2886 2886 def writeBlock(self):
2887 2887 self.addData(self.dataOut.data_spc)
2888 2888 self.flagIsNewFile = 0
2889 2889
2890 2890
2891 2891 def __setNewBlock(self):
2892 2892
2893 2893 if self.flagIsNewFile:
2894 2894 return 1
2895 2895
2896 2896 if self.blockIndex < self.dataBlocksPerFile:
2897 2897 return 1
2898 2898
2899 2899 if not( self.setNextFile() ):
2900 2900 return 0
2901 2901
2902 2902 return 1
2903 2903
2904 2904 def writeNextBlock(self):
2905 2905 if not( self.__setNewBlock() ):
2906 2906 return 0
2907 2907 self.writeBlock()
2908 2908 return 1
2909 2909
2910 2910 def putData(self):
2911 2911 if self.flagIsNewFile:
2912 2912 self.setNextFile()
2913 2913 self.writeNextBlock()
2914 2914
2915 2915 def run(self, dataOut, **kwargs):
2916 2916 if not(self.isConfig):
2917 2917 self.setup(dataOut, **kwargs)
2918 2918 self.isConfig = True
2919 2919 self.putData()
2920 2920
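# Minimal usage sketch of FitsWriter as a standalone operation (parameter values are
# illustrative; inside the signal chain it is normally attached through the controller):
#
#     fitsWriterObj = FitsWriter()
#     fitsWriterObj.run(dataOut,
#                       path='/output/fits',
#                       dataBlocksPerFile=100,
#                       metadatafile='fits_metadata.xml')
#
# The first call configures the writer (setup) and every call appends one spectra block,
# opening a new file after dataBlocksPerFile blocks.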
2921 2921
2922 class FitsReader(ProcessingUnit):
2923
2924 __TIMEZONE = time.timezone
2925
2926 expName = None
2927 datetimestr = None
2928 utc = None
2929 nChannels = None
2930 nSamples = None
2931 dataBlocksPerFile = None
2932 comments = None
2933 lastUTTime = None
2934 header_dict = None
2935 data = None
2936 data_header_dict = None
2937
2938 def __init__(self):
2939 self.isConfig = False
2940 self.ext = '.fits'
2941 self.setFile = 0
2942 self.flagNoMoreFiles = 0
2943 self.flagIsNewFile = 1
2944 self.flagTimeBlock = None
2945 self.fileIndex = None
2946 self.filename = None
2947 self.fileSize = None
2948 self.fitsObj = None
2949 self.nReadBlocks = 0
2950 self.nTotalBlocks = 0
2951 self.dataOut = self.createObjByDefault()
2952         self.maxTimeStep = 10 # should be defined by the user via the setup() method
2953 self.blockIndex = 1
2954
2955 def createObjByDefault(self):
2956
2957 dataObj = Fits()
2958
2959 return dataObj
2960
2961 def isFileinThisTime(self, filename, startTime, endTime, useLocalTime=False):
2962 try:
2963 fitsObj = pyfits.open(filename,'readonly')
2964 except:
2965 raise IOError, "The file %s can't be opened" %(filename)
2966
2967 header = fitsObj[0].header
2968 struct_time = time.strptime(header['DATETIME'], "%b %d %Y %H:%M:%S")
2969         utc = time.mktime(struct_time) - time.timezone #TIMEZONE should be a parameter of the FITS header
2970
2971 ltc = utc
2972 if useLocalTime:
2973 ltc -= time.timezone
2974 thisDatetime = datetime.datetime.utcfromtimestamp(ltc)
2975 thisTime = thisDatetime.time()
2976
2977 if not ((startTime <= thisTime) and (endTime > thisTime)):
2978 return None
2979
2980 return thisDatetime
2981
2982 def __setNextFileOnline(self):
2983         raise ValueError, "Not implemented"
2984
2985 def __setNextFileOffline(self):
2986 idFile = self.fileIndex
2987
2988 while (True):
2989 idFile += 1
2990 if not(idFile < len(self.filenameList)):
2991 self.flagNoMoreFiles = 1
2992 print "No more Files"
2993 return 0
2994
2995 filename = self.filenameList[idFile]
2996
2997 # if not(self.__verifyFile(filename)):
2998 # continue
2999
3000 fileSize = os.path.getsize(filename)
3001 fitsObj = pyfits.open(filename,'readonly')
3002 break
3003
3004 self.flagIsNewFile = 1
3005 self.fileIndex = idFile
3006 self.filename = filename
3007 self.fileSize = fileSize
3008 self.fitsObj = fitsObj
3009
3010 print "Setting the file: %s"%self.filename
3011
3012 return 1
3013
3014 def readHeader(self):
3015 headerObj = self.fitsObj[0]
3016
3017 self.header_dict = headerObj.header
3018 self.expName = headerObj.header['EXPNAME']
3019 self.datetimestr = headerObj.header['DATETIME']
3020 struct_time = time.strptime(headerObj.header['DATETIME'], "%b %d %Y %H:%M:%S")
3021 # self.utc = time.mktime(struct_time) - self.__TIMEZONE
3022 self.nChannels = headerObj.header['NCHANNEL']
3023 self.nSamples = headerObj.header['NSAMPLE']
3024 self.dataBlocksPerFile = headerObj.header['NBLOCK']
3025 self.comments = headerObj.header['COMMENT']
3026
3027
3028 def setNextFile(self):
3029
3030 if self.online:
3031 newFile = self.__setNextFileOnline()
3032 else:
3033 newFile = self.__setNextFileOffline()
3034
3035 if not(newFile):
3036 return 0
3037
3038 self.readHeader()
3039
3040 self.nReadBlocks = 0
3041 self.blockIndex = 1
3042 return 1
3043
3044 def __searchFilesOffLine(self,
3045 path,
3046 startDate,
3047 endDate,
3048 startTime=datetime.time(0,0,0),
3049 endTime=datetime.time(23,59,59),
3050 set=None,
3051 expLabel='',
3052 ext='.fits',
3053 walk=True):
3054
3055 pathList = []
3056
3057 if not walk:
3058 pathList.append(path)
3059
3060 else:
3061 dirList = []
3062 for thisPath in os.listdir(path):
3063 if not os.path.isdir(os.path.join(path,thisPath)):
3064 continue
3065 if not isDoyFolder(thisPath):
3066 continue
3067
3068 dirList.append(thisPath)
3069
3070 if not(dirList):
3071 return None, None
3072
3073 thisDate = startDate
3074
3075 while(thisDate <= endDate):
3076 year = thisDate.timetuple().tm_year
3077 doy = thisDate.timetuple().tm_yday
3078
3079 matchlist = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy) + '*')
3080 if len(matchlist) == 0:
3081 thisDate += datetime.timedelta(1)
3082 continue
3083 for match in matchlist:
3084 pathList.append(os.path.join(path,match,expLabel))
3085
3086 thisDate += datetime.timedelta(1)
3087
3088 if pathList == []:
3089             print "No folders were found for the date range: %s-%s" %(startDate, endDate)
3090 return None, None
3091
3092         print "%d folder(s) found for the date range: %s - %s" %(len(pathList), startDate, endDate)
3093
3094 filenameList = []
3095 datetimeList = []
3096
3097 for i in range(len(pathList)):
3098
3099 thisPath = pathList[i]
3100
3101 fileList = glob.glob1(thisPath, "*%s" %ext)
3102 fileList.sort()
3103
3104 for file in fileList:
3105
3106 filename = os.path.join(thisPath,file)
3107 thisDatetime = self.isFileinThisTime(filename, startTime, endTime, useLocalTime=True)
3108
3109 if not(thisDatetime):
3110 continue
3111
3112 filenameList.append(filename)
3113 datetimeList.append(thisDatetime)
3114
3115 if not(filenameList):
3116             print "No files were found for the time range %s - %s" %(startTime, endTime)
3117 return None, None
3118
3119         print "%d file(s) found for the time range: %s - %s" %(len(filenameList), startTime, endTime)
3120 print
3121
3122 for i in range(len(filenameList)):
3123 print "%s -> [%s]" %(filenameList[i], datetimeList[i].ctime())
3124
3125 self.filenameList = filenameList
3126 self.datetimeList = datetimeList
3127
3128 return pathList, filenameList
3129
3130 def setup(self, path=None,
3131 startDate=None,
3132 endDate=None,
3133 startTime=datetime.time(0,0,0),
3134 endTime=datetime.time(23,59,59),
3135 set=0,
3136 expLabel = "",
3137 ext = None,
3138 online = False,
3139 delay = 60,
3140 walk = True):
3141
3142 if path is None:
3143 raise ValueError, "The path is not valid"
3144
3145 if ext is None:
3146 ext = self.ext
3147
3148 if not(online):
3149 print "Searching files in offline mode ..."
3150 pathList, filenameList = self.__searchFilesOffLine(path, startDate=startDate, endDate=endDate,
3151 startTime=startTime, endTime=endTime,
3152 set=set, expLabel=expLabel, ext=ext,
3153 walk=walk)
3154
3155 if not(pathList):
3156 print "No *%s files found in the folder %s \nfor the range: %s - %s"%(ext, path,
3157 datetime.datetime.combine(startDate,startTime).ctime(),
3158 datetime.datetime.combine(endDate,endTime).ctime())
3159
3160 sys.exit(-1)
3161
3162 self.fileIndex = -1
3163 self.pathList = pathList
3164 self.filenameList = filenameList
3165
3166 self.online = online
3167 self.delay = delay
3168 ext = ext.lower()
3169 self.ext = ext
3170
3171 if not(self.setNextFile()):
3172 if (startDate is not None) and (endDate is not None):
3173 print "No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
3174 elif startDate is not None:
3175 print "No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
3176 else:
3177 print "No files"
3178
3179 sys.exit(-1)
3180
3181
3182
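# readBlock (below) treats each FITS extension HDU as one data block: HDU 0 is
# the primary header consumed by readHeader, so blockIndex starts at 1 in
# setNextFile and is advanced on every read. Each block header is expected to
# carry a UTCTIME card with the block timestamp.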
3183 def readBlock(self):
3184 dataObj = self.fitsObj[self.blockIndex]
3185
3186 self.data = dataObj.data
3187 self.data_header_dict = dataObj.header
3188 self.utc = self.data_header_dict['UTCTIME']
3189
3190 self.flagIsNewFile = 0
3191 self.blockIndex += 1
3192 self.nTotalBlocks += 1
3193 self.nReadBlocks += 1
3194
3195 return 1
3196
3197 def __jumpToLastBlock(self):
3198 raise NotImplementedError, "__jumpToLastBlock is not implemented yet"
3199
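# __waitNewBlock (below) follows the pattern of the raw binary block readers:
# it re-opens self.fp, compares the file growth against
# self.processingHeaderObj.blockSize + self.basicHeaderSize and re-reads the
# basic header. Those attributes do not appear in the FITS-specific code shown
# here, so this online path presumably only works once they are provided
# elsewhere (or the method is adapted to pyfits).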
3200 def __waitNewBlock(self):
3201 """
3202 Return 1 if a new data block was found, 0 otherwise.
3203
3204 If the reading mode is offline, this always returns 0.
3205 """
3206 if not self.online:
3207 return 0
3208
3209 if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile):
3210 return 0
3211
3212 currentPointer = self.fp.tell()
3213
3214 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
3215
3216 for nTries in range( self.nTries ):
3217
3218 self.fp.close()
3219 self.fp = open( self.filename, 'rb' )
3220 self.fp.seek( currentPointer )
3221
3222 self.fileSize = os.path.getsize( self.filename )
3223 currentSize = self.fileSize - currentPointer
3224
3225 if ( currentSize >= neededSize ):
3226 self.__rdBasicHeader()
3227 return 1
3228
3229 print "\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
3230 time.sleep( self.delay )
3231
3232
3233 return 0
3234
3235 def __setNewBlock(self):
3236
3237 if self.online:
3238 self.__jumpToLastBlock()
3239
3240 if self.flagIsNewFile:
3241 return 1
3242
3243 self.lastUTTime = self.utc
3244
3245 if self.online:
3246 if self.__waitNewBlock():
3247 return 1
3248
3249 if self.nReadBlocks < self.dataBlocksPerFile:
3250 return 1
3251
3252 if not(self.setNextFile()):
3253 return 0
3254
3255 deltaTime = self.utc - self.lastUTTime
3256
3257 self.flagTimeBlock = 0
3258
3259 if deltaTime > self.maxTimeStep:
3260 self.flagTimeBlock = 1
3261
3262 return 1
3263
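# flagTimeBlock is the discontinuity marker: __setNewBlock (above) raises it
# whenever the gap between consecutive block timestamps (self.utc minus
# self.lastUTTime) exceeds maxTimeStep, presumably so downstream units can
# detect missing data.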
3264
3265 def readNextBlock(self):
3266 if not(self.__setNewBlock()):
3267 return 0
3268
3269 if not(self.readBlock()):
3270 return 0
3271
3272 return 1
3273
3274
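# getData (below) is the per-call entry point: it advances to the next block
# (and to the next file when the current one is exhausted), copies the block
# payload and both header dictionaries into self.dataOut, and returns the data
# array; it returns 0 and sets dataOut.flagNoData when nothing is available.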
3275 def getData(self):
3276
3277 if self.flagNoMoreFiles:
3278 self.dataOut.flagNoData = True
3279 print 'Process finished'
3280 return 0
3281
3282 self.flagTimeBlock = 0
3283 self.flagIsNewBlock = 0
3284
3285 if not(self.readNextBlock()):
3286 return 0
3287
3288 if self.data is None:
3289 self.dataOut.flagNoData = True
3290 return 0
3291
3292 self.dataOut.data = self.data
3293 self.dataOut.data_header = self.data_header_dict
3294 self.dataOut.utctime = self.utc
3295
3296 self.dataOut.header = self.header_dict
3297 self.dataOut.expName = self.expName
3298 self.dataOut.nChannels = self.nChannels
3299 self.dataOut.nSamples = self.nSamples
3300 self.dataOut.dataBlocksPerFile = self.dataBlocksPerFile
3301 self.dataOut.comments = self.comments
3302
3303 self.dataOut.flagNoData = False
3304
3305 return self.dataOut.data
3306
3307 def run(self, **kwargs):
3308
3309 if not(self.isConfig):
3310 self.setup(**kwargs)
3311 self.isConfig = True
3312
3313 self.getData()
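
# Hedged usage sketch of a standalone driving loop. The attribute names
# (isConfig, flagNoMoreFiles, dataOut) come from this class; the class name,
# constructor call, keyword values and the downstream consumer are hypothetical
# and would normally be handled by the controller framework instead:
#
#   reader = FitsReader()                      # class name assumed
#   while True:
#       reader.run(path='/data/fits',          # hypothetical path/dates
#                  startDate=datetime.date(2013, 2, 6),
#                  endDate=datetime.date(2013, 12, 31),
#                  walk=False)
#       if reader.flagNoMoreFiles:
#           break
#       if reader.dataOut.flagNoData:
#           continue
#       process(reader.dataOut)                # hypothetical consumer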