First version of the controller, tried and tested, including graphics
Miguel Valdez -
r199:83ce53852b6c
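This changeset reworks the controller API: processing units are now declared by datatype, operation parameters carry a format attribute instead of type, each run() call returns a status flag so the main loop can stop when every unit is done, and the new model.jroplot module is imported for the plotting operations. Below is a minimal driver sketch based on the __main__ block in the first hunk; the module name, data path and plot parameters are placeholders, not part of the changeset:

from controller import Controller   # assumed module name for the file in the first hunk

controllerObj = Controller()
controllerObj.setup(id='191', name='test01', description='test experiment')

readUnitConfObj = controllerObj.addReadUnit(datatype='Spectra',
                                            path='/path/to/pdata',   # placeholder
                                            startDate='2011/01/01',
                                            endDate='2012/12/31',
                                            startTime='00:00:00',
                                            endTime='23:59:59',
                                            online=0)

procUnitConfObj = controllerObj.addProcUnit(datatype='Spectra',
                                            inputId=readUnitConfObj.getId())

opObj = procUnitConfObj.addOperation(name='SpectraPlot', optype='other')
opObj.addParameter(name='idfigure', value='1', format='int')

controllerObj.writeXml('schain.xml')    # serialize, then rebuild and run
controllerObj.readXml('schain.xml')
controllerObj.createObjects()
controllerObj.connectObjects()
controllerObj.run()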
@@ -1,609 +1,634
1 1 '''
2 2 Created on September , 2012
3 3 @author:
4 4 '''
5 5 from xml.etree.ElementTree import Element, SubElement, ElementTree
6 6 from xml.etree import ElementTree as ET
7 7 from xml.dom import minidom
8 8
9 9 import sys
10 10 import datetime
11 11 from model.jrodataIO import *
12 12 from model.jroprocessing import *
13 from model.jroplot import *
13 14
14 15 def prettify(elem):
15 16 """Return a pretty-printed XML string for the Element.
16 17 """
17 18 rough_string = ET.tostring(elem, 'utf-8')
18 19 reparsed = minidom.parseString(rough_string)
19 20 return reparsed.toprettyxml(indent=" ")
20 21
21 22 class ParameterConf():
22 23
23 24 id = None
24 25 name = None
25 26 value = None
26 type = None
27 format = None
27 28
28 29 ELEMENTNAME = 'Parameter'
29 30
30 31 def __init__(self):
31 32
32 self.type = 'str'
33 self.format = 'str'
33 34
34 35 def getElementName(self):
35 36
36 37 return self.ELEMENTNAME
37 38
38 39 def getValue(self):
39 40
40 if self.type == 'list':
41 if self.format == 'list':
41 42 strList = self.value.split(',')
42 43 return strList
43 44
44 if self.type == 'intlist':
45 if self.format == 'intlist':
45 46 strList = self.value.split(',')
46 47 intList = [int(x) for x in strList]
47 48 return intList
48 49
49 if self.type == 'floatlist':
50 if self.format == 'floatlist':
50 51 strList = self.value.split(',')
51 52 floatList = [float(x) for x in strList]
52 53 return floatList
53 54
54 if self.type == 'date':
55 if self.format == 'date':
55 56 strList = self.value.split('/')
56 57 intList = [int(x) for x in strList]
57 58 date = datetime.date(intList[0], intList[1], intList[2])
58 59 return date
59 60
60 if self.type == 'time':
61 if self.format == 'time':
61 62 strList = self.value.split(':')
62 63 intList = [int(x) for x in strList]
63 64 time = datetime.time(intList[0], intList[1], intList[2])
64 65 return time
65 66
66 func = eval(self.type)
67 func = eval(self.format)
67 68
68 69 return func(self.value)
69 70
70 def setup(self, id, name, value, type='str'):
71 def setup(self, id, name, value, format='str'):
71 72
72 73 self.id = id
73 74 self.name = name
74 75 self.value = str(value)
75 self.type = type
76 self.format = format
76 77
77 78 def makeXml(self, opElement):
78 79
79 80 parmElement = SubElement(opElement, self.ELEMENTNAME)
80 81 parmElement.set('id', str(self.id))
81 82 parmElement.set('name', self.name)
82 83 parmElement.set('value', self.value)
83 parmElement.set('type', self.type)
84 parmElement.set('format', self.format)
84 85
85 86 def readXml(self, parmElement):
86 87
87 88 self.id = parmElement.get('id')
88 89 self.name = parmElement.get('name')
89 90 self.value = parmElement.get('value')
90 self.type = parmElement.get('type')
91 self.format = parmElement.get('format')
91 92
92 93 def printattr(self):
93 94
94 print "Parameter[%s]: name = %s, value = %s, type = %s" %(self.id, self.name, self.value, self.type)
95 print "Parameter[%s]: name = %s, value = %s, format = %s" %(self.id, self.name, self.value, self.format)
95 96
96 97 class OperationConf():
97 98
98 99 id = None
99 100 name = None
100 101 priority = None
101 102 type = None
102 103
103 104 parmConfObjList = []
104 105
105 106 ELEMENTNAME = 'Operation'
106 107
107 108 def __init__(self):
108 109
109 110 id = 0
110 111 name = None
111 112 priority = None
112 113 type = 'self'
113 114
114 115
115 116 def __getNewId(self):
116 117
117 118 return int(self.id)*10 + len(self.parmConfObjList) + 1
118 119
119 120 def getElementName(self):
120 121
121 122 return self.ELEMENTNAME
122 123
123 124 def getParameterObjList(self):
124 125
125 126 return self.parmConfObjList
126 127
127 128 def setup(self, id, name, priority, type):
128 129
129 130 self.id = id
130 131 self.name = name
131 132 self.type = type
132 133 self.priority = priority
133 134
134 135 self.parmConfObjList = []
135 136
136 def addParameter(self, name, value, type='str'):
137 def addParameter(self, name, value, format='str'):
137 138
138 139 id = self.__getNewId()
139 140
140 141 parmConfObj = ParameterConf()
141 parmConfObj.setup(id, name, value, type)
142 parmConfObj.setup(id, name, value, format)
142 143
143 144 self.parmConfObjList.append(parmConfObj)
144 145
145 146 return parmConfObj
146 147
147 148 def makeXml(self, upElement):
148 149
149 150 opElement = SubElement(upElement, self.ELEMENTNAME)
150 151 opElement.set('id', str(self.id))
151 152 opElement.set('name', self.name)
152 153 opElement.set('type', self.type)
153 154 opElement.set('priority', str(self.priority))
154 155
155 156 for parmConfObj in self.parmConfObjList:
156 157 parmConfObj.makeXml(opElement)
157 158
158 159 def readXml(self, opElement):
159 160
160 161 self.id = opElement.get('id')
161 162 self.name = opElement.get('name')
162 163 self.type = opElement.get('type')
163 164 self.priority = opElement.get('priority')
164 165
165 166 self.parmConfObjList = []
166 167
167 168 parmElementList = opElement.getiterator(ParameterConf().getElementName())
168 169
169 170 for parmElement in parmElementList:
170 171 parmConfObj = ParameterConf()
171 172 parmConfObj.readXml(parmElement)
172 173 self.parmConfObjList.append(parmConfObj)
173 174
174 175 def printattr(self):
175 176
176 177 print "%s[%s]: name = %s, type = %s, priority = %s" %(self.ELEMENTNAME,
177 178 self.id,
178 179 self.name,
179 180 self.type,
180 181 self.priority)
181 182
182 183 for parmConfObj in self.parmConfObjList:
183 184 parmConfObj.printattr()
184 185
185 186 def createObject(self):
186 187
187 188 if self.type == 'self':
188 189 raise ValueError, "This operation type cannot be created"
189 190
190 191 if self.type == 'other':
191 192 className = eval(self.name)
192 193 opObj = className()
193 194
194 195 return opObj
195 196
196 197 class ProcUnitConf():
197 198
198 199 id = None
199 200 name = None
200 type = None
201 datatype = None
201 202 inputId = None
202 203
203 204 opConfObjList = []
204 205
205 206 procUnitObj = None
206 207 opObjList = []
207 208
208 209 ELEMENTNAME = 'ProcUnit'
209 210
210 211 def __init__(self):
211 212
212 213 self.id = None
213 self.type = None
214 self.datatype = None
214 215 self.name = None
215 216 self.inputId = None
216 217
217 218 self.opConfObjList = []
218 219
219 220 self.procUnitObj = None
220 221 self.opObjDict = {}
221 222
222 223 def __getPriority(self):
223 224
224 225 return len(self.opConfObjList)+1
225 226
226 227 def __getNewId(self):
227 228
228 229 return int(self.id)*10 + len(self.opConfObjList) + 1
229 230
230 231 def getElementName(self):
231 232
232 233 return self.ELEMENTNAME
233 234
234 235 def getId(self):
235 236
236 237 return str(self.id)
237 238
238 239 def getInputId(self):
239 240
240 241 return str(self.inputId)
241 242
242 243 def getOperationObjList(self):
243 244
244 245 return self.opConfObjList
245 246
246 247 def getProcUnitObj(self):
247 248
248 249 return self.procUnitObj
249 250
250 def setup(self, id, name, type, inputId):
251 def setup(self, id, name, datatype, inputId):
251 252
252 253 self.id = id
253 254 self.name = name
254 self.type = type
255 self.datatype = datatype
255 256 self.inputId = inputId
256 257
257 258 self.opConfObjList = []
258 259
259 260 self.addOperation(name='init', optype='self')
260 261
261 262 def addOperation(self, name, optype='self'):
262 263
263 264 id = self.__getNewId()
264 265 priority = self.__getPriority()
265 266
266 267 opConfObj = OperationConf()
267 268 opConfObj.setup(id, name=name, priority=priority, type=optype)
268 269
269 270 self.opConfObjList.append(opConfObj)
270 271
271 272 return opConfObj
272 273
273 274 def makeXml(self, procUnitElement):
274 275
275 276 upElement = SubElement(procUnitElement, self.ELEMENTNAME)
276 277 upElement.set('id', str(self.id))
277 278 upElement.set('name', self.name)
278 upElement.set('type', self.type)
279 upElement.set('datatype', self.datatype)
279 280 upElement.set('inputId', str(self.inputId))
280 281
281 282 for opConfObj in self.opConfObjList:
282 283 opConfObj.makeXml(upElement)
283 284
284 285 def readXml(self, upElement):
285 286
286 287 self.id = upElement.get('id')
287 288 self.name = upElement.get('name')
288 self.type = upElement.get('type')
289 self.datatype = upElement.get('datatype')
289 290 self.inputId = upElement.get('inputId')
290 291
291 292 self.opConfObjList = []
292 293
293 294 opElementList = upElement.getiterator(OperationConf().getElementName())
294 295
295 296 for opElement in opElementList:
296 297 opConfObj = OperationConf()
297 298 opConfObj.readXml(opElement)
298 299 self.opConfObjList.append(opConfObj)
299 300
300 301 def printattr(self):
301 302
302 print "%s[%s]: name = %s, type = %s, inputId = %s" %(self.ELEMENTNAME,
303 print "%s[%s]: name = %s, datatype = %s, inputId = %s" %(self.ELEMENTNAME,
303 304 self.id,
304 305 self.name,
305 self.type,
306 self.datatype,
306 307 self.inputId)
307 308
308 309 for opConfObj in self.opConfObjList:
309 310 opConfObj.printattr()
310 311
311 312 def createObjects(self):
312 313
313 314 className = eval(self.name)
314 315 procUnitObj = className()
315 316
316 317 for opConfObj in self.opConfObjList:
317 318
318 319 if opConfObj.type == 'self':
319 320 continue
320 321
321 322 opObj = opConfObj.createObject()
322 323
323 324 self.opObjDict[opConfObj.id] = opObj
324 325 procUnitObj.addOperation(opObj, opConfObj.id)
325 326
326 327 self.procUnitObj = procUnitObj
327 328
328 329 return procUnitObj
329 330
330 331 def run(self):
331 332
333 finalSts = False
334
332 335 for opConfObj in self.opConfObjList:
336
333 337 kwargs = {}
334 338 for parmConfObj in opConfObj.getParameterObjList():
335 339 kwargs[parmConfObj.name] = parmConfObj.getValue()
336
337 self.procUnitObj.call(opConfObj, **kwargs)
338 340
339
340
341 #print "\tRunning the '%s' operation with %s" %(opConfObj.name, opConfObj.id)
342 sts = self.procUnitObj.call(opConfObj, **kwargs)
343 finalSts = finalSts or sts
344
345 return finalSts
346
341 347 class ReadUnitConf(ProcUnitConf):
342 348
343
344 349 path = None
345 350 startDate = None
346 351 endDate = None
347 352 startTime = None
348 353 endTime = None
349 354 online = None
350 355 expLabel = None
351 356 delay = None
352 357
353 358 ELEMENTNAME = 'ReadUnit'
354 359
355 360 def __init__(self):
356 361
357 362 self.id = None
358 self.type = None
363 self.datatype = None
359 364 self.name = None
360 365 self.inputId = 0
361 366
362 367 self.opConfObjList = []
363 368 self.opObjList = []
364 369
365 370 def getElementName(self):
366 371
367 372 return self.ELEMENTNAME
368 373
369 def setup(self, id, name, type, path, startDate, endDate, startTime, endTime, online=0, expLabel='', delay=60):
374 def setup(self, id, name, datatype, path, startDate, endDate, startTime, endTime, online=0, expLabel='', delay=60):
370 375
371 376 self.id = id
372 377 self.name = name
373 self.type = type
378 self.datatype = datatype
374 379
375 380 self.path = path
376 381 self.startDate = startDate
377 382 self.endDate = endDate
378 383 self.startTime = startTime
379 384 self.endTime = endTime
380 385 self.online = online
381 386 self.expLabel = expLabel
382 387 self.delay = delay
383 388
384 389 self.addRunOperation()
385 390
386 391 def addRunOperation(self):
387 392
388 393 opObj = self.addOperation(name = 'run', optype = 'self')
389 394
390 opObj.addParameter(name='path' , value=self.path, type='str')
391 opObj.addParameter(name='startDate' , value=self.startDate, type='date')
392 opObj.addParameter(name='endDate' , value=self.endDate, type='date')
393 opObj.addParameter(name='startTime' , value=self.startTime, type='time')
394 opObj.addParameter(name='endTime' , value=self.endTime, type='time')
395 opObj.addParameter(name='expLabel' , value=self.expLabel, type='str')
396 opObj.addParameter(name='online' , value=self.online, type='bool')
397 opObj.addParameter(name='delay' , value=self.delay, type='float')
395 opObj.addParameter(name='path' , value=self.path, format='str')
396 opObj.addParameter(name='startDate' , value=self.startDate, format='date')
397 opObj.addParameter(name='endDate' , value=self.endDate, format='date')
398 opObj.addParameter(name='startTime' , value=self.startTime, format='time')
399 opObj.addParameter(name='endTime' , value=self.endTime, format='time')
400 opObj.addParameter(name='expLabel' , value=self.expLabel, format='str')
401 opObj.addParameter(name='online' , value=self.online, format='int')
402 opObj.addParameter(name='delay' , value=self.delay, format='float')
398 403
399 404 return opObj
400 405
401 406
402 407 class Controller():
403 408
404 409 id = None
405 410 name = None
406 411 description = None
407 412 # readUnitConfObjList = None
408 413 procUnitConfObjDict = None
409 414
410 415 ELEMENTNAME = 'Controller'
411 416
412 417 def __init__(self):
413 418
414 419 self.id = None
415 420 self.name = None
416 421 self.description = None
417 422
418 423 # self.readUnitConfObjList = []
419 424 self.procUnitConfObjDict = {}
420 425
421 426 def __getNewId(self):
422 427
423 428 id = int(self.id)*10 + len(self.procUnitConfObjDict) + 1
424 429
425 430 return str(id)
426 431
427 432 def getElementName(self):
428 433
429 434 return self.ELEMENTNAME
430 435
431 436 def setup(self, id, name, description):
432 437
433 438 self.id = id
434 439 self.name = name
435 440 self.description = description
436 441
437 def addReadUnit(self, type, path, startDate='', endDate='', startTime='', endTime='', online=0, expLabel='', delay=60):
442 def addReadUnit(self, datatype, path, startDate='', endDate='', startTime='', endTime='', online=0, expLabel='', delay=60):
438 443
439 444 id = self.__getNewId()
440 name = '%sReader' %(type)
445 name = '%sReader' %(datatype)
441 446
442 447 readUnitConfObj = ReadUnitConf()
443 readUnitConfObj.setup(id, name, type, path, startDate, endDate, startTime, endTime, online, expLabel, delay)
448 readUnitConfObj.setup(id, name, datatype, path, startDate, endDate, startTime, endTime, online, expLabel, delay)
444 449
445 450 self.procUnitConfObjDict[readUnitConfObj.getId()] = readUnitConfObj
446 451
447 452 return readUnitConfObj
448 453
449 def addProcUnit(self, type, inputId):
454 def addProcUnit(self, datatype, inputId):
450 455
451 456 id = self.__getNewId()
452 name = '%sProc' %(type)
457 name = '%sProc' %(datatype)
453 458
454 459 procUnitConfObj = ProcUnitConf()
455 procUnitConfObj.setup(id, name, type, inputId)
460 procUnitConfObj.setup(id, name, datatype, inputId)
456 461
457 462 self.procUnitConfObjDict[procUnitConfObj.getId()] = procUnitConfObj
458 463
459 464 return procUnitConfObj
460 465
461 466 def makeXml(self):
462 467
463 468 projectElement = Element('Controller')
464 469 projectElement.set('id', str(self.id))
465 470 projectElement.set('name', self.name)
466 471 projectElement.set('description', self.description)
467 472
468 473 # for readUnitConfObj in self.readUnitConfObjList:
469 474 # readUnitConfObj.makeXml(projectElement)
470 475
471 476 for procUnitConfObj in self.procUnitConfObjDict.values():
472 477 procUnitConfObj.makeXml(projectElement)
473 478
474 479 self.projectElement = projectElement
475 480
476 481 def writeXml(self, filename):
477 482
478 483 self.makeXml()
479 484
480 485 print prettify(self.projectElement)
481 486
482 487 ElementTree(self.projectElement).write(filename, method='xml')
483 488
484 489 def readXml(self, filename):
485 490
486 491 #tree = ET.parse(filename)
487 492 self.projectElement = None
488 493 # self.readUnitConfObjList = []
489 494 self.procUnitConfObjDict = {}
490 495
491 496 self.projectElement = ElementTree().parse(filename)
492 497
493 498 self.project = self.projectElement.tag
494 499
495 500 self.id = self.projectElement.get('id')
496 501 self.name = self.projectElement.get('name')
497 502 self.description = self.projectElement.get('description')
498 503
499 504 readUnitElementList = self.projectElement.getiterator(ReadUnitConf().getElementName())
500 505
501 506 for readUnitElement in readUnitElementList:
502 507 readUnitConfObj = ReadUnitConf()
503 508 readUnitConfObj.readXml(readUnitElement)
504 509
505 510 self.procUnitConfObjDict[readUnitConfObj.getId()] = readUnitConfObj
506 511
507 512 procUnitElementList = self.projectElement.getiterator(ProcUnitConf().getElementName())
508 513
509 514 for procUnitElement in procUnitElementList:
510 515 procUnitConfObj = ProcUnitConf()
511 516 procUnitConfObj.readXml(procUnitElement)
512 517
513 518 self.procUnitConfObjDict[procUnitConfObj.getId()] = procUnitConfObj
514 519
515 520 def printattr(self):
516 521
517 522 print "Controller[%s]: name = %s, description = %s" %(self.id,
518 523 self.name,
519 524 self.description)
520 525
521 526 # for readUnitConfObj in self.readUnitConfObjList:
522 527 # readUnitConfObj.printattr()
523 528
524 529 for procUnitConfObj in self.procUnitConfObjDict.values():
525 530 procUnitConfObj.printattr()
526 531
527 532 def createObjects(self):
528 533
529 534 # for readUnitConfObj in self.readUnitConfObjList:
530 535 # readUnitConfObj.createObjects()
531 536
532 537 for procUnitConfObj in self.procUnitConfObjDict.values():
533 538 procUnitConfObj.createObjects()
534 539
535 540 def __connect(self, objIN, obj):
536 541
537 542 obj.setInput(objIN.getOutput())
538 543
539 544 def connectObjects(self):
540 545
541 546 for puConfObj in self.procUnitConfObjDict.values():
542 547
543 548 inputId = puConfObj.getInputId()
544 549
545 550 if int(inputId) == 0:
546 551 continue
547 552
548 553 puConfINObj = self.procUnitConfObjDict[inputId]
549 554
550 555 puObj = puConfObj.getProcUnitObj()
551 556 puINObj = puConfINObj.getProcUnitObj()
552 557
553 558 self.__connect(puINObj, puObj)
554 559
555 560 def run(self):
556 561
557 562 # for readUnitConfObj in self.readUnitConfObjList:
558 563 # readUnitConfObj.run()
564
559 565 while(True):
566
567 finalSts = False
568
560 569 for procUnitConfObj in self.procUnitConfObjDict.values():
561 procUnitConfObj.run()
570 #print "Running the '%s' process with %s" %(procUnitConfObj.name, procUnitConfObj.id)
571 sts = procUnitConfObj.run()
572 finalSts = finalSts or sts
573
574 #If every process unit has finished, end the process
575 if not(finalSts):
576 print "Every process unit finished"
577 break
562 578
563 579 if __name__ == '__main__':
564 580
565 581 desc = "Segundo Test"
566 582 filename = "schain.xml"
567 583
568 584 controllerObj = Controller()
569 585
570 586 controllerObj.setup(id = '191', name='test01', description=desc)
571 587
572 readUnitConfObj = controllerObj.addReadUnit(type='Voltage',
573 path='/home/roj-idl71/Data/RAWDATA/Meteors',
574 startDate='2012/01/01',
588 readUnitConfObj = controllerObj.addReadUnit(datatype='Spectra',
589 path='D:\Data\IMAGING',
590 startDate='2011/01/01',
575 591 endDate='2012/12/31',
576 592 startTime='00:00:00',
577 593 endTime='23:59:59',
578 594 online=0)
579 595
580 procUnitConfObj1 = controllerObj.addProcUnit(type='Voltage', inputId=readUnitConfObj.getId())
596 opObj00 = readUnitConfObj.addOperation(name='printTotalBlocks')
581 597
582 procUnitConfObj2 = controllerObj.addProcUnit(type='Voltage', inputId=procUnitConfObj1.getId())
598 procUnitConfObj1 = controllerObj.addProcUnit(datatype='Spectra', inputId=readUnitConfObj.getId())
583 599
584 opObj11 = procUnitConfObj1.addOperation(name='selectChannels')
585 opObj11.addParameter(name='channelList', value='1,2', type='intlist')
600 opObj10 = procUnitConfObj1.addOperation(name='selectChannels')
601 opObj10.addParameter(name='channelList', value='0,1', format='intlist')
586 602
603 opObj11 = procUnitConfObj1.addOperation(name='SpectraPlot', optype='other')
604 opObj11.addParameter(name='idfigure', value='1', format='int')
605 opObj11.addParameter(name='wintitle', value='SpectraPlot', format='str')
606 opObj11.addParameter(name='zmin', value='60', format='int')
607 opObj11.addParameter(name='zmax', value='100', format='int')
608
587 609 # opObj12 = procUnitConfObj1.addOperation(name='decoder')
588 # opObj12.addParameter(name='ncode', value='2', type='int')
589 # opObj12.addParameter(name='nbauds', value='8', type='int')
590 # opObj12.addParameter(name='code0', value='001110011', type='int')
591 # opObj12.addParameter(name='code1', value='001110011', type='int')
610 # opObj12.addParameter(name='ncode', value='2', format='int')
611 # opObj12.addParameter(name='nbauds', value='8', format='int')
612 # opObj12.addParameter(name='code0', value='001110011', format='int')
613 # opObj12.addParameter(name='code1', value='001110011', format='int')
614
615 # procUnitConfObj2 = controllerObj.addProcUnit(datatype='Spectra', inputId=procUnitConfObj1.getId())
616
592 617
593 opObj21 = procUnitConfObj2.addOperation(name='CohInt', optype='other')
594 opObj21.addParameter(name='nCohInt', value='10', type='int')
618 # opObj21 = procUnitConfObj2.addOperation(name='IncohInt', optype='other')
619 # opObj21.addParameter(name='nCohInt', value='10', format='int')
595 620
596 621
597 622 print "Escribiendo el archivo XML"
598 623
599 624 controllerObj.writeXml(filename)
600 625
601 626 print "Leyendo el archivo XML"
602 627 controllerObj.readXml(filename)
603 628 #controllerObj.printattr()
604 629
605 630 controllerObj.createObjects()
606 631 controllerObj.connectObjects()
607 632 controllerObj.run()
608 633
609 634 No newline at end of file
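For reference, the hunk above replaces ParameterConf's type attribute with format; getValue() then decodes the stored string according to that format, falling back to eval of the format name as a constructor. A standalone sketch of that parsing logic (illustrative only, not the project class):

import datetime

def parse_value(value, format='str'):
    # mirrors ParameterConf.getValue() in the hunk above
    if format == 'list':
        return value.split(',')
    if format == 'intlist':
        return [int(x) for x in value.split(',')]
    if format == 'floatlist':
        return [float(x) for x in value.split(',')]
    if format == 'date':
        y, m, d = [int(x) for x in value.split('/')]
        return datetime.date(y, m, d)
    if format == 'time':
        h, m, s = [int(x) for x in value.split(':')]
        return datetime.time(h, m, s)
    return eval(format)(value)   # e.g. 'int', 'float', 'str'

# parse_value('0,1', 'intlist')     -> [0, 1]
# parse_value('2011/01/01', 'date') -> datetime.date(2011, 1, 1)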
@@ -1,80 +1,86
1 1 import matplotlib
2 matplotlib.use("Agg")
2 matplotlib.use("TKAgg")
3 3 import matplotlib.pyplot
4 4 #import scitools.numpyutils
5 5 from mpl_toolkits.axes_grid1 import make_axes_locatable
6 6
7 7 def init(idfigure, wintitle, width, height):
8 8 matplotlib.pyplot.ioff()
9 9 fig = matplotlib.pyplot.matplotlib.pyplot.figure(num=idfigure, facecolor="w")
10 10 fig.canvas.manager.set_window_title(wintitle)
11 11 fig.canvas.manager.resize(width,height)
12 12 matplotlib.pyplot.ion()
13 13 return fig
14 14
15 15 def setWinTitle(fig, title):
16 16 fig.canvas.manager.set_window_title(title)
17 17
18 18 def setTextFromAxes(idfigure, ax, title):
19 19 fig = matplotlib.pyplot.figure(idfigure)
20 20 ax.annotate(title, xy=(.1, .99),
21 21 xycoords='figure fraction',
22 22 horizontalalignment='left', verticalalignment='top',
23 23 fontsize=10)
24 24
25 25 def setTitle(idfigure, title):
26 26 fig = matplotlib.pyplot.figure(idfigure)
27 27 fig.suptitle(title)
28 28
29 29 def makeAxes(idfigure, nrow, ncol, xpos, ypos, colspan, rowspan):
30 30 fig = matplotlib.pyplot.figure(idfigure)
31 31 ax = matplotlib.pyplot.subplot2grid((nrow, ncol), (xpos, ypos), colspan=colspan, rowspan=rowspan)
32 32 return ax
33 33
34 34 def pline(ax, x, y, xmin, xmax, ymin, ymax, xlabel, ylabel, title, firsttime):
35 35 if firsttime:
36 36 ax.plot(x, y)
37 37 ax.set_xlim([xmin,xmax])
38 38 ax.set_ylim([ymin,ymax])
39 39 ax.set_xlabel(xlabel, size=8)
40 40 ax.set_ylabel(ylabel, size=8)
41 41 ax.set_title(title, size=10)
42 42 matplotlib.pyplot.tight_layout()
43 43 else:
44 44 ax.lines[0].set_data(x,y)
45 45
46 46 def draw(idfigure):
47 47 fig = matplotlib.pyplot.figure(idfigure)
48 48 fig.canvas.draw()
49 49
50 50 def pcolor(ax, x, y, z, xmin, xmax, ymin, ymax, zmin, zmax, xlabel, ylabel, title, firsttime, mesh):
51 51 if firsttime:
52 52 divider = make_axes_locatable(ax)
53 53 ax_cb = divider.new_horizontal(size="5%", pad=0.05)
54 54 fig1 = ax.get_figure()
55 55 fig1.add_axes(ax_cb)
56 56
57 57 ax.set_xlim([xmin,xmax])
58 58 ax.set_ylim([ymin,ymax])
59 59 ax.set_xlabel(xlabel)
60 60 ax.set_ylabel(ylabel)
61 61 ax.set_title(title)
62 62
63 63 imesh=ax.pcolormesh(x,y,z.T,vmin=zmin,vmax=zmax)
64 64 matplotlib.pyplot.colorbar(imesh, cax=ax_cb)
65 65 ax_cb.yaxis.tick_right()
66 66 for tl in ax_cb.get_yticklabels():
67 67 tl.set_visible(True)
68 68 ax_cb.yaxis.tick_right()
69 69 matplotlib.pyplot.tight_layout()
70 70 return imesh
71 71 else:
72 ax.set_xlim([xmin,xmax])
73 ax.set_ylim([ymin,ymax])
74 ax.set_xlabel(xlabel)
75 ax.set_ylabel(ylabel)
76 ax.set_title(title)
77
72 78 z = z.T
73 79 z = z[0:-1,0:-1]
74 80 mesh.set_array(z.ravel())
75 81
76 82 return mesh
77 83
78 84
79 85
80 86 No newline at end of file
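The plotting hunk above switches the backend to TKAgg and keeps the two-phase pattern of the pcolor() helper: the first call builds the quad mesh and its colorbar, later calls only refresh the mesh colors (and now also the limits, labels and title). A minimal standalone sketch of that update-in-place pattern, with hypothetical data shapes:

import numpy
import matplotlib.pyplot as plt

x = numpy.arange(11)                  # 11 edges -> 10 columns
y = numpy.arange(21)                  # 21 edges -> 20 rows
z = numpy.random.rand(20, 10)

fig, ax = plt.subplots()
mesh = ax.pcolormesh(x, y, z, vmin=0, vmax=1)   # first call: create the mesh
fig.colorbar(mesh, ax=ax)

for _ in range(5):                    # later calls: update colors in place
    z = numpy.random.rand(20, 10)
    mesh.set_array(z.ravel())         # the project helper transposes and trims its
    fig.canvas.draw()                 # array first because it passes full-size grids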
@@ -1,245 +1,421
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JROData.py 173 2012-11-20 15:06:21Z murco $
5 5 '''
6 6
7 7 import os, sys
8 8 import copy
9 9 import numpy
10 10
11 11 from jroheaderIO import SystemHeader, RadarControllerHeader
12 12
13 def hildebrand_sekhon(data, navg):
14 """
15 This method is for the objective determination of the noise level in Doppler spectra. This
16 implementation technique is based on the fact that the standard deviation of the spectral
17 densities is equal to the mean spectral density for white Gaussian noise
18
19 Inputs:
20 Data : heights
21 navg : numbers of averages
22
23 Return:
24 -1 : any error
25 anoise : noise's level
26 """
27
28 dataflat = data.reshape(-1)
29 dataflat.sort()
30 npts = dataflat.size #numbers of points of the data
31
32 if npts < 32:
33 print "error in noise - requires at least 32 points"
34 return -1.0
35
36 dataflat2 = numpy.power(dataflat,2)
37
38 cs = numpy.cumsum(dataflat)
39 cs2 = numpy.cumsum(dataflat2)
40
41 # data sorted in ascending order
42 nmin = int((npts + 7.)/8)
43
44 for i in range(nmin, npts):
45 s = cs[i]
46 s2 = cs2[i]
47 p = s / float(i);
48 p2 = p**2;
49 q = s2 / float(i) - p2;
50 leftc = p2;
51 rightc = q * float(navg);
52 R2 = leftc/rightc
53
54 # Signal detect: R2 < 1 (R2 = leftc/rightc)
55 if R2 < 1:
56 npts_noise = i
57 break
58
59
60 anoise = numpy.average(dataflat[0:npts_noise])
61
62 return anoise;
63
64 def sorting_bruce(Data, navg):
65 sortdata = numpy.sort(Data)
66 lenOfData = len(Data)
67 nums_min = lenOfData/10
68
69 if (lenOfData/10) > 0:
70 nums_min = lenOfData/10
71 else:
72 nums_min = 0
73
74 rtest = 1.0 + 1.0/navg
75
76 sum = 0.
77
78 sumq = 0.
79
80 j = 0
81
82 cont = 1
83
84 while((cont==1)and(j<lenOfData)):
85
86 sum += sortdata[j]
87
88 sumq += sortdata[j]**2
89
90 j += 1
91
92 if j > nums_min:
93 if ((sumq*j) <= (rtest*sum**2)):
94 lnoise = sum / j
95 else:
96 j = j - 1
97 sum = sum - sortdata[j]
98 sumq = sumq - sortdata[j]**2
99 cont = 0
100
101 if j == nums_min:
102 lnoise = sum /j
103
104 return lnoise
105
13 106 class JROData:
14 107
15 108 # m_BasicHeader = BasicHeader()
16 109 # m_ProcessingHeader = ProcessingHeader()
17 110
18 111 systemHeaderObj = SystemHeader()
19 112
20 113 radarControllerHeaderObj = RadarControllerHeader()
21 114
22 115 # data = None
23 116
24 117 type = None
25 118
26 119 dtype = None
27 120
28 121 nChannels = None
29 122
30 123 nHeights = None
31 124
32 125 nProfiles = None
33 126
34 127 heightList = None
35 128
36 129 channelList = None
37 130
38 131 channelIndexList = None
39 132
40 133 flagNoData = True
41 134
42 135 flagTimeBlock = False
43 136
44 137 utctime = None
45 138
46 139 blocksize = None
47 140
48 141 nCode = None
49 142
50 143 nBaud = None
51 144
52 145 code = None
53 146
54 147 flagDecodeData = True #asumo q la data esta decodificada
55 148
56 149 flagDeflipData = True #asumo q la data esta sin flip
57 150
58 151 flagShiftFFT = False
59 152
60 153 ippSeconds = None
61 154
62 155 timeInterval = None
156
157 nCohInt = None
158
159 noise = None
63 160
64 161 def __init__(self):
65 162
66 163 raise ValueError, "This class has not been implemented"
67 164
68 165 def copy(self, inputObj=None):
69 166
70 167 if inputObj == None:
71 168 return copy.deepcopy(self)
72 169
73 170 for key in inputObj.__dict__.keys():
74 171 self.__dict__[key] = inputObj.__dict__[key]
75 172
76 173 def deepcopy(self):
77 174
78 175 return copy.deepcopy(self)
79 176
80 177 def isEmpty(self):
81 178
82 179 return self.flagNoData
83 180
84 181 class Voltage(JROData):
85 182
86 nCohInt = None
87
88 183 #data es un numpy array de 2 dmensiones (canales, alturas)
89 184 data = None
90 185
91 186 def __init__(self):
92 187 '''
93 188 Constructor
94 189 '''
95 190
96 191 self.radarControllerHeaderObj = RadarControllerHeader()
97 192
98 193 self.systemHeaderObj = SystemHeader()
99 194
100 195 self.type = "Voltage"
101 196
102 197 self.data = None
103 198
104 199 self.dtype = None
105 200
106 201 self.nChannels = 0
107 202
108 203 self.nHeights = 0
109 204
110 205 self.nProfiles = None
111 206
112 207 self.heightList = None
113 208
114 209 self.channelList = None
115 210
116 211 self.channelIndexList = None
117 212
118 213 self.flagNoData = True
119 214
120 215 self.flagTimeBlock = False
121 216
122 217 self.utctime = None
123 218
124 219 self.nCohInt = None
125 220
126 221 self.blocksize = None
222
223 def getNoisebyHildebrand(self):
224 """
225 Determino el nivel de ruido usando el metodo Hildebrand-Sekhon
226
227 Return:
228 noiselevel
229 """
127 230
231 for channel in range(self.nChannels):
232 daux = self.data_spc[channel,:,:]
233 self.noise[channel] = hildebrand_sekhon(daux, self.nCohInt)
234
235 return self.noise
236
237 def getNoise(self, type = 1):
238
239 self.noise = numpy.zeros(self.nChannels)
240
241 if type == 1:
242 noise = self.getNoisebyHildebrand()
243
244 return 10*numpy.log10(noise)
245
128 246 class Spectra(JROData):
129 247
130 248 #data es un numpy array de 2 dmensiones (canales, perfiles, alturas)
131 249 data_spc = None
132 250
133 251 #data es un numpy array de 2 dmensiones (canales, pares, alturas)
134 252 data_cspc = None
135 253
136 254 #data es un numpy array de 2 dmensiones (canales, alturas)
137 255 data_dc = None
138 256
139 257 nFFTPoints = None
140 258
141 259 nPairs = None
142 260
143 261 pairsList = None
144 262
145 263 nIncohInt = None
146 264
147 265 wavelength = None #Necesario para cacular el rango de velocidad desde la frecuencia
148 266
149 267 nCohInt = None #se requiere para determinar el valor de timeInterval
150 268
151 269 def __init__(self):
152 270 '''
153 271 Constructor
154 272 '''
155 273
156 274 self.radarControllerHeaderObj = RadarControllerHeader()
157 275
158 276 self.systemHeaderObj = SystemHeader()
159 277
160 278 self.type = "Spectra"
161 279
162 280 # self.data = None
163 281
164 282 self.dtype = None
165 283
166 284 self.nChannels = 0
167 285
168 286 self.nHeights = 0
169 287
170 288 self.nProfiles = None
171 289
172 290 self.heightList = None
173 291
174 292 self.channelList = None
175 293
176 294 self.channelIndexList = None
177 295
178 296 self.flagNoData = True
179 297
180 298 self.flagTimeBlock = False
181 299
182 300 self.utctime = None
183 301
302 self.nCohInt = None
303
184 304 self.nIncohInt = None
185 305
186 306 self.blocksize = None
187 307
188 308 self.nFFTPoints = None
189 309
190 310 self.wavelength = None
191 311
192 312 def getFrequencies(self):
193 313
194 314 xrange = numpy.arange(self.nFFTPoints)
195 315 xrange = xrange
196 316 return None
317
318 def getNoisebyHildebrand(self):
319 """
320 Determino el nivel de ruido usando el metodo Hildebrand-Sekhon
321
322 Return:
323 noiselevel
324 """
325
326 for channel in range(self.nChannels):
327 daux = self.data_spc[channel,:,:]
328 self.noise[channel] = hildebrand_sekhon(daux, self.nIncohInt)
329
330 return self.noise
331
332 def getNoisebyWindow(self, heiIndexMin=0, heiIndexMax=-1, freqIndexMin=0, freqIndexMax=-1):
333 """
334 Determina el ruido del canal utilizando la ventana indicada con las coordenadas:
335 (heiIndexMIn, freqIndexMin) hasta (heiIndexMax, freqIndexMAx)
336
337 Inputs:
338 heiIndexMin: Limite inferior del eje de alturas
339 heiIndexMax: Limite superior del eje de alturas
340 freqIndexMin: Limite inferior del eje de frecuencia
341 freqIndexMax: Limite supoerior del eje de frecuencia
342 """
343
344 data = self.data_spc[:, heiIndexMin:heiIndexMax, freqIndexMin:freqIndexMax]
345
346 for channel in range(self.nChannels):
347 daux = data[channel,:,:]
348 self.noise[channel] = numpy.average(daux)
349
350 return self.noise
351
352 def getNoisebySort(self):
353
354 for channel in range(self.nChannels):
355 daux = self.data_spc[channel,:,:]
356 self.noise[channel] = sorting_bruce(daux, self.nIncohInt)
357
358 return self.noise
359
360 def getNoise(self, type = 1):
361
362 self.noise = numpy.zeros(self.nChannels)
363
364 if type == 1:
365 noise = self.getNoisebyHildebrand()
366
367 if type == 2:
368 noise = self.getNoisebySort()
369
370 if type == 3:
371 noise = self.getNoisebyWindow()
197 372
373 return 10*numpy.log10(noise)
198 374
199 375 class SpectraHeis(JROData):
200 376
201 377 data_spc = None
202 378
203 379 data_cspc = None
204 380
205 381 data_dc = None
206 382
207 383 nFFTPoints = None
208 384
209 385 nPairs = None
210 386
211 387 pairsList = None
212 388
213 389 nIncohInt = None
214 390
215 391 def __init__(self):
216 392
217 393 self.radarControllerHeaderObj = RadarControllerHeader()
218 394
219 395 self.systemHeaderObj = SystemHeader()
220 396
221 397 self.type = "SpectraHeis"
222 398
223 399 self.dtype = None
224 400
225 401 self.nChannels = 0
226 402
227 403 self.nHeights = 0
228 404
229 405 self.nProfiles = None
230 406
231 407 self.heightList = None
232 408
233 409 self.channelList = None
234 410
235 411 self.channelIndexList = None
236 412
237 413 self.flagNoData = True
238 414
239 415 self.flagTimeBlock = False
240 416
241 417 self.nPairs = 0
242 418
243 419 self.utctime = None
244 420
245 421 self.blocksize = None
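The jrodata hunk above adds two noise estimators, hildebrand_sekhon() and sorting_bruce(), and exposes them through getNoise() on the Voltage and Spectra objects. A hedged usage sketch on synthetic white-noise spectra; the import path is an assumption (adjust to wherever the jrodata module actually lives):

import numpy
from model.jrodata import hildebrand_sekhon   # assumed import path

navg = 10                                      # number of (in)coherent integrations
# chi-square distributed power with mean ~1, as expected for averaged white noise
data = numpy.random.chisquare(2*navg, (64, 100)) / (2.0*navg)

noise = hildebrand_sekhon(data, navg)          # needs at least 32 samples
print "Estimated noise level: %f" % noise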
@@ -1,2476 +1,2478
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JRODataIO.py 169 2012-11-19 21:57:03Z murco $
5 5 '''
6 6
7 7 import os, sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import time, datetime
13 13
14 14 from jrodata import *
15 15 from jroheaderIO import *
16 16 from jroprocessing import *
17 17
18 18 def isNumber(str):
19 19 """
20 20 Chequea si el conjunto de caracteres que componen un string puede ser convertidos a un numero.
21 21
22 22 Excepciones:
23 23 Si un determinado string no puede ser convertido a numero
24 24 Input:
25 25 str, string al cual se le analiza para determinar si convertible a un numero o no
26 26
27 27 Return:
28 28 True : si el string es uno numerico
29 29 False : no es un string numerico
30 30 """
31 31 try:
32 32 float( str )
33 33 return True
34 34 except:
35 35 return False
36 36
37 37 def isThisFileinRange(filename, startUTSeconds, endUTSeconds):
38 38 """
39 39 Esta funcion determina si un archivo de datos se encuentra o no dentro del rango de fecha especificado.
40 40
41 41 Inputs:
42 42 filename : nombre completo del archivo de datos en formato Jicamarca (.r)
43 43
44 44 startUTSeconds : fecha inicial del rango seleccionado. La fecha esta dada en
45 45 segundos contados desde 01/01/1970.
46 46 endUTSeconds : fecha final del rango seleccionado. La fecha esta dada en
47 47 segundos contados desde 01/01/1970.
48 48
49 49 Return:
50 50 Boolean : Retorna True si el archivo de datos contiene datos en el rango de
51 51 fecha especificado, de lo contrario retorna False.
52 52
53 53 Excepciones:
54 54 Si el archivo no existe o no puede ser abierto
55 55 Si la cabecera no puede ser leida.
56 56
57 57 """
58 58 basicHeaderObj = BasicHeader()
59 59
60 60 try:
61 61 fp = open(filename,'rb')
62 62 except:
63 63 raise IOError, "The file %s can't be opened" %(filename)
64 64
65 65 sts = basicHeaderObj.read(fp)
66 66 fp.close()
67 67
68 68 if not(sts):
69 69 print "Skipping the file %s because it has not a valid header" %(filename)
70 70 return 0
71 71
72 72 if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
73 73 return 0
74 74
75 75 return 1
76 76
77 77 def getlastFileFromPath(path, ext):
78 78 """
79 79 Depura el fileList dejando solo los que cumplan el formato de "PYYYYDDDSSS.ext"
80 80 al final de la depuracion devuelve el ultimo file de la lista que quedo.
81 81
82 82 Input:
83 83 fileList : lista conteniendo todos los files (sin path) que componen una determinada carpeta
84 84 ext : extension de los files contenidos en una carpeta
85 85
86 86 Return:
87 87 El ultimo file de una determinada carpeta, no se considera el path.
88 88 """
89 89 validFilelist = []
90 90 fileList = os.listdir(path)
91 91
92 92 # 0 1234 567 89A BCDE
93 93 # H YYYY DDD SSS .ext
94 94
95 95 for file in fileList:
96 96 try:
97 97 year = int(file[1:5])
98 98 doy = int(file[5:8])
99 99
100 100 if (os.path.splitext(file)[-1].upper() != ext.upper()) : continue
101 101 except:
102 102 continue
103 103
104 104 validFilelist.append(file)
105 105
106 106 if validFilelist:
107 107 validFilelist = sorted( validFilelist, key=str.lower )
108 108 return validFilelist[-1]
109 109
110 110 return None
111 111
112 112 def checkForRealPath(path, year, doy, set, ext):
113 113 """
114 114 Por ser Linux Case Sensitive entonces checkForRealPath encuentra el nombre correcto de un path,
115 115 Prueba por varias combinaciones de nombres entre mayusculas y minusculas para determinar
116 116 el path exacto de un determinado file.
117 117
118 118 Example :
119 119 nombre correcto del file es .../.../D2009307/P2009307367.ext
120 120
121 121 Entonces la funcion prueba con las siguientes combinaciones
122 122 .../.../x2009307/y2009307367.ext
123 123 .../.../x2009307/Y2009307367.ext
124 124 .../.../X2009307/y2009307367.ext
125 125 .../.../X2009307/Y2009307367.ext
126 126 siendo para este caso, la ultima combinacion de letras, identica al file buscado
127 127
128 128 Return:
129 129 Si encuentra la cobinacion adecuada devuelve el path completo y el nombre del file
130 130 caso contrario devuelve None como path y el la ultima combinacion de nombre en mayusculas
131 131 para el filename
132 132 """
133 133 filepath = None
134 134 find_flag = False
135 135 filename = None
136 136
137 137 if ext.lower() == ".r": #voltage
138 138 header1 = "dD"
139 139 header2 = "dD"
140 140 elif ext.lower() == ".pdata": #spectra
141 141 header1 = "dD"
142 142 header2 = "pP"
143 143 else:
144 144 return None, filename
145 145
146 146 for dir in header1: #barrido por las dos combinaciones posibles de "D"
147 147 for fil in header2: #barrido por las dos combinaciones posibles de "D"
148 148 doypath = "%s%04d%03d" % ( dir, year, doy ) #formo el nombre del directorio xYYYYDDD (x=d o x=D)
149 149 filename = "%s%04d%03d%03d%s" % ( fil, year, doy, set, ext ) #formo el nombre del file xYYYYDDDSSS.ext
150 150 filepath = os.path.join( path, doypath, filename ) #formo el path completo
151 151 if os.path.exists( filepath ): #verifico que exista
152 152 find_flag = True
153 153 break
154 154 if find_flag:
155 155 break
156 156
157 157 if not(find_flag):
158 158 return None, filename
159 159
160 160 return filepath, filename
161 161
162 162 class JRODataIO:
163 163
164 164 c = 3E8
165 165
166 166 isConfig = False
167 167
168 168 basicHeaderObj = BasicHeader()
169 169
170 170 systemHeaderObj = SystemHeader()
171 171
172 172 radarControllerHeaderObj = RadarControllerHeader()
173 173
174 174 processingHeaderObj = ProcessingHeader()
175 175
176 176 online = 0
177 177
178 178 dtype = None
179 179
180 180 pathList = []
181 181
182 182 filenameList = []
183 183
184 184 filename = None
185 185
186 186 ext = None
187 187
188 flagNoMoreFiles = 0
189
190 188 flagIsNewFile = 1
191 189
192 190 flagTimeBlock = 0
193 191
194 192 flagIsNewBlock = 0
195 193
196 194 fp = None
197 195
198 196 firstHeaderSize = 0
199 197
200 198 basicHeaderSize = 24
201 199
202 200 versionFile = 1103
203 201
204 202 fileSize = None
205 203
206 204 ippSeconds = None
207 205
208 206 fileSizeByHeader = None
209 207
210 208 fileIndex = None
211 209
212 210 profileIndex = None
213 211
214 212 blockIndex = None
215 213
216 214 nTotalBlocks = None
217 215
218 216 maxTimeStep = 30
219 217
220 218 lastUTTime = None
221 219
222 220 datablock = None
223 221
224 222 dataOut = None
225 223
226 224 blocksize = None
227 225
228 226 def __init__(self):
229 227
230 228 raise ValueError, "Not implemented"
231 229
232 230 def run(self):
233 231
234 232 raise ValueError, "Not implemented"
235 233
236 234 def getOutput(self):
237 235
238 236 return self.dataOut
239 237
240 238 class JRODataReader(JRODataIO, ProcessingUnit):
241 239
242 240 nReadBlocks = 0
243 241
244 242 delay = 60 #number of seconds waiting a new file
245 243
246 244 nTries = 3 #quantity tries
247 245
248 246 nFiles = 3 #number of files for searching
249
247
248 flagNoMoreFiles = 0
250 249
251 250 def __init__(self):
252 251
253 252 """
254 253
255 254 """
256 255
257 256 raise ValueError, "This method has not been implemented"
258 257
259 258
260 259 def createObjByDefault(self):
261 260 """
262 261
263 262 """
264 263 raise ValueError, "This method has not been implemented"
265 264
266 265 def getBlockDimension(self):
267 266
268 267 raise ValueError, "No implemented"
269 268
270 269 def __searchFilesOffLine(self,
271 270 path,
272 271 startDate,
273 272 endDate,
274 273 startTime=datetime.time(0,0,0),
275 274 endTime=datetime.time(23,59,59),
276 275 set=None,
277 276 expLabel="",
278 277 ext=".r"):
279 278 dirList = []
280 279 for thisPath in os.listdir(path):
281 280 if os.path.isdir(os.path.join(path,thisPath)):
282 281 dirList.append(thisPath)
283 282
284 283 if not(dirList):
285 284 return None, None
286 285
287 286 pathList = []
288 287 dateList = []
289 288
290 289 thisDate = startDate
291 290
292 291 while(thisDate <= endDate):
293 292 year = thisDate.timetuple().tm_year
294 293 doy = thisDate.timetuple().tm_yday
295 294
296 295 match = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy))
297 296 if len(match) == 0:
298 297 thisDate += datetime.timedelta(1)
299 298 continue
300 299
301 300 pathList.append(os.path.join(path,match[0],expLabel))
302 301 dateList.append(thisDate)
303 302 thisDate += datetime.timedelta(1)
304 303
305 304 filenameList = []
306 305 for index in range(len(pathList)):
307 306
308 307 thisPath = pathList[index]
309 308 fileList = glob.glob1(thisPath, "*%s" %ext)
310 309 fileList.sort()
311 310
312 311 #Busqueda de datos en el rango de horas indicados
313 312 thisDate = dateList[index]
314 313 startDT = datetime.datetime.combine(thisDate, startTime)
315 314 endDT = datetime.datetime.combine(thisDate, endTime)
316 315
317 316 startUtSeconds = time.mktime(startDT.timetuple())
318 317 endUtSeconds = time.mktime(endDT.timetuple())
319 318
320 319 for file in fileList:
321 320
322 321 filename = os.path.join(thisPath,file)
323 322
324 323 if isThisFileinRange(filename, startUtSeconds, endUtSeconds):
325 324 filenameList.append(filename)
326 325
327 326 if not(filenameList):
328 327 return None, None
329 328
330 329 self.filenameList = filenameList
331 330
332 331 return pathList, filenameList
333 332
334 333 def __searchFilesOnLine(self, path, startDate=None, endDate=None, startTime=None, endTime=None, expLabel = "", ext = None):
335 334
336 335 """
337 336 Busca el ultimo archivo de la ultima carpeta (determinada o no por startDateTime) y
338 337 devuelve el archivo encontrado ademas de otros datos.
339 338
340 339 Input:
341 340 path : carpeta donde estan contenidos los files que contiene data
342 341
343 342 startDate : Fecha inicial. Rechaza todos los directorios donde
344 343 file end time < startDate (obejto datetime.date)
345 344
346 345 endDate : Fecha final. Rechaza todos los directorios donde
347 346 file start time > endDate (obejto datetime.date)
348 347
349 348 startTime : Tiempo inicial. Rechaza todos los archivos donde
350 349 file end time < startTime (obejto datetime.time)
351 350
352 351 endTime : Tiempo final. Rechaza todos los archivos donde
353 352 file start time > endTime (obejto datetime.time)
354 353
355 354 expLabel : Nombre del subexperimento (subfolder)
356 355
357 356 ext : extension de los files
358 357
359 358 Return:
360 359 directory : eL directorio donde esta el file encontrado
361 360 filename : el ultimo file de una determinada carpeta
362 361 year : el anho
363 362 doy : el numero de dia del anho
364 363 set : el set del archivo
365 364
366 365
367 366 """
368 367 dirList = []
369 368 pathList = []
370 369 directory = None
371 370
372 371 #Filtra solo los directorios
373 372 for thisPath in os.listdir(path):
374 373 if os.path.isdir(os.path.join(path, thisPath)):
375 374 dirList.append(thisPath)
376 375
377 376 if not(dirList):
378 377 return None, None, None, None, None
379 378
380 379 dirList = sorted( dirList, key=str.lower )
381 380
382 381 if startDate:
383 382 startDateTime = datetime.datetime.combine(startDate, startTime)
384 383 thisDateTime = startDateTime
385 384 if endDate == None: endDateTime = startDateTime
386 385 else: endDateTime = datetime.datetime.combine(endDate, endTime)
387 386
388 387 while(thisDateTime <= endDateTime):
389 388 year = thisDateTime.timetuple().tm_year
390 389 doy = thisDateTime.timetuple().tm_yday
391 390
392 391 match = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy))
393 392 if len(match) == 0:
394 393 thisDateTime += datetime.timedelta(1)
395 394 continue
396 395
397 396 pathList.append(os.path.join(path,match[0], expLabel))
398 397 thisDateTime += datetime.timedelta(1)
399 398
400 399 if not(pathList):
401 400 print "\tNo files in range: %s - %s" %(startDateTime.ctime(), endDateTime.ctime())
402 401 return None, None, None, None, None
403 402
404 403 directory = pathList[0]
405 404
406 405 else:
407 406 directory = dirList[-1]
408 407 directory = os.path.join(path,directory)
409 408
410 409 filename = getlastFileFromPath(directory, ext)
411 410
412 411 if not(filename):
413 412 return None, None, None, None, None
414 413
415 414 if not(self.__verifyFile(os.path.join(directory, filename))):
416 415 return None, None, None, None, None
417 416
418 417 year = int( filename[1:5] )
419 418 doy = int( filename[5:8] )
420 419 set = int( filename[8:11] )
421 420
422 421 return directory, filename, year, doy, set
423 422
424 423
425 424
426 425 def __setNextFileOffline(self):
427 426
428 427 idFile = self.fileIndex
429 428
430 429 while (True):
431 430 idFile += 1
432 431 if not(idFile < len(self.filenameList)):
433 432 self.flagNoMoreFiles = 1
434 433 print "No more Files"
435 434 return 0
436 435
437 436 filename = self.filenameList[idFile]
438 437
439 438 if not(self.__verifyFile(filename)):
440 439 continue
441 440
442 441 fileSize = os.path.getsize(filename)
443 442 fp = open(filename,'rb')
444 443 break
445 444
446 445 self.flagIsNewFile = 1
447 446 self.fileIndex = idFile
448 447 self.filename = filename
449 448 self.fileSize = fileSize
450 449 self.fp = fp
451 450
452 451 print "Setting the file: %s"%self.filename
453 452
454 453 return 1
455 454
456 455 def __setNextFileOnline(self):
457 456 """
458 457 Busca el siguiente file que tenga suficiente data para ser leida, dentro de un folder especifico, si
459 458 no encuentra un file valido espera un tiempo determinado y luego busca en los posibles n files
460 459 siguientes.
461 460
462 461 Affected:
463 462 self.flagIsNewFile
464 463 self.filename
465 464 self.fileSize
466 465 self.fp
467 466 self.set
468 467 self.flagNoMoreFiles
469 468
470 469 Return:
471 470 0 : si luego de una busqueda del siguiente file valido este no pudo ser encontrado
472 471 1 : si el file fue abierto con exito y esta listo a ser leido
473 472
474 473 Excepciones:
475 474 Si un determinado file no puede ser abierto
476 475 """
477 476 nFiles = 0
478 477 fileOk_flag = False
479 478 firstTime_flag = True
480 479
481 480 self.set += 1
482 481
483 482 #busca el 1er file disponible
484 483 file, filename = checkForRealPath( self.path, self.year, self.doy, self.set, self.ext )
485 484 if file:
486 485 if self.__verifyFile(file, False):
487 486 fileOk_flag = True
488 487
489 488 #si no encuentra un file entonces espera y vuelve a buscar
490 489 if not(fileOk_flag):
491 490 for nFiles in range(self.nFiles+1): #busco en los siguientes self.nFiles+1 files posibles
492 491
493 492 if firstTime_flag: #si es la 1era vez entonces hace el for self.nTries veces
494 493 tries = self.nTries
495 494 else:
496 495 tries = 1 #si no es la 1era vez entonces solo lo hace una vez
497 496
498 497 for nTries in range( tries ):
499 498 if firstTime_flag:
500 499 print "\tWaiting %0.2f sec for the file \"%s\" , try %03d ..." % ( self.delay, filename, nTries+1 )
501 500 time.sleep( self.delay )
502 501 else:
503 502 print "\tSearching next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext)
504 503
505 504 file, filename = checkForRealPath( self.path, self.year, self.doy, self.set, self.ext )
506 505 if file:
507 506 if self.__verifyFile(file):
508 507 fileOk_flag = True
509 508 break
510 509
511 510 if fileOk_flag:
512 511 break
513 512
514 513 firstTime_flag = False
515 514
516 515 print "\tSkipping the file \"%s\" due to this file doesn't exist" % filename
517 516 self.set += 1
518 517
519 518 if nFiles == (self.nFiles-1): #si no encuentro el file buscado cambio de carpeta y busco en la siguiente carpeta
520 519 self.set = 0
521 520 self.doy += 1
522 521
523 522 if fileOk_flag:
524 523 self.fileSize = os.path.getsize( file )
525 524 self.filename = file
526 525 self.flagIsNewFile = 1
527 526 if self.fp != None: self.fp.close()
528 527 self.fp = open(file)
529 528 self.flagNoMoreFiles = 0
530 529 print 'Setting the file: %s' % file
531 530 else:
532 531 self.fileSize = 0
533 532 self.filename = None
534 533 self.flagIsNewFile = 0
535 534 self.fp = None
536 535 self.flagNoMoreFiles = 1
537 536 print 'No more Files'
538 537
539 538 return fileOk_flag
540 539
541 540
542 541 def setNextFile(self):
543 542 if self.fp != None:
544 543 self.fp.close()
545 544
546 545 if self.online:
547 546 newFile = self.__setNextFileOnline()
548 547 else:
549 548 newFile = self.__setNextFileOffline()
550 549
551 550 if not(newFile):
552 551 return 0
553 552
554 553 self.__readFirstHeader()
555 554 self.nReadBlocks = 0
556 555 return 1
557 556
558 557 def __setNewBlock(self):
559 558 if self.fp == None:
560 559 return 0
561 560
562 561 if self.flagIsNewFile:
563 562 return 1
564 563
565 564 self.lastUTTime = self.basicHeaderObj.utc
566 565 currentSize = self.fileSize - self.fp.tell()
567 566 neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize
568 567
569 568 if (currentSize >= neededSize):
570 569 self.__rdBasicHeader()
571 570 return 1
572 571
573 572 if not(self.setNextFile()):
574 573 return 0
575 574
576 575 deltaTime = self.basicHeaderObj.utc - self.lastUTTime #
577 576
578 577 self.flagTimeBlock = 0
579 578
580 579 if deltaTime > self.maxTimeStep:
581 580 self.flagTimeBlock = 1
582 581
583 582 return 1
584 583
585 584
586 585 def readNextBlock(self):
587 586 if not(self.__setNewBlock()):
588 587 return 0
589 588
590 589 if not(self.readBlock()):
591 590 return 0
592 591
593 592 return 1
594 593
595 594 def __rdProcessingHeader(self, fp=None):
596 595 if fp == None:
597 596 fp = self.fp
598 597
599 598 self.processingHeaderObj.read(fp)
600 599
601 600 def __rdRadarControllerHeader(self, fp=None):
602 601 if fp == None:
603 602 fp = self.fp
604 603
605 604 self.radarControllerHeaderObj.read(fp)
606 605
607 606 def __rdSystemHeader(self, fp=None):
608 607 if fp == None:
609 608 fp = self.fp
610 609
611 610 self.systemHeaderObj.read(fp)
612 611
613 612 def __rdBasicHeader(self, fp=None):
614 613 if fp == None:
615 614 fp = self.fp
616 615
617 616 self.basicHeaderObj.read(fp)
618 617
619 618
620 619 def __readFirstHeader(self):
621 620 self.__rdBasicHeader()
622 621 self.__rdSystemHeader()
623 622 self.__rdRadarControllerHeader()
624 623 self.__rdProcessingHeader()
625 624
626 625 self.firstHeaderSize = self.basicHeaderObj.size
627 626
628 627 datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
629 628 if datatype == 0:
630 629 datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
631 630 elif datatype == 1:
632 631 datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')])
633 632 elif datatype == 2:
634 633 datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')])
635 634 elif datatype == 3:
636 635 datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')])
637 636 elif datatype == 4:
638 637 datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')])
639 638 elif datatype == 5:
640 639 datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')])
641 640 else:
642 641 raise ValueError, 'Data type was not defined'
643 642
644 643 self.dtype = datatype_str
645 644 self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
646 645 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1)
647 646 # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
648 647 # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
649 648 self.getBlockDimension()
650 649
651 650
652 651 def __verifyFile(self, filename, msgFlag=True):
653 652 msg = None
654 653 try:
655 654 fp = open(filename, 'rb')
656 655 currentPosition = fp.tell()
657 656 except:
658 657 if msgFlag:
659 658 print "The file %s can't be opened" % (filename)
660 659 return False
661 660
662 661 neededSize = self.processingHeaderObj.blockSize + self.firstHeaderSize
663 662
664 663 if neededSize == 0:
665 664 basicHeaderObj = BasicHeader()
666 665 systemHeaderObj = SystemHeader()
667 666 radarControllerHeaderObj = RadarControllerHeader()
668 667 processingHeaderObj = ProcessingHeader()
669 668
670 669 try:
671 670 if not( basicHeaderObj.read(fp) ): raise ValueError
672 671 if not( systemHeaderObj.read(fp) ): raise ValueError
673 672 if not( radarControllerHeaderObj.read(fp) ): raise ValueError
674 673 if not( processingHeaderObj.read(fp) ): raise ValueError
675 674 data_type = int(numpy.log2((processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
676 675
677 676 neededSize = processingHeaderObj.blockSize + basicHeaderObj.size
678 677
679 678 except:
680 679 if msgFlag:
681 680 print "\tThe file %s is empty or it hasn't enough data" % filename
682 681
683 682 fp.close()
684 683 return False
685 684 else:
686 685 msg = "\tSkipping the file %s due to it hasn't enough data" %filename
687 686
688 687 fp.close()
689 688 fileSize = os.path.getsize(filename)
690 689 currentSize = fileSize - currentPosition
691 690 if currentSize < neededSize:
692 691 if msgFlag and (msg != None):
693 692 print msg #print"\tSkipping the file %s due to it hasn't enough data" %filename
694 693 return False
695 694
696 695 return True
697 696
698 697 def setup(self,
699 698 path=None,
700 699 startDate=None,
701 700 endDate=None,
702 701 startTime=datetime.time(0,0,0),
703 702 endTime=datetime.time(23,59,59),
704 703 set=0,
705 704 expLabel = "",
706 705 ext = None,
707 706 online = False,
708 707 delay = 60):
709 708
710 709 if path == None:
711 710 raise ValueError, "The path is not valid"
712 711
713 712 if ext == None:
714 713 ext = self.ext
715 714
716 715 if online:
717 716 print "Searching files in online mode..."
718 717 doypath, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext)
719 718
720 719 if not(doypath):
721 720 for nTries in range( self.nTries ):
722 721 print '\tWaiting %0.2f sec for an valid file in %s: try %02d ...' % (self.delay, path, nTries+1)
723 722 time.sleep( self.delay )
724 doypath, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=exp)
723 doypath, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext)
725 724 if doypath:
726 725 break
727 726
728 727 if not(doypath):
729 728 print "There 'isn't valied files in %s" % path
730 729 return None
731 730
732 731 self.year = year
733 732 self.doy = doy
734 733 self.set = set - 1
735 734 self.path = path
736 735
737 736 else:
738 737 print "Searching files in offline mode ..."
739 738 pathList, filenameList = self.__searchFilesOffLine(path, startDate, endDate, startTime, endTime, set, expLabel, ext)
740 739
741 740 if not(pathList):
742 741 print "No *%s files into the folder %s \nfor the range: %s - %s"%(ext, path,
743 742 datetime.datetime.combine(startDate,startTime).ctime(),
744 743 datetime.datetime.combine(endDate,endTime).ctime())
745 744
746 745 sys.exit(-1)
747 746
748 747
749 748 self.fileIndex = -1
750 749 self.pathList = pathList
751 750 self.filenameList = filenameList
752 751
753 752 self.online = online
754 753 self.delay = delay
755 754 ext = ext.lower()
756 755 self.ext = ext
757 756
758 757 if not(self.setNextFile()):
759 758 if (startDate!=None) and (endDate!=None):
760 759 print "No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
761 760 elif startDate != None:
762 761 print "No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
763 762 else:
764 763 print "No files"
765 764
766 765 sys.exit(-1)
767 766
768 767 # self.updateDataHeader()
769 768
770 769 return self.dataOut
771 770
772 771 def getData():
773 pass
772
773 raise ValueError, "This method has not been implemented"
774 774
775 775 def hasNotDataInBuffer():
776 pass
776
777 raise ValueError, "This method has not been implemented"
777 778
778 779 def readBlock():
779 pass
780
781 raise ValueError, "This method has not been implemented"
782
783 def isEndProcess(self):
784
785 return self.flagNoMoreFiles
780 786
787 def printReadBlocks(self):
788
789 print "Number of read blocks per file %04d" %self.nReadBlocks
790
791 def printTotalBlocks(self):
792
793 print "Number of read blocks %04d" %self.nTotalBlocks
794
781 795 def run(self, **kwargs):
782 796
783 797 if not(self.isConfig):
784 798
785 799 # self.dataOut = dataOut
786 800 self.setup(**kwargs)
787 801 self.isConfig = True
788 802
789 803 self.getData()
790 804
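As a quick illustration of how run() ties setup() and getData() together, here is a hedged usage sketch (not from the source) driving the VoltageReader subclass defined further below; the path, dates and times are placeholders.

    import datetime

    readerObj = VoltageReader()

    while not readerObj.flagNoMoreFiles:
        # first call configures the reader, later calls just fetch the next profile
        readerObj.run(path="/home/myuser/data",
                      startDate=datetime.date(2012, 1, 1),
                      endDate=datetime.date(2012, 1, 2),
                      startTime=datetime.time(0, 0, 0),
                      endTime=datetime.time(23, 59, 59),
                      online=False)
        # each call leaves one profile (channels x heights) in readerObj.dataOut
        if not readerObj.dataOut.flagNoData:
            print readerObj.dataOut.data.shape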
791 805 class JRODataWriter(JRODataIO, Operation):
792 806
793 807 """
794 808     This class writes data to processed files (.r or .pdata). Data is always
795 809     written in blocks.
796 810 """
797 811
798 812 blockIndex = 0
799 813
800 814 path = None
801 815
802 816 setFile = None
803 817
804 818 profilesPerBlock = None
805 819
806 820 blocksPerFile = None
807 821
808 822 nWriteBlocks = 0
809 823
810 824 def __init__(self, dataOut=None):
811 825 raise ValueError, "Not implemented"
812 826
813 827
814 828 def hasAllDataInBuffer(self):
815 829 raise ValueError, "Not implemented"
816 830
817 831
818 832 def setBlockDimension(self):
819 833 raise ValueError, "Not implemented"
820 834
821 835
822 836 def writeBlock(self):
823 837         raise ValueError, "Not implemented"
824 838
825 839
826 840 def putData(self):
827 841         raise ValueError, "Not implemented"
828 842
829 843 def getDataHeader(self):
830 844 """
831 845         Gets a copy of the First Header
832 846
833 847 Affected:
834 848
835 849 self.basicHeaderObj
836 850 self.systemHeaderObj
837 851 self.radarControllerHeaderObj
838 852             self.processingHeaderObj
839 853
840 854 Return:
841 855 None
842 856 """
843 857
844 858         raise ValueError, "Not implemented"
845 859
846 860 def getBasicHeader(self):
847 861
848 862 self.basicHeaderObj.size = self.basicHeaderSize #bytes
849 863 self.basicHeaderObj.version = self.versionFile
850 864 self.basicHeaderObj.dataBlock = self.nTotalBlocks
851 865
852 866 utc = numpy.floor(self.dataOut.utctime)
853 867 milisecond = (self.dataOut.utctime - utc)* 1000.0
854 868
855 869 self.basicHeaderObj.utc = utc
856 870 self.basicHeaderObj.miliSecond = milisecond
857 871 self.basicHeaderObj.timeZone = 0
858 872 self.basicHeaderObj.dstFlag = 0
859 873 self.basicHeaderObj.errorCount = 0
860 874
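A quick check of the utc/miliSecond split computed above: a float timestamp is separated into whole seconds and a millisecond remainder. The timestamp value is made up for illustration.

    import numpy

    utctime = 1326034245.250                 # hypothetical float timestamp in seconds
    utc = numpy.floor(utctime)               # -> 1326034245.0, whole seconds (basicHeaderObj.utc)
    milisecond = (utctime - utc) * 1000.0    # -> 250.0 ms (basicHeaderObj.miliSecond)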
861 875 def __writeFirstHeader(self):
862 876 """
863 877         Writes the first header of the file, that is, the Basic header and the Long header (SystemHeader, RadarControllerHeader, ProcessingHeader)
864 878
865 879 Affected:
866 880 __dataType
867 881
868 882 Return:
869 883 None
870 884 """
871 885
872 886         # COMPUTE PARAMETERS
873 887
874 888 sizeLongHeader = self.systemHeaderObj.size + self.radarControllerHeaderObj.size + self.processingHeaderObj.size
875 889 self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader
876 890
877 891 self.basicHeaderObj.write(self.fp)
878 892 self.systemHeaderObj.write(self.fp)
879 893 self.radarControllerHeaderObj.write(self.fp)
880 894 self.processingHeaderObj.write(self.fp)
881 895
882 896 self.dtype = self.dataOut.dtype
883 897
884 898 def __setNewBlock(self):
885 899 """
886 900         If it is a new file, writes the First Header; otherwise writes only the Basic Header
887 901 
888 902         Return:
889 903             0 : if nothing could be written
890 904             1 : if the Basic or the First Header was written
891 905 """
892 906 if self.fp == None:
893 907 self.setNextFile()
894 908
895 909 if self.flagIsNewFile:
896 910 return 1
897 911
898 912 if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
899 913 self.basicHeaderObj.write(self.fp)
900 914 return 1
901 915
902 916 if not( self.setNextFile() ):
903 917 return 0
904 918
905 919 return 1
906 920
907 921
908 922 def writeNextBlock(self):
909 923 """
910 924         Selects the next data block and writes it to a file
911 925 
912 926         Return:
913 927             0 : if the data block could not be written
914 928             1 : if the data block was written successfully
915 929 """
916 930 if not( self.__setNewBlock() ):
917 931 return 0
918 932
919 933 self.writeBlock()
920 934
921 935 return 1
922 936
923 937 def setNextFile(self):
924 938 """
925 939         Determines the next file to be written
926 940
927 941 Affected:
928 942 self.filename
929 943 self.subfolder
930 944 self.fp
931 945 self.setFile
932 946 self.flagIsNewFile
933 947
934 948 Return:
935 949             0 : if the file cannot be written
936 950             1 : if the file is ready to be written
937 951 """
938 952 ext = self.ext
939 953 path = self.path
940 954
941 955 if self.fp != None:
942 956 self.fp.close()
943 957
944 958 timeTuple = time.localtime( self.dataOut.dataUtcTime)
945 959 subfolder = 'D%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)
946 960
947 961 doypath = os.path.join( path, subfolder )
948 962 if not( os.path.exists(doypath) ):
949 963 os.mkdir(doypath)
950 964             self.setFile = -1 #initialize the set counter
951 965 else:
952 966 filesList = os.listdir( doypath )
953 967 if len( filesList ) > 0:
954 968 filesList = sorted( filesList, key=str.lower )
955 969 filen = filesList[-1]
956 970                 # the filename should have the following format
957 971 # 0 1234 567 89A BCDE (hex)
958 972 # x YYYY DDD SSS .ext
959 973 if isNumber( filen[8:11] ):
960 974                     self.setFile = int( filen[8:11] ) #initialize the set counter to the set of the last file
961 975 else:
962 976 self.setFile = -1
963 977 else:
964 978                 self.setFile = -1 #initialize the set counter
965 979
966 980 setFile = self.setFile
967 981 setFile += 1
968 982
969 983 file = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
970 984 timeTuple.tm_year,
971 985 timeTuple.tm_yday,
972 986 setFile,
973 987 ext )
974 988
975 989 filename = os.path.join( path, subfolder, file )
976 990
977 991 fp = open( filename,'wb' )
978 992
979 993 self.blockIndex = 0
980 994
981 995 #guardando atributos
982 996 self.filename = filename
983 997 self.subfolder = subfolder
984 998 self.fp = fp
985 999 self.setFile = setFile
986 1000 self.flagIsNewFile = 1
987 1001
988 1002 self.getDataHeader()
989 1003
990 1004 print 'Writing the file: %s'%self.filename
991 1005
992 1006 self.__writeFirstHeader()
993 1007
994 1008 return 1
995 1009
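The filename built above follows the x YYYY DDD SSS .ext layout noted in the comments. A hedged round-trip check with made-up values (the same slice [8:11] that setNextFile reads back is used here):

    optchar, ext = 'D', '.r'             # voltage files; SpectraWriter uses 'P' and '.pdata'
    year, doy, setFile = 2012, 45, 3     # hypothetical year, day-of-year and set number

    filename = '%s%4.4d%3.3d%3.3d%s' % (optchar, year, doy, setFile, ext)
    # -> 'D2012045003.r'

    assert int(filename[8:11]) == setFile   # the set number is recoverable from chars 8:11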
996 1010 def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=None, set=0, ext=None):
997 1011 """
998 1012         Sets the format in which the data will be saved and writes the First Header
999 1013 
1000 1014         Inputs:
1001 1015             path      : destination path where the files will be created
1002 1016             format    : format in which a file will be saved
1003 1017             set       : the file set number
1004 1018 
1005 1019         Return:
1006 1020             0 : if the setup failed
1007 1021             1 : if the setup succeeded
1008 1022 """
1009 1023
1010 1024 if ext == None:
1011 1025 ext = self.ext
1012 1026
1013 1027 ext = ext.lower()
1014 1028
1015 1029 self.ext = ext
1016 1030
1017 1031 self.path = path
1018 1032
1019 1033 self.setFile = set - 1
1020 1034
1021 1035 self.blocksPerFile = blocksPerFile
1022 1036
1023 1037 self.profilesPerBlock = profilesPerBlock
1024 1038
1025 1039 self.dataOut = dataOut
1026 1040
1027 1041 if not(self.setNextFile()):
1028 1042             print "There is no next file"
1029 1043 return 0
1030 1044
1031 1045 self.setBlockDimension()
1032 1046
1033 1047 return 1
1034 1048
1035 1049 def run(self, dataOut, **kwargs):
1036 1050
1037 1051 if not(self.isConfig):
1038 1052
1039 1053 self.setup(dataOut, **kwargs)
1040 1054 self.isConfig = True
1041 1055
1042 1056 self.putData()
1043 1057
1044 1058 class VoltageReader(JRODataReader):
1045 1059 """
1046 1060     This class reads voltage data from files in rawdata format (.r). Data is always
1047 1061     read in blocks. The data read (a 3-dimensional array:
1048 1062     profiles*heights*channels) is stored in the "buffer" variable.
1049 1063 
1050 1064                       profiles * heights * channels
1051 1065 
1052 1066     This class contains instances (objects) of the BasicHeader, SystemHeader,
1053 1067     RadarControllerHeader and Voltage classes. The first three are used to store the data
1054 1068     header information (metadata), and the fourth (Voltage) to obtain and store one profile of
1055 1069     data from the "buffer" every time the "getData" method is executed.
1056 1070
1057 1071 Example:
1058 1072
1059 1073 dpath = "/home/myuser/data"
1060 1074
1061 1075 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
1062 1076
1063 1077 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
1064 1078
1065 1079 readerObj = VoltageReader()
1066 1080
1067 1081 readerObj.setup(dpath, startTime, endTime)
1068 1082
1069 1083 while(True):
1070 1084
1071 1085 #to get one profile
1072 1086 profile = readerObj.getData()
1073 1087
1074 1088 #print the profile
1075 1089 print profile
1076 1090
1077 1091 #If you want to see all datablock
1078 1092 print readerObj.datablock
1079 1093
1080 1094 if readerObj.flagNoMoreFiles:
1081 1095 break
1082 1096
1083 1097 """
1084 1098
1085 1099 ext = ".r"
1086 1100
1087 1101 optchar = "D"
1088 1102 dataOut = None
1089 1103
1090 1104
1091 1105 def __init__(self):
1092 1106 """
1093 1107 Inicializador de la clase VoltageReader para la lectura de datos de voltage.
1094 1108
1095 1109 Input:
1096 1110 dataOut : Objeto de la clase Voltage. Este objeto sera utilizado para
1097 1111 almacenar un perfil de datos cada vez que se haga un requerimiento
1098 1112 (getData). El perfil sera obtenido a partir del buffer de datos,
1099 1113 si el buffer esta vacio se hara un nuevo proceso de lectura de un
1100 1114 bloque de datos.
1101 1115 Si este parametro no es pasado se creara uno internamente.
1102 1116
1103 1117 Variables afectadas:
1104 1118 self.dataOut
1105 1119
1106 1120 Return:
1107 1121 None
1108 1122 """
1109 1123
1110 1124 self.isConfig = False
1111 1125
1112 1126 self.datablock = None
1113 1127
1114 1128 self.utc = 0
1115 1129
1116 1130 self.ext = ".r"
1117 1131
1118 1132 self.optchar = "D"
1119 1133
1120 1134 self.basicHeaderObj = BasicHeader()
1121 1135
1122 1136 self.systemHeaderObj = SystemHeader()
1123 1137
1124 1138 self.radarControllerHeaderObj = RadarControllerHeader()
1125 1139
1126 1140 self.processingHeaderObj = ProcessingHeader()
1127 1141
1128 1142 self.online = 0
1129 1143
1130 1144 self.fp = None
1131 1145
1132 1146 self.idFile = None
1133 1147
1134 1148 self.dtype = None
1135 1149
1136 1150 self.fileSizeByHeader = None
1137 1151
1138 1152 self.filenameList = []
1139 1153
1140 1154 self.filename = None
1141 1155
1142 1156 self.fileSize = None
1143 1157
1144 1158 self.firstHeaderSize = 0
1145 1159
1146 1160 self.basicHeaderSize = 24
1147 1161
1148 1162 self.pathList = []
1149 1163
1150 1164 self.filenameList = []
1151 1165
1152 1166 self.lastUTTime = 0
1153 1167
1154 1168 self.maxTimeStep = 30
1155 1169
1156 1170 self.flagNoMoreFiles = 0
1157 1171
1158 1172 self.set = 0
1159 1173
1160 1174 self.path = None
1161 1175
1162 1176 self.profileIndex = 9999
1163 1177
1164 1178 self.delay = 3 #seconds
1165 1179
1166 1180 self.nTries = 3 #quantity tries
1167 1181
1168 1182 self.nFiles = 3 #number of files for searching
1169 1183
1170 1184 self.nReadBlocks = 0
1171 1185
1172 1186 self.flagIsNewFile = 1
1173 1187
1174 1188 self.ippSeconds = 0
1175 1189
1176 1190 self.flagTimeBlock = 0
1177 1191
1178 1192 self.flagIsNewBlock = 0
1179 1193
1180 1194 self.nTotalBlocks = 0
1181 1195
1182 1196 self.blocksize = 0
1183 1197
1184 1198 self.dataOut = self.createObjByDefault()
1185 1199
1186 1200 def createObjByDefault(self):
1187 1201
1188 1202 dataObj = Voltage()
1189 1203
1190 1204 return dataObj
1191 1205
1192 1206 def __hasNotDataInBuffer(self):
1193 1207 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
1194 1208 return 1
1195 1209 return 0
1196 1210
1197 1211
1198 1212 def getBlockDimension(self):
1199 1213 """
1200 1214         Gets the number of points to read for each data block
1201 1215
1202 1216 Affected:
1203 1217 self.blocksize
1204 1218
1205 1219 Return:
1206 1220 None
1207 1221 """
1208 1222 pts2read = self.processingHeaderObj.profilesPerBlock * self.processingHeaderObj.nHeights * self.systemHeaderObj.nChannels
1209 1223 self.blocksize = pts2read
1210 1224
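A worked example of the point count computed above, using hypothetical header values:

    profilesPerBlock, nHeights, nChannels = 128, 200, 4   # hypothetical header values
    pts2read = profilesPerBlock * nHeights * nChannels    # -> 102400 complex samples per block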
1211 1225
1212 1226 def readBlock(self):
1213 1227 """
1214 1228 readBlock lee el bloque de datos desde la posicion actual del puntero del archivo
1215 1229 (self.fp) y actualiza todos los parametros relacionados al bloque de datos
1216 1230 (metadata + data). La data leida es almacenada en el buffer y el contador del buffer
1217 1231 es seteado a 0
1218 1232
1219 1233 Inputs:
1220 1234 None
1221 1235
1222 1236 Return:
1223 1237 None
1224 1238
1225 1239 Affected:
1226 1240 self.profileIndex
1227 1241 self.datablock
1228 1242 self.flagIsNewFile
1229 1243 self.flagIsNewBlock
1230 1244 self.nTotalBlocks
1231 1245
1232 1246 Exceptions:
1233 1247 Si un bloque leido no es un bloque valido
1234 1248 """
1235 1249
1236 1250 junk = numpy.fromfile( self.fp, self.dtype, self.blocksize )
1237 1251
1238 1252 try:
1239 1253 junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
1240 1254 except:
1241 1255             print "The read block (%3d) does not have enough data" %self.nReadBlocks
1242 1256 return 0
1243 1257
1244 1258 junk = numpy.transpose(junk, (2,0,1))
1245 1259 self.datablock = junk['real'] + junk['imag']*1j
1246 1260
1247 1261 self.profileIndex = 0
1248 1262
1249 1263 self.flagIsNewFile = 0
1250 1264 self.flagIsNewBlock = 1
1251 1265
1252 1266 self.nTotalBlocks += 1
1253 1267 self.nReadBlocks += 1
1254 1268
1255 1269 return 1
1256 1270
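A small self-contained sketch of what readBlock does with the raw samples: the flat structured array is reshaped to (profiles, heights, channels), transposed to (channels, profiles, heights) and combined into complex voltages. The dimensions and dtype below are toy assumptions.

    import numpy

    profiles, heights, channels = 2, 3, 2                     # toy dimensions
    dtype = numpy.dtype([('real', '<i2'), ('imag', '<i2')])   # one of the supported raw dtypes

    junk = numpy.zeros(profiles * heights * channels, dtype)  # stands in for numpy.fromfile(fp, ...)
    junk = junk.reshape((profiles, heights, channels))
    junk = numpy.transpose(junk, (2, 0, 1))                   # -> (channels, profiles, heights)
    datablock = junk['real'] + junk['imag'] * 1j              # complex voltage block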
1257 1271
1258 1272 def getData(self):
1259 1273 """
1260 1274 getData obtiene una unidad de datos del buffer de lectura y la copia a la clase "Voltage"
1261 1275 con todos los parametros asociados a este (metadata). cuando no hay datos en el buffer de
1262 1276 lectura es necesario hacer una nueva lectura de los bloques de datos usando "readNextBlock"
1263 1277
1264 1278 Ademas incrementa el contador del buffer en 1.
1265 1279
1266 1280 Return:
1267 1281 data : retorna un perfil de voltages (alturas * canales) copiados desde el
1268 1282 buffer. Si no hay mas archivos a leer retorna None.
1269 1283
1270 1284 Variables afectadas:
1271 1285 self.dataOut
1272 1286 self.profileIndex
1273 1287
1274 1288 Affected:
1275 1289 self.dataOut
1276 1290 self.profileIndex
1277 1291 self.flagTimeBlock
1278 1292 self.flagIsNewBlock
1279 1293 """
1280 if self.flagNoMoreFiles: return 0
1281
1294
1295 if self.flagNoMoreFiles:
1296 self.dataOut.flagNoData = True
1297 print 'Process finished'
1298 return 0
1299
1282 1300 self.flagTimeBlock = 0
1283 1301 self.flagIsNewBlock = 0
1284 1302
1285 1303 if self.__hasNotDataInBuffer():
1286 1304
1287 1305 if not( self.readNextBlock() ):
1288 1306 return 0
1289 1307
1290 1308 # self.updateDataHeader()
1291
1292 if self.flagNoMoreFiles == 1:
1293 print 'Process finished'
1294 return 0
1295 1309
1296 1310         #data is a 3-dimensional numpy array (profiles, heights and channels)
1297 1311
1298 1312 if self.datablock == None:
1299 1313 self.dataOut.flagNoData = True
1300 1314 return 0
1301 1315
1302 1316 self.dataOut.data = self.datablock[:,self.profileIndex,:]
1303 1317
1304 1318 self.dataOut.dtype = self.dtype
1305 1319
1306 1320 self.dataOut.nChannels = self.systemHeaderObj.nChannels
1307 1321
1308 1322 self.dataOut.nHeights = self.processingHeaderObj.nHeights
1309 1323
1310 1324 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
1311 1325
1312 1326 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
1313 1327
1314 1328 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
1315 1329
1316 1330 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
1317 1331
1318 1332 self.dataOut.channelIndexList = range(self.systemHeaderObj.nChannels)
1319 1333
1320 1334 self.dataOut.flagTimeBlock = self.flagTimeBlock
1321 1335
1322 1336 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000. + self.profileIndex * self.ippSeconds
1323 1337
1324 1338 self.dataOut.ippSeconds = self.ippSeconds
1325 1339
1326 1340 self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt
1327 1341
1328 1342 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
1329 1343
1330 1344 self.dataOut.flagShiftFFT = False
1331 1345
1332 1346 if self.processingHeaderObj.code != None:
1333 1347 self.dataOut.nCode = self.processingHeaderObj.nCode
1334 1348
1335 1349 self.dataOut.nBaud = self.processingHeaderObj.nBaud
1336 1350
1337 1351 self.dataOut.code = self.processingHeaderObj.code
1338 1352
1339 1353 self.profileIndex += 1
1340 1354
1341 1355 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
1342 1356
1343 1357 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
1344 1358
1345 1359 self.dataOut.flagNoData = False
1346 1360
1347 1361 # print self.profileIndex, self.dataOut.utctime
1348 1362 # if self.profileIndex == 800:
1349 1363 # a=1
1350 1364
1351 1365 return self.dataOut.data
1352 1366
1353 1367
1354 1368 class VoltageWriter(JRODataWriter):
1355 1369 """
1356 1370     This class writes voltage data to processed files (.r). Data is always
1357 1371     written in blocks.
1358 1372 """
1359 1373
1360 1374 ext = ".r"
1361 1375
1362 1376 optchar = "D"
1363 1377
1364 1378 shapeBuffer = None
1365 1379
1366 1380
1367 1381 def __init__(self):
1368 1382 """
1369 1383         Initializer of the VoltageWriter class for writing voltage data.
1370 1384
1371 1385 Affected:
1372 1386 self.dataOut
1373 1387
1374 1388 Return: None
1375 1389 """
1376 1390
1377 1391 self.nTotalBlocks = 0
1378 1392
1379 1393 self.profileIndex = 0
1380 1394
1381 1395 self.isConfig = False
1382 1396
1383 1397 self.fp = None
1384 1398
1385 1399 self.flagIsNewFile = 1
1386 1400
1387 1401 self.nTotalBlocks = 0
1388 1402
1389 1403 self.flagIsNewBlock = 0
1390
1391 self.flagNoMoreFiles = 0
1392 1404
1393 1405 self.setFile = None
1394 1406
1395 1407 self.dtype = None
1396 1408
1397 1409 self.path = None
1398 1410
1399 self.noMoreFiles = 0
1400
1401 1411 self.filename = None
1402 1412
1403 1413 self.basicHeaderObj = BasicHeader()
1404 1414
1405 1415 self.systemHeaderObj = SystemHeader()
1406 1416
1407 1417 self.radarControllerHeaderObj = RadarControllerHeader()
1408 1418
1409 1419 self.processingHeaderObj = ProcessingHeader()
1410 1420
1411 1421 def hasAllDataInBuffer(self):
1412 1422 if self.profileIndex >= self.processingHeaderObj.profilesPerBlock:
1413 1423 return 1
1414 1424 return 0
1415 1425
1416 1426
1417 1427 def setBlockDimension(self):
1418 1428 """
1419 1429 Obtiene las formas dimensionales del los subbloques de datos que componen un bloque
1420 1430
1421 1431 Affected:
1422 1432 self.shape_spc_Buffer
1423 1433 self.shape_cspc_Buffer
1424 1434 self.shape_dc_Buffer
1425 1435
1426 1436 Return: None
1427 1437 """
1428 1438 self.shapeBuffer = (self.processingHeaderObj.profilesPerBlock,
1429 1439 self.processingHeaderObj.nHeights,
1430 1440 self.systemHeaderObj.nChannels)
1431 1441
1432 1442 self.datablock = numpy.zeros((self.systemHeaderObj.nChannels,
1433 1443 self.processingHeaderObj.profilesPerBlock,
1434 1444 self.processingHeaderObj.nHeights),
1435 1445 dtype=numpy.dtype('complex'))
1436 1446
1437 1447
1438 1448 def writeBlock(self):
1439 1449 """
1440 1450 Escribe el buffer en el file designado
1441 1451
1442 1452 Affected:
1443 1453 self.profileIndex
1444 1454 self.flagIsNewFile
1445 1455 self.flagIsNewBlock
1446 1456 self.nTotalBlocks
1447 1457 self.blockIndex
1448 1458
1449 1459 Return: None
1450 1460 """
1451 1461 data = numpy.zeros( self.shapeBuffer, self.dtype )
1452 1462
1453 1463 junk = numpy.transpose(self.datablock, (1,2,0))
1454 1464
1455 1465 data['real'] = junk.real
1456 1466 data['imag'] = junk.imag
1457 1467
1458 1468 data = data.reshape( (-1) )
1459 1469
1460 1470 data.tofile( self.fp )
1461 1471
1462 1472 self.datablock.fill(0)
1463 1473
1464 1474 self.profileIndex = 0
1465 1475 self.flagIsNewFile = 0
1466 1476 self.flagIsNewBlock = 1
1467 1477
1468 1478 self.blockIndex += 1
1469 1479 self.nTotalBlocks += 1
1470 1480
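The inverse step performed by writeBlock can be sketched the same way: the complex block is split back into a structured real/imag array, flattened and dumped with tofile. The dimensions and the output path below are assumptions for illustration.

    import numpy

    channels, profiles, heights = 2, 2, 3
    dtype = numpy.dtype([('real', '<i2'), ('imag', '<i2')])

    datablock = numpy.ones((channels, profiles, heights), dtype=complex)   # toy complex block
    data = numpy.zeros((profiles, heights, channels), dtype)

    junk = numpy.transpose(datablock, (1, 2, 0))    # back to (profiles, heights, channels)
    data['real'] = junk.real
    data['imag'] = junk.imag

    data.reshape((-1)).tofile(open('/tmp/example_block.r', 'wb'))   # hypothetical output file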
1471 1481 def putData(self):
1472 1482 """
1473 1483 Setea un bloque de datos y luego los escribe en un file
1474 1484
1475 1485 Affected:
1476 1486 self.flagIsNewBlock
1477 1487 self.profileIndex
1478 1488
1479 1489 Return:
1480 1490 0 : Si no hay data o no hay mas files que puedan escribirse
1481 1491 1 : Si se escribio la data de un bloque en un file
1482 1492 """
1483 1493 if self.dataOut.flagNoData:
1484 1494 return 0
1485 1495
1486 1496 self.flagIsNewBlock = 0
1487 1497
1488 1498 if self.dataOut.flagTimeBlock:
1489 1499
1490 1500 self.datablock.fill(0)
1491 1501 self.profileIndex = 0
1492 1502 self.setNextFile()
1493 1503
1494 1504 if self.profileIndex == 0:
1495 1505 self.getBasicHeader()
1496 1506
1497 1507 self.datablock[:,self.profileIndex,:] = self.dataOut.data
1498 1508
1499 1509 self.profileIndex += 1
1500 1510
1501 1511 if self.hasAllDataInBuffer():
1502 1512 #if self.flagIsNewFile:
1503 1513 self.writeNextBlock()
1504 1514 # self.getDataHeader()
1505 1515
1506 if self.flagNoMoreFiles:
1507 #print 'Process finished'
1508 return 0
1509
1510 1516 return 1
1511 1517
1512 1518 def __getProcessFlags(self):
1513 1519
1514 1520 processFlags = 0
1515 1521
1516 1522 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
1517 1523 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
1518 1524 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
1519 1525 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
1520 1526 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
1521 1527 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
1522 1528
1523 1529 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
1524 1530
1525 1531
1526 1532
1527 1533 datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
1528 1534 PROCFLAG.DATATYPE_SHORT,
1529 1535 PROCFLAG.DATATYPE_LONG,
1530 1536 PROCFLAG.DATATYPE_INT64,
1531 1537 PROCFLAG.DATATYPE_FLOAT,
1532 1538 PROCFLAG.DATATYPE_DOUBLE]
1533 1539
1534 1540
1535 1541 for index in range(len(dtypeList)):
1536 1542 if self.dataOut.dtype == dtypeList[index]:
1537 1543 dtypeValue = datatypeValueList[index]
1538 1544 break
1539 1545
1540 1546 processFlags += dtypeValue
1541 1547
1542 1548 if self.dataOut.flagDecodeData:
1543 1549 processFlags += PROCFLAG.DECODE_DATA
1544 1550
1545 1551 if self.dataOut.flagDeflipData:
1546 1552 processFlags += PROCFLAG.DEFLIP_DATA
1547 1553
1548 1554 if self.dataOut.code != None:
1549 1555 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
1550 1556
1551 1557 if self.dataOut.nCohInt > 1:
1552 1558 processFlags += PROCFLAG.COHERENT_INTEGRATION
1553 1559
1554 1560 return processFlags
1555 1561
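A hedged illustration of how __getProcessFlags composes its bit mask: each condition contributes a disjoint PROCFLAG bit, so adding the contributions is equivalent to OR-ing them. The numeric values below are placeholders, not the real PROCFLAG constants.

    # placeholder bit values; the real ones come from the PROCFLAG class in this module
    DATATYPE_SHORT       = 0x00000004
    DECODE_DATA          = 0x00000100
    COHERENT_INTEGRATION = 0x00000800

    processFlags = 0
    processFlags += DATATYPE_SHORT          # dtype of the block
    processFlags += DECODE_DATA             # dataOut.flagDecodeData was True
    processFlags += COHERENT_INTEGRATION    # dataOut.nCohInt > 1

    assert processFlags == (DATATYPE_SHORT | DECODE_DATA | COHERENT_INTEGRATION)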
1556 1562
1557 1563 def __getBlockSize(self):
1558 1564 '''
1559 1565         This method determines the number of bytes for a Voltage data block
1560 1566 '''
1561 1567
1562 1568 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
1563 1569 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
1564 1570 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
1565 1571 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
1566 1572 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
1567 1573 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
1568 1574
1569 1575 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
1570 1576 datatypeValueList = [1,2,4,8,4,8]
1571 1577 for index in range(len(dtypeList)):
1572 1578 if self.dataOut.dtype == dtypeList[index]:
1573 1579 datatypeValue = datatypeValueList[index]
1574 1580 break
1575 1581
1576 1582 blocksize = int(self.dataOut.nHeights * self.dataOut.nChannels * self.dataOut.nProfiles * datatypeValue * 2)
1577 1583
1578 1584 return blocksize
1579 1585
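A worked example of the byte count returned above, assuming int16 raw samples (datatypeValue = 2) and the same hypothetical dimensions used earlier:

    nHeights, nChannels, nProfiles = 200, 4, 128
    datatypeValue = 2                                                  # bytes per component for '<i2'
    blocksize = nHeights * nChannels * nProfiles * datatypeValue * 2   # *2 for real+imag
    # -> 409600 bytes per voltage block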
1580 1586 def getDataHeader(self):
1581 1587
1582 1588 """
1583 1589 Obtiene una copia del First Header
1584 1590
1585 1591 Affected:
1586 1592 self.systemHeaderObj
1587 1593 self.radarControllerHeaderObj
1588 1594 self.dtype
1589 1595
1590 1596 Return:
1591 1597 None
1592 1598 """
1593 1599
1594 1600 self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
1595 1601 self.systemHeaderObj.nChannels = self.dataOut.nChannels
1596 1602 self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()
1597 1603
1598 1604 self.getBasicHeader()
1599 1605
1600 1606 processingHeaderSize = 40 # bytes
1601 1607 self.processingHeaderObj.dtype = 0 # Voltage
1602 1608 self.processingHeaderObj.blockSize = self.__getBlockSize()
1603 1609 self.processingHeaderObj.profilesPerBlock = self.profilesPerBlock
1604 1610 self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
1605 1611 self.processingHeaderObj.nWindows = 1 #podria ser 1 o self.dataOut.processingHeaderObj.nWindows
1606 1612 self.processingHeaderObj.processFlags = self.__getProcessFlags()
1607 1613 self.processingHeaderObj.nCohInt = self.dataOut.nCohInt
1608 1614 self.processingHeaderObj.nIncohInt = 1 # Cuando la data de origen es de tipo Voltage
1609 1615 self.processingHeaderObj.totalSpectra = 0 # Cuando la data de origen es de tipo Voltage
1610 1616
1611 1617 if self.dataOut.code != None:
1612 1618 self.processingHeaderObj.code = self.dataOut.code
1613 1619 self.processingHeaderObj.nCode = self.dataOut.nCode
1614 1620 self.processingHeaderObj.nBaud = self.dataOut.nBaud
1615 1621 codesize = int(8 + 4 * self.dataOut.nCode * self.dataOut.nBaud)
1616 1622 processingHeaderSize += codesize
1617 1623
1618 1624 if self.processingHeaderObj.nWindows != 0:
1619 1625 self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
1620 1626 self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
1621 1627 self.processingHeaderObj.nHeights = self.dataOut.nHeights
1622 1628 self.processingHeaderObj.samplesWin = self.dataOut.nHeights
1623 1629 processingHeaderSize += 12
1624 1630
1625 1631 self.processingHeaderObj.size = processingHeaderSize
1626 1632
1627 1633 class SpectraReader(JRODataReader):
1628 1634 """
1629 1635     This class reads spectra data from processed files (.pdata). Data is always
1630 1636     read in blocks. The data read (3-dimensional arrays)
1631 1637     is stored in three buffers for the Self Spectra, the Cross Spectra and the DC Channel.
1632 1638 
1633 1639     equalChannelPairs * heights * profiles  (Self Spectra)
1634 1640     differentChannelPairs * heights * profiles  (Cross Spectra)
1635 1641     channels * heights (DC Channels)
1636 1642 
1637 1643     This class contains instances (objects) of the BasicHeader, SystemHeader,
1638 1644     RadarControllerHeader and Spectra classes. The first three are used to store the data
1639 1645     header information (metadata), and the fourth (Spectra) to obtain and store a block of
1640 1646     data from the "buffer" every time the "getData" method is executed.
1641 1647
1642 1648 Example:
1643 1649 dpath = "/home/myuser/data"
1644 1650
1645 1651 startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)
1646 1652
1647 1653 endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)
1648 1654
1649 1655 readerObj = SpectraReader()
1650 1656
1651 1657 readerObj.setup(dpath, startTime, endTime)
1652 1658
1653 1659 while(True):
1654 1660
1655 1661 readerObj.getData()
1656 1662
1657 1663 print readerObj.data_spc
1658 1664
1659 1665 print readerObj.data_cspc
1660 1666
1661 1667 print readerObj.data_dc
1662 1668
1663 1669 if readerObj.flagNoMoreFiles:
1664 1670 break
1665 1671
1666 1672 """
1667 1673
1668 1674 pts2read_SelfSpectra = 0
1669 1675
1670 1676 pts2read_CrossSpectra = 0
1671 1677
1672 1678 pts2read_DCchannels = 0
1673 1679
1674 1680 ext = ".pdata"
1675 1681
1676 1682 optchar = "P"
1677 1683
1678 1684 dataOut = None
1679 1685
1680 1686 nRdChannels = None
1681 1687
1682 1688 nRdPairs = None
1683 1689
1684 1690 rdPairList = []
1685 1691
1686 1692
1687 1693 def __init__(self):
1688 1694 """
1689 1695 Inicializador de la clase SpectraReader para la lectura de datos de espectros.
1690 1696
1691 1697 Inputs:
1692 1698 dataOut : Objeto de la clase Spectra. Este objeto sera utilizado para
1693 1699 almacenar un perfil de datos cada vez que se haga un requerimiento
1694 1700 (getData). El perfil sera obtenido a partir del buffer de datos,
1695 1701 si el buffer esta vacio se hara un nuevo proceso de lectura de un
1696 1702 bloque de datos.
1697 1703 Si este parametro no es pasado se creara uno internamente.
1698 1704
1699 1705 Affected:
1700 1706 self.dataOut
1701 1707
1702 1708 Return : None
1703 1709 """
1704 1710
1705 1711 self.isConfig = False
1706 1712
1707 1713 self.pts2read_SelfSpectra = 0
1708 1714
1709 1715 self.pts2read_CrossSpectra = 0
1710 1716
1711 1717 self.pts2read_DCchannels = 0
1712 1718
1713 1719 self.datablock = None
1714 1720
1715 1721 self.utc = None
1716 1722
1717 1723 self.ext = ".pdata"
1718 1724
1719 1725 self.optchar = "P"
1720 1726
1721 1727 self.basicHeaderObj = BasicHeader()
1722 1728
1723 1729 self.systemHeaderObj = SystemHeader()
1724 1730
1725 1731 self.radarControllerHeaderObj = RadarControllerHeader()
1726 1732
1727 1733 self.processingHeaderObj = ProcessingHeader()
1728 1734
1729 1735 self.online = 0
1730 1736
1731 1737 self.fp = None
1732 1738
1733 1739 self.idFile = None
1734 1740
1735 1741 self.dtype = None
1736 1742
1737 1743 self.fileSizeByHeader = None
1738 1744
1739 1745 self.filenameList = []
1740 1746
1741 1747 self.filename = None
1742 1748
1743 1749 self.fileSize = None
1744 1750
1745 1751 self.firstHeaderSize = 0
1746 1752
1747 1753 self.basicHeaderSize = 24
1748 1754
1749 1755 self.pathList = []
1750 1756
1751 1757 self.lastUTTime = 0
1752 1758
1753 1759 self.maxTimeStep = 30
1754 1760
1755 1761 self.flagNoMoreFiles = 0
1756 1762
1757 1763 self.set = 0
1758 1764
1759 1765 self.path = None
1760 1766
1761 1767 self.delay = 3 #seconds
1762 1768
1763 1769 self.nTries = 3 #quantity tries
1764 1770
1765 1771 self.nFiles = 3 #number of files for searching
1766 1772
1767 1773 self.nReadBlocks = 0
1768 1774
1769 1775 self.flagIsNewFile = 1
1770 1776
1771 1777 self.ippSeconds = 0
1772 1778
1773 1779 self.flagTimeBlock = 0
1774 1780
1775 1781 self.flagIsNewBlock = 0
1776 1782
1777 1783 self.nTotalBlocks = 0
1778 1784
1779 1785 self.blocksize = 0
1780 1786
1781 1787 self.dataOut = self.createObjByDefault()
1782 1788
1783 1789
1784 1790 def createObjByDefault(self):
1785 1791
1786 1792 dataObj = Spectra()
1787 1793
1788 1794 return dataObj
1789 1795
1790 1796 def __hasNotDataInBuffer(self):
1791 1797 return 1
1792 1798
1793 1799
1794 1800 def getBlockDimension(self):
1795 1801 """
1796 1802         Gets the number of points to read for each data block
1797 1803
1798 1804 Affected:
1799 1805 self.nRdChannels
1800 1806 self.nRdPairs
1801 1807 self.pts2read_SelfSpectra
1802 1808 self.pts2read_CrossSpectra
1803 1809 self.pts2read_DCchannels
1804 1810 self.blocksize
1805 1811 self.dataOut.nChannels
1806 1812 self.dataOut.nPairs
1807 1813
1808 1814 Return:
1809 1815 None
1810 1816 """
1811 1817 self.nRdChannels = 0
1812 1818 self.nRdPairs = 0
1813 1819 self.rdPairList = []
1814 1820
1815 1821 for i in range(0, self.processingHeaderObj.totalSpectra*2, 2):
1816 1822 if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i+1]:
1817 1823 self.nRdChannels = self.nRdChannels + 1 #par de canales iguales
1818 1824 else:
1819 1825 self.nRdPairs = self.nRdPairs + 1 #par de canales diferentes
1820 1826 self.rdPairList.append((self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1]))
1821 1827
1822 1828 pts2read = self.processingHeaderObj.nHeights * self.processingHeaderObj.profilesPerBlock
1823 1829
1824 1830 self.pts2read_SelfSpectra = int(self.nRdChannels * pts2read)
1825 1831 self.blocksize = self.pts2read_SelfSpectra
1826 1832
1827 1833 if self.processingHeaderObj.flag_cspc:
1828 1834 self.pts2read_CrossSpectra = int(self.nRdPairs * pts2read)
1829 1835 self.blocksize += self.pts2read_CrossSpectra
1830 1836
1831 1837 if self.processingHeaderObj.flag_dc:
1832 1838 self.pts2read_DCchannels = int(self.systemHeaderObj.nChannels * self.processingHeaderObj.nHeights)
1833 1839 self.blocksize += self.pts2read_DCchannels
1834 1840
1835 1841 # self.blocksize = self.pts2read_SelfSpectra + self.pts2read_CrossSpectra + self.pts2read_DCchannels
1836 1842
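A short sketch of how the spectraComb list above splits into self-spectra channels and cross-spectra pairs; the pair list below is hypothetical.

    # spectraComb is a flat list of channel pairs: here (0,0) (1,1) (0,1)
    spectraComb = [0, 0, 1, 1, 0, 1]

    nRdChannels, nRdPairs, rdPairList = 0, 0, []
    for i in range(0, len(spectraComb), 2):
        if spectraComb[i] == spectraComb[i + 1]:
            nRdChannels += 1                                   # self spectrum
        else:
            nRdPairs += 1                                      # cross spectrum
            rdPairList.append((spectraComb[i], spectraComb[i + 1]))

    # nRdChannels == 2, nRdPairs == 1, rdPairList == [(0, 1)]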
1837 1843
1838 1844 def readBlock(self):
1839 1845 """
1840 1846 Lee el bloque de datos desde la posicion actual del puntero del archivo
1841 1847 (self.fp) y actualiza todos los parametros relacionados al bloque de datos
1842 1848 (metadata + data). La data leida es almacenada en el buffer y el contador del buffer
1843 1849 es seteado a 0
1844 1850
1845 1851 Return: None
1846 1852
1847 1853 Variables afectadas:
1848 1854
1849 1855 self.flagIsNewFile
1850 1856 self.flagIsNewBlock
1851 1857 self.nTotalBlocks
1852 1858 self.data_spc
1853 1859 self.data_cspc
1854 1860 self.data_dc
1855 1861
1856 1862 Exceptions:
1857 1863 Si un bloque leido no es un bloque valido
1858 1864 """
1859 1865 blockOk_flag = False
1860 1866 fpointer = self.fp.tell()
1861 1867
1862 1868 spc = numpy.fromfile( self.fp, self.dtype[0], self.pts2read_SelfSpectra )
1863 1869 spc = spc.reshape( (self.nRdChannels, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #transforma a un arreglo 3D
1864 1870
1865 1871 if self.processingHeaderObj.flag_cspc:
1866 1872 cspc = numpy.fromfile( self.fp, self.dtype, self.pts2read_CrossSpectra )
1867 1873 cspc = cspc.reshape( (self.nRdPairs, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #transforma a un arreglo 3D
1868 1874
1869 1875 if self.processingHeaderObj.flag_dc:
1870 1876 dc = numpy.fromfile( self.fp, self.dtype, self.pts2read_DCchannels ) #int(self.processingHeaderObj.nHeights*self.systemHeaderObj.nChannels) )
1871 1877 dc = dc.reshape( (self.systemHeaderObj.nChannels, self.processingHeaderObj.nHeights) ) #transforma a un arreglo 2D
1872 1878
1873 1879
1874 1880 if not(self.processingHeaderObj.shif_fft):
1875 spc = numpy.roll( spc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #desplaza a la derecha en el eje 2 determinadas posiciones
1881 #desplaza a la derecha en el eje 2 determinadas posiciones
1882 shift = int(self.processingHeaderObj.profilesPerBlock/2)
1883 spc = numpy.roll( spc, shift , axis=2 )
1876 1884
1877 1885 if self.processingHeaderObj.flag_cspc:
1878 cspc = numpy.roll( cspc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #desplaza a la derecha en el eje 2 determinadas posiciones
1886 #desplaza a la derecha en el eje 2 determinadas posiciones
1887 cspc = numpy.roll( cspc, shift, axis=2 )
1879 1888
1880 1889
1881 1890 spc = numpy.transpose( spc, (0,2,1) )
1882 1891 self.data_spc = spc
1883 1892
1884 1893 if self.processingHeaderObj.flag_cspc:
1885 1894 cspc = numpy.transpose( cspc, (0,2,1) )
1886 1895 self.data_cspc = cspc['real'] + cspc['imag']*1j
1887 1896 else:
1888 1897 self.data_cspc = None
1889 1898
1890 1899 if self.processingHeaderObj.flag_dc:
1891 1900 self.data_dc = dc['real'] + dc['imag']*1j
1892 1901 else:
1893 1902 self.data_dc = None
1894 1903
1895 1904 self.flagIsNewFile = 0
1896 1905 self.flagIsNewBlock = 1
1897 1906
1898 1907 self.nTotalBlocks += 1
1899 1908 self.nReadBlocks += 1
1900 1909
1901 1910 return 1
1902 1911
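The numpy.roll applied above when shif_fft is not set simply centers the zero frequency along the profile axis, which for an even number of profiles matches an FFT shift; a toy check:

    import numpy

    profilesPerBlock = 8
    spc = numpy.arange(profilesPerBlock).reshape((1, 1, profilesPerBlock)).astype(float)

    shift = int(profilesPerBlock / 2)
    rolled = numpy.roll(spc, shift, axis=2)

    # same result as an FFT shift along that axis
    assert numpy.allclose(rolled, numpy.fft.fftshift(spc, axes=2))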
1903 1912
1904 1913 def getData(self):
1905 1914 """
1906 1915 Copia el buffer de lectura a la clase "Spectra",
1907 1916 con todos los parametros asociados a este (metadata). cuando no hay datos en el buffer de
1908 1917 lectura es necesario hacer una nueva lectura de los bloques de datos usando "readNextBlock"
1909 1918
1910 1919 Return:
1911 1920 0 : Si no hay mas archivos disponibles
1912 1921 1 : Si hizo una buena copia del buffer
1913 1922
1914 1923 Affected:
1915 1924 self.dataOut
1916 1925
1917 1926 self.flagTimeBlock
1918 1927 self.flagIsNewBlock
1919 1928 """
1920 1929
1921 if self.flagNoMoreFiles: return 0
1930 if self.flagNoMoreFiles:
1931 self.dataOut.flagNoData = True
1932 print 'Process finished'
1933 return 0
1922 1934
1923 1935 self.flagTimeBlock = 0
1924 1936 self.flagIsNewBlock = 0
1925 1937
1926 1938 if self.__hasNotDataInBuffer():
1927 1939
1928 1940 if not( self.readNextBlock() ):
1941 self.dataOut.flagNoData = True
1929 1942 return 0
1930 1943
1931 1944 # self.updateDataHeader()
1932 1945
1933 if self.flagNoMoreFiles == 1:
1934 print 'Process finished'
1935 return 0
1936
1937 1946         #data is a 3-dimensional numpy array (profiles, heights and channels)
1938 1947
1939 1948 if self.data_dc == None:
1940 1949 self.dataOut.flagNoData = True
1941 1950 return 0
1942 1951
1943
1944 1952 self.dataOut.data_spc = self.data_spc
1945 1953
1946 1954 self.dataOut.data_cspc = self.data_cspc
1947 1955
1948 1956 self.dataOut.data_dc = self.data_dc
1949 1957
1950 1958 self.dataOut.flagTimeBlock = self.flagTimeBlock
1951 1959
1952 1960 self.dataOut.flagNoData = False
1953 1961
1954 1962 self.dataOut.dtype = self.dtype
1955 1963
1956 1964 self.dataOut.nChannels = self.nRdChannels
1957 1965
1958 1966 self.dataOut.nPairs = self.nRdPairs
1959 1967
1960 1968 self.dataOut.pairsList = self.rdPairList
1961 1969
1962 1970 self.dataOut.nHeights = self.processingHeaderObj.nHeights
1963 1971
1964 1972 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
1965 1973
1966 1974 self.dataOut.nFFTPoints = self.processingHeaderObj.profilesPerBlock
1967 1975
1968 1976 self.dataOut.nIncohInt = self.processingHeaderObj.nIncohInt
1969 1977
1970 1978
1971 1979 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
1972 1980
1973 1981 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
1974 1982
1975 1983 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
1976 1984
1977 1985 self.dataOut.channelIndexList = range(self.systemHeaderObj.nChannels)
1978 1986
1979 1987 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000.#+ self.profileIndex * self.ippSeconds
1980 1988
1981 1989 self.dataOut.ippSeconds = self.ippSeconds
1982 1990
1983 1991 self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt * self.processingHeaderObj.nIncohInt * self.dataOut.nFFTPoints
1984 1992
1985 1993 self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft
1986 1994
1987 1995 # self.profileIndex += 1
1988 1996
1989 1997 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
1990 1998
1991 1999 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
1992 2000
1993 2001 return self.dataOut.data_spc
1994 2002
1995 2003
1996 2004 class SpectraWriter(JRODataWriter):
1997 2005
1998 2006 """
1999 2007     This class writes spectra data to processed files (.pdata). Data is always
2000 2008     written in blocks.
2001 2009 """
2002 2010
2003 2011 ext = ".pdata"
2004 2012
2005 2013 optchar = "P"
2006 2014
2007 2015 shape_spc_Buffer = None
2008 2016
2009 2017 shape_cspc_Buffer = None
2010 2018
2011 2019 shape_dc_Buffer = None
2012 2020
2013 2021 data_spc = None
2014 2022
2015 2023 data_cspc = None
2016 2024
2017 2025 data_dc = None
2018 2026
2019 2027 # dataOut = None
2020 2028
2021 2029 def __init__(self):
2022 2030 """
2023 2031 Inicializador de la clase SpectraWriter para la escritura de datos de espectros.
2024 2032
2025 2033 Affected:
2026 2034 self.dataOut
2027 2035 self.basicHeaderObj
2028 2036 self.systemHeaderObj
2029 2037 self.radarControllerHeaderObj
2030 2038 self.processingHeaderObj
2031 2039
2032 2040 Return: None
2033 2041 """
2034 2042
2035 2043 self.isConfig = False
2036 2044
2037 2045 self.nTotalBlocks = 0
2038 2046
2039 2047 self.data_spc = None
2040 2048
2041 2049 self.data_cspc = None
2042 2050
2043 2051 self.data_dc = None
2044 2052
2045 2053 self.fp = None
2046 2054
2047 2055 self.flagIsNewFile = 1
2048 2056
2049 2057 self.nTotalBlocks = 0
2050 2058
2051 2059 self.flagIsNewBlock = 0
2052
2053 self.flagNoMoreFiles = 0
2054 2060
2055 2061 self.setFile = None
2056 2062
2057 2063 self.dtype = None
2058 2064
2059 2065 self.path = None
2060 2066
2061 2067 self.noMoreFiles = 0
2062 2068
2063 2069 self.filename = None
2064 2070
2065 2071 self.basicHeaderObj = BasicHeader()
2066 2072
2067 2073 self.systemHeaderObj = SystemHeader()
2068 2074
2069 2075 self.radarControllerHeaderObj = RadarControllerHeader()
2070 2076
2071 2077 self.processingHeaderObj = ProcessingHeader()
2072 2078
2073 2079
2074 2080 def hasAllDataInBuffer(self):
2075 2081 return 1
2076 2082
2077 2083
2078 2084 def setBlockDimension(self):
2079 2085 """
2080 2086 Obtiene las formas dimensionales del los subbloques de datos que componen un bloque
2081 2087
2082 2088 Affected:
2083 2089 self.shape_spc_Buffer
2084 2090 self.shape_cspc_Buffer
2085 2091 self.shape_dc_Buffer
2086 2092
2087 2093 Return: None
2088 2094 """
2089 2095 self.shape_spc_Buffer = (self.dataOut.nChannels,
2090 2096 self.processingHeaderObj.nHeights,
2091 2097 self.processingHeaderObj.profilesPerBlock)
2092 2098
2093 2099 self.shape_cspc_Buffer = (self.dataOut.nPairs,
2094 2100 self.processingHeaderObj.nHeights,
2095 2101 self.processingHeaderObj.profilesPerBlock)
2096 2102
2097 2103 self.shape_dc_Buffer = (self.dataOut.nChannels,
2098 2104 self.processingHeaderObj.nHeights)
2099 2105
2100 2106
2101 2107 def writeBlock(self):
2102 2108 """
2103 2109 Escribe el buffer en el file designado
2104 2110
2105 2111 Affected:
2106 2112 self.data_spc
2107 2113 self.data_cspc
2108 2114 self.data_dc
2109 2115 self.flagIsNewFile
2110 2116 self.flagIsNewBlock
2111 2117 self.nTotalBlocks
2112 2118 self.nWriteBlocks
2113 2119
2114 2120 Return: None
2115 2121 """
2116 2122
2117 2123 spc = numpy.transpose( self.data_spc, (0,2,1) )
2118 2124 if not( self.processingHeaderObj.shif_fft ):
2119 2125 spc = numpy.roll( spc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #desplaza a la derecha en el eje 2 determinadas posiciones
2120 2126 data = spc.reshape((-1))
2121 2127 data.tofile(self.fp)
2122 2128
2123 2129 if self.data_cspc != None:
2124 2130 data = numpy.zeros( self.shape_cspc_Buffer, self.dtype )
2125 2131 cspc = numpy.transpose( self.data_cspc, (0,2,1) )
2126 2132 if not( self.processingHeaderObj.shif_fft ):
2127 2133 cspc = numpy.roll( cspc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #desplaza a la derecha en el eje 2 determinadas posiciones
2128 2134 data['real'] = cspc.real
2129 2135 data['imag'] = cspc.imag
2130 2136 data = data.reshape((-1))
2131 2137 data.tofile(self.fp)
2132 2138
2133 2139 if self.data_dc != None:
2134 2140 data = numpy.zeros( self.shape_dc_Buffer, self.dtype )
2135 2141 dc = self.data_dc
2136 2142 data['real'] = dc.real
2137 2143 data['imag'] = dc.imag
2138 2144 data = data.reshape((-1))
2139 2145 data.tofile(self.fp)
2140 2146
2141 2147 self.data_spc.fill(0)
2142 2148 self.data_dc.fill(0)
2143 2149 if self.data_cspc != None:
2144 2150 self.data_cspc.fill(0)
2145 2151
2146 2152 self.flagIsNewFile = 0
2147 2153 self.flagIsNewBlock = 1
2148 2154 self.nTotalBlocks += 1
2149 2155 self.nWriteBlocks += 1
2150 2156 self.blockIndex += 1
2151 2157
2152 2158
2153 2159 def putData(self):
2154 2160 """
2155 2161 Setea un bloque de datos y luego los escribe en un file
2156 2162
2157 2163 Affected:
2158 2164 self.data_spc
2159 2165 self.data_cspc
2160 2166 self.data_dc
2161 2167
2162 2168 Return:
2163 2169 0 : Si no hay data o no hay mas files que puedan escribirse
2164 2170 1 : Si se escribio la data de un bloque en un file
2165 2171 """
2166 2172
2167 2173 if self.dataOut.flagNoData:
2168 2174 return 0
2169 2175
2170 2176 self.flagIsNewBlock = 0
2171 2177
2172 2178 if self.dataOut.flagTimeBlock:
2173 2179 self.data_spc.fill(0)
2174 2180 self.data_cspc.fill(0)
2175 2181 self.data_dc.fill(0)
2176 2182 self.setNextFile()
2177 2183
2178 2184 if self.flagIsNewFile == 0:
2179 2185 self.getBasicHeader()
2180 2186
2181 2187 self.data_spc = self.dataOut.data_spc
2182 2188 self.data_cspc = self.dataOut.data_cspc
2183 2189 self.data_dc = self.dataOut.data_dc
2184 2190
2185 2191 # #self.processingHeaderObj.dataBlocksPerFile)
2186 2192 if self.hasAllDataInBuffer():
2187 2193 # self.getDataHeader()
2188 2194 self.writeNextBlock()
2189 2195
2190 if self.flagNoMoreFiles:
2191 #print 'Process finished'
2192 return 0
2193
2194 2196 return 1
2195 2197
2196 2198
2197 2199 def __getProcessFlags(self):
2198 2200
2199 2201 processFlags = 0
2200 2202
2201 2203 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
2202 2204 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
2203 2205 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
2204 2206 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
2205 2207 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
2206 2208 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
2207 2209
2208 2210 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
2209 2211
2210 2212
2211 2213
2212 2214 datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
2213 2215 PROCFLAG.DATATYPE_SHORT,
2214 2216 PROCFLAG.DATATYPE_LONG,
2215 2217 PROCFLAG.DATATYPE_INT64,
2216 2218 PROCFLAG.DATATYPE_FLOAT,
2217 2219 PROCFLAG.DATATYPE_DOUBLE]
2218 2220
2219 2221
2220 2222 for index in range(len(dtypeList)):
2221 2223 if self.dataOut.dtype == dtypeList[index]:
2222 2224 dtypeValue = datatypeValueList[index]
2223 2225 break
2224 2226
2225 2227 processFlags += dtypeValue
2226 2228
2227 2229 if self.dataOut.flagDecodeData:
2228 2230 processFlags += PROCFLAG.DECODE_DATA
2229 2231
2230 2232 if self.dataOut.flagDeflipData:
2231 2233 processFlags += PROCFLAG.DEFLIP_DATA
2232 2234
2233 2235 if self.dataOut.code != None:
2234 2236 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
2235 2237
2236 2238 if self.dataOut.nIncohInt > 1:
2237 2239 processFlags += PROCFLAG.INCOHERENT_INTEGRATION
2238 2240
2239 2241 if self.dataOut.data_dc != None:
2240 2242 processFlags += PROCFLAG.SAVE_CHANNELS_DC
2241 2243
2242 2244 return processFlags
2243 2245
2244 2246
2245 2247 def __getBlockSize(self):
2246 2248 '''
2247 2249 Este metodos determina el cantidad de bytes para un bloque de datos de tipo Spectra
2248 2250 '''
2249 2251
2250 2252 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
2251 2253 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
2252 2254 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
2253 2255 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
2254 2256 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
2255 2257 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
2256 2258
2257 2259 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
2258 2260 datatypeValueList = [1,2,4,8,4,8]
2259 2261 for index in range(len(dtypeList)):
2260 2262 if self.dataOut.dtype == dtypeList[index]:
2261 2263 datatypeValue = datatypeValueList[index]
2262 2264 break
2263 2265
2264 2266
2265 2267 pts2write = self.dataOut.nHeights * self.dataOut.nFFTPoints
2266 2268
2267 2269 pts2write_SelfSpectra = int(self.dataOut.nChannels * pts2write)
2268 2270 blocksize = (pts2write_SelfSpectra*datatypeValue)
2269 2271
2270 2272 if self.dataOut.data_cspc != None:
2271 2273 pts2write_CrossSpectra = int(self.dataOut.nPairs * pts2write)
2272 2274 blocksize += (pts2write_CrossSpectra*datatypeValue*2)
2273 2275
2274 2276 if self.dataOut.data_dc != None:
2275 2277 pts2write_DCchannels = int(self.dataOut.nChannels * self.dataOut.nHeights)
2276 2278 blocksize += (pts2write_DCchannels*datatypeValue*2)
2277 2279
2278 2280 blocksize = blocksize #* datatypeValue * 2 #CORREGIR ESTO
2279 2281
2280 2282 return blocksize
2281 2283
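A worked example of the spectra block size above, with hypothetical dimensions and float32 data (datatypeValue = 4):

    nHeights, nFFTPoints, nChannels, nPairs = 200, 128, 4, 3
    datatypeValue = 4                                           # '<f4'

    pts2write = nHeights * nFFTPoints                           # 25600
    blocksize = nChannels * pts2write * datatypeValue           # self spectra (real only)
    blocksize += nPairs * pts2write * datatypeValue * 2         # cross spectra (complex)
    blocksize += nChannels * nHeights * datatypeValue * 2       # DC channels (complex)
    # -> 409600 + 614400 + 6400 = 1030400 bytes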
2282 2284 def getDataHeader(self):
2283 2285
2284 2286 """
2285 2287 Obtiene una copia del First Header
2286 2288
2287 2289 Affected:
2288 2290 self.systemHeaderObj
2289 2291 self.radarControllerHeaderObj
2290 2292 self.dtype
2291 2293
2292 2294 Return:
2293 2295 None
2294 2296 """
2295 2297
2296 2298 self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
2297 2299 self.systemHeaderObj.nChannels = self.dataOut.nChannels
2298 2300 self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()
2299 2301
2300 2302 self.getBasicHeader()
2301 2303
2302 2304 processingHeaderSize = 40 # bytes
2303 2305 self.processingHeaderObj.dtype = 0 # Voltage
2304 2306 self.processingHeaderObj.blockSize = self.__getBlockSize()
2305 2307 self.processingHeaderObj.profilesPerBlock = self.dataOut.nFFTPoints
2306 2308 self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
2307 2309 self.processingHeaderObj.nWindows = 1 #podria ser 1 o self.dataOut.processingHeaderObj.nWindows
2308 2310 self.processingHeaderObj.processFlags = self.__getProcessFlags()
2309 2311 self.processingHeaderObj.nCohInt = self.dataOut.nCohInt# Se requiere para determinar el valor de timeInterval
2310 2312 self.processingHeaderObj.nIncohInt = self.dataOut.nIncohInt
2311 2313 self.processingHeaderObj.totalSpectra = self.dataOut.nPairs + self.dataOut.nChannels
2312 2314
2313 2315 if self.processingHeaderObj.totalSpectra > 0:
2314 2316 channelList = []
2315 2317 for channel in range(self.dataOut.nChannels):
2316 2318 channelList.append(channel)
2317 2319 channelList.append(channel)
2318 2320
2319 2321 pairsList = []
2320 2322 for pair in self.dataOut.pairsList:
2321 2323 pairsList.append(pair[0])
2322 2324 pairsList.append(pair[1])
2323 2325 spectraComb = channelList + pairsList
2324 2326 spectraComb = numpy.array(spectraComb,dtype="u1")
2325 2327 self.processingHeaderObj.spectraComb = spectraComb
2326 2328 sizeOfSpcComb = len(spectraComb)
2327 2329 processingHeaderSize += sizeOfSpcComb
2328 2330
2329 2331 if self.dataOut.code != None:
2330 2332 self.processingHeaderObj.code = self.dataOut.code
2331 2333 self.processingHeaderObj.nCode = self.dataOut.nCode
2332 2334 self.processingHeaderObj.nBaud = self.dataOut.nBaud
2333 2335 nCodeSize = 4 # bytes
2334 2336 nBaudSize = 4 # bytes
2335 2337 codeSize = 4 # bytes
2336 2338 sizeOfCode = int(nCodeSize + nBaudSize + codeSize * self.dataOut.nCode * self.dataOut.nBaud)
2337 2339 processingHeaderSize += sizeOfCode
2338 2340
2339 2341 if self.processingHeaderObj.nWindows != 0:
2340 2342 self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
2341 2343 self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
2342 2344 self.processingHeaderObj.nHeights = self.dataOut.nHeights
2343 2345 self.processingHeaderObj.samplesWin = self.dataOut.nHeights
2344 2346 sizeOfFirstHeight = 4
2345 2347 sizeOfdeltaHeight = 4
2346 2348 sizeOfnHeights = 4
2347 2349 sizeOfWindows = (sizeOfFirstHeight + sizeOfdeltaHeight + sizeOfnHeights)*self.processingHeaderObj.nWindows
2348 2350 processingHeaderSize += sizeOfWindows
2349 2351
2350 2352 self.processingHeaderObj.size = processingHeaderSize
2351 2353
2352 2354 class SpectraHeisWriter():
2353 2355
2354 2356 i=0
2355 2357
2356 2358 def __init__(self, dataOut):
2357 2359
2358 2360 self.wrObj = FITS()
2359 2361 self.dataOut = dataOut
2360 2362
2361 2363 def isNumber(str):
2362 2364 """
2363 2365 Chequea si el conjunto de caracteres que componen un string puede ser convertidos a un numero.
2364 2366
2365 2367 Excepciones:
2366 2368 Si un determinado string no puede ser convertido a numero
2367 2369 Input:
2368 2370 str, string al cual se le analiza para determinar si convertible a un numero o no
2369 2371
2370 2372 Return:
2371 2373 True : si el string es uno numerico
2372 2374 False : no es un string numerico
2373 2375 """
2374 2376 try:
2375 2377 float( str )
2376 2378 return True
2377 2379 except:
2378 2380 return False
2379 2381
2380 2382 def setup(self, wrpath,):
2381 2383
2382 2384 if not(os.path.exists(wrpath)):
2383 2385 os.mkdir(wrpath)
2384 2386
2385 2387 self.wrpath = wrpath
2386 2388 self.setFile = 0
2387 2389
2388 2390 def putData(self):
2389 2391 # self.wrObj.writeHeader(nChannels=self.dataOut.nChannels, nFFTPoints=self.dataOut.nFFTPoints)
2390 2392 #name = self.dataOut.utctime
2391 2393 name= time.localtime( self.dataOut.utctime)
2392 2394 ext=".fits"
2393 2395 #folder='D%4.4d%3.3d'%(name.tm_year,name.tm_yday)
2394 2396 subfolder = 'D%4.4d%3.3d' % (name.tm_year,name.tm_yday)
2395 2397
2396 2398 doypath = os.path.join( self.wrpath, subfolder )
2397 2399 if not( os.path.exists(doypath) ):
2398 2400 os.mkdir(doypath)
2399 2401 self.setFile += 1
2400 2402 file = 'D%4.4d%3.3d%3.3d%s' % (name.tm_year,name.tm_yday,self.setFile,ext)
2401 2403
2402 2404 filename = os.path.join(self.wrpath,subfolder, file)
2403 2405
2404 2406 # print self.dataOut.ippSeconds
2405 2407 freq=numpy.arange(-1*self.dataOut.nHeights/2.,self.dataOut.nHeights/2.)/(2*self.dataOut.ippSeconds)
2406 2408
2407 2409 col1=self.wrObj.setColF(name="freq", format=str(self.dataOut.nFFTPoints)+'E', array=freq)
2408 2410 col2=self.wrObj.writeData(name="P_Ch1",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[0,:]))
2409 2411 col3=self.wrObj.writeData(name="P_Ch2",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[1,:]))
2410 2412 col4=self.wrObj.writeData(name="P_Ch3",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[2,:]))
2411 2413 col5=self.wrObj.writeData(name="P_Ch4",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[3,:]))
2412 2414 col6=self.wrObj.writeData(name="P_Ch5",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[4,:]))
2413 2415 col7=self.wrObj.writeData(name="P_Ch6",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[5,:]))
2414 2416 col8=self.wrObj.writeData(name="P_Ch7",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[6,:]))
2415 2417 col9=self.wrObj.writeData(name="P_Ch8",format=str(self.dataOut.nFFTPoints)+'E',data=10*numpy.log10(self.dataOut.data_spc[7,:]))
2416 2418 #n=numpy.arange((100))
2417 2419 n=self.dataOut.data_spc[6,:]
2418 2420 a=self.wrObj.cFImage(n)
2419 2421 b=self.wrObj.Ctable(col1,col2,col3,col4,col5,col6,col7,col8,col9)
2420 2422 self.wrObj.CFile(a,b)
2421 2423 self.wrObj.wFile(filename)
2422 2424 return 1
2423 2425
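A minimal, hedged sketch of the FITS write flow used by putData above, condensed to the pyfits calls wrapped by the FITS helper class defined just below (Column, PrimaryHDU, ColDefs, new_table, HDUList); the frequency axis, power values and output path are made up, and only one channel column is shown.

    import numpy
    import pyfits

    freq = numpy.arange(-64, 64, dtype=numpy.float32)                        # hypothetical frequency axis
    power = 10 * numpy.log10(numpy.random.rand(128).astype(numpy.float32))   # hypothetical channel power (dB)

    col_freq = pyfits.Column(name="freq", format='128E', array=numpy.array([freq]))
    col_pch1 = pyfits.Column(name="P_Ch1", format='128E', array=numpy.array([power]))

    hdu = pyfits.PrimaryHDU(power)                                   # image HDU, as cFImage does
    tbhdu = pyfits.new_table(pyfits.ColDefs([col_freq, col_pch1]))   # table HDU, as Ctable does
    pyfits.HDUList([hdu, tbhdu]).writeto('/tmp/example.fits')        # hypothetical output path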
2424 2426 class FITS:
2425 2427
2426 2428 name=None
2427 2429 format=None
2428 2430 array =None
2429 2431 data =None
2430 2432 thdulist=None
2431 2433
2432 2434 def __init__(self):
2433 2435
2434 2436 pass
2435 2437
2436 2438 def setColF(self,name,format,array):
2437 2439 self.name=name
2438 2440 self.format=format
2439 2441 self.array=array
2440 2442 a1=numpy.array([self.array],dtype=numpy.float32)
2441 2443 self.col1 = pyfits.Column(name=self.name, format=self.format, array=a1)
2442 2444 return self.col1
2443 2445
2444 2446 # def setColP(self,name,format,data):
2445 2447 # self.name=name
2446 2448 # self.format=format
2447 2449 # self.data=data
2448 2450 # a2=numpy.array([self.data],dtype=numpy.float32)
2449 2451 # self.col2 = pyfits.Column(name=self.name, format=self.format, array=a2)
2450 2452 # return self.col2
2451 2453
2452 2454 def writeHeader(self,):
2453 2455 pass
2454 2456
2455 2457 def writeData(self,name,format,data):
2456 2458 self.name=name
2457 2459 self.format=format
2458 2460 self.data=data
2459 2461 a2=numpy.array([self.data],dtype=numpy.float32)
2460 2462 self.col2 = pyfits.Column(name=self.name, format=self.format, array=a2)
2461 2463 return self.col2
2462 2464
2463 2465 def cFImage(self,n):
2464 2466 self.hdu= pyfits.PrimaryHDU(n)
2465 2467 return self.hdu
2466 2468
2467 2469 def Ctable(self,col1,col2,col3,col4,col5,col6,col7,col8,col9):
2468 2470 self.cols=pyfits.ColDefs( [col1,col2,col3,col4,col5,col6,col7,col8,col9])
2469 2471 self.tbhdu = pyfits.new_table(self.cols)
2470 2472 return self.tbhdu
2471 2473
2472 2474 def CFile(self,hdu,tbhdu):
2473 2475 self.thdulist=pyfits.HDUList([hdu,tbhdu])
2474 2476
2475 2477 def wFile(self,filename):
2476 2478 self.thdulist.writeto(filename) No newline at end of file
@@ -1,186 +1,193
1 1 import numpy
2 2 import datetime
3 3 from graphics.figure import *
4 4
5 5 class SpectraPlot(Figure):
6
6 7 __isConfig = None
7 8
8 9 def __init__(self):
10
9 11 self.__isConfig = False
10 12 self.width = 850
11 13 self.height = 800
12 14
13 15 def getSubplots(self):
16
14 17 ncol = int(numpy.sqrt(self.nplots)+0.9)
15 18 nrow = int(self.nplots*1./ncol + 0.9)
16 19 return nrow, ncol
17 20
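A quick check of the grid computed above: for a hypothetical nplots = 5 the near-square layout comes out as 3 columns by 2 rows.

    import numpy

    nplots = 5                                     # hypothetical number of channels to plot
    ncol = int(numpy.sqrt(nplots) + 0.9)           # -> 3
    nrow = int(nplots * 1. / ncol + 0.9)           # -> 2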
18
19 21 def setAxesWithOutProfiles(self, nrow, ncol):
22
20 23 colspan = 1
21 24 rowspan = 1
22 25 counter = 0
23 26
24 27 for y in range(nrow):
25 28 for x in range(ncol):
26 29 if counter < self.nplots:
27 30 self.makeAxes(nrow, ncol, y, x, colspan, rowspan)
28 31 counter += 1
29 32
30 33 def setAxesWithProfiles(self, nrow, ncol):
34
31 35 colspan = 1
32 36 rowspan = 1
33 37 factor = 2
34 38 ncol = ncol*factor
35 39 counter = 0
36 40
37 41 for y in range(nrow):
38 42 for x in range(ncol):
39 43 if counter < self.nplots*factor:
40 44 # plt.subplot2grid((nrow, ncol), (y, x), colspan=colspan, rowspan=rowspan)
41 45 self.makeAxes(nrow, ncol, y, x, colspan, rowspan)
42 46 counter += 1
43 47
44 48 def setup(self, idfigure, wintitle, width, height, nplots, profile):
49
45 50 self.init(idfigure, wintitle, width, height, nplots)
46 51
47 52 nrow,ncol = self.getSubplots()
48 53
49 54 if profile:
50 55 self.setAxesWithProfiles(nrow, ncol)
51 56 else:
52 57 self.setAxesWithOutProfiles(nrow, ncol)
53 58
54 59 def run(self, dataOut, idfigure, wintitle="", channelList=None, xmin=None, xmax=None, ymin=None, ymax=None, zmin=None, zmax=None, profile=False):
55 if dataOut.isEmpty():
56 return None
57 60
58 61 if channelList == None:
59 62 channelList = dataOut.channelList
60 63
61 64 nplots = len(channelList)
62 65
63 66 z = 10.*numpy.log10(dataOut.data_spc[channelList,:,:])
64 67
65 68 y = dataOut.heightList
66 69
67 70 x = numpy.arange(dataOut.nFFTPoints)
68 71
72 noise = dataOut.getNoise()
73
69 74 if not self.__isConfig:
70 75 self.setup(idfigure=idfigure,
71 76 wintitle=wintitle,
72 77 width=self.width,
73 78 height=self.height,
74 79 nplots=nplots,
75 80 profile=profile)
76 81
77 if xmin == None: self.xmin = numpy.min(x)
78 if xmax == None: self.xmax = numpy.max(x)
79 if ymin == None: self.ymin = numpy.min(y)
80 if ymax == None: self.ymax = numpy.max(y)
81 if zmin == None: self.zmin = 0
82 if zmax == None: self.zmax = 90
83
82 if xmin == None: xmin = numpy.min(x)
83 if xmax == None: xmax = numpy.max(x)
84 if ymin == None: ymin = numpy.min(y)
85 if ymax == None: ymax = numpy.max(y)
86 if zmin == None: zmin = numpy.min(z)
87 if zmax == None: zmax = numpy.max(z)
88
89 self.xmin = xmin
90 self.xmax = xmax
91 self.ymin = ymin
92 self.ymax = ymax
93 self.zmin = zmin
94 self.zmax = zmax
95
84 96 self.__isConfig = True
85 97
86 98 thisDatetime = datetime.datetime.fromtimestamp(dataOut.utctime)
87 99 dateTime = "%s"%(thisDatetime.strftime("%d-%b-%Y %H:%M:%S"))
88 100 date = "%s"%(thisDatetime.strftime("%d-%b-%Y"))
89 101 title = "Spectra: " + dateTime
90 102
91 103 self.setWinTitle(title)
92 104
93 105 ylabel = "Range [km]"
94 106
95 107 xlabel = "m/s"
96 108
97 109 for i in range(len(self.axesList)):
98 title = "Channel %d"%i
110 title = "Channel %d: %4.2fdB" %(i, noise[i])
99 111 axes = self.axesList[i]
100 112 z2 = z[i,:,:]
101 113 axes.pcolor(x, y, z2, self.xmin, self.xmax, self.ymin, self.ymax, self.zmin, self.zmax, xlabel, ylabel, title)
102
103
114
104 115 self.draw()
105
106
107
108
109 116
110 117 class Scope(Figure):
111 118 __isConfig = None
112 119
113 120 def __init__(self):
114 121 self.__isConfig = False
115 122 self.width = 850
116 123 self.height = 800
117 124
118 125 def getSubplots(self):
119 126 nrow = self.nplots
120 127 ncol = 3
121 128 return nrow, ncol
122 129
123 130 def setup(self, idfigure, wintitle, width, height, nplots):
124 131 self.init(idfigure, wintitle, width, height, nplots)
125 132
126 133 nrow,ncol = self.getSubplots()
127 134 colspan = 3
128 135 rowspan = 1
129 136
130 137 for i in range(nplots):
131 138 self.makeAxes(nrow, ncol, i, 0, colspan, rowspan)
132 139
133 140
134 141
135 142 def run(self, dataOut, idfigure, wintitle="", channelList=None, xmin=None, xmax=None, ymin=None, ymax=None):
136 143
137 144 if dataOut.isEmpty():
138 145 return None
139 146
140 147 if channelList == None:
141 148 channelList = dataOut.channelList
142 149
143 150 nplots = len(channelList)
144 151
145 152 y = dataOut.data[channelList,:] * numpy.conjugate(dataOut.data[channelList,:])
146 153 y = y.real
147 154
148 155 x = dataOut.heightList
149 156
150 157 if not self.__isConfig:
151 158 self.setup(idfigure=idfigure,
152 159 wintitle=wintitle,
153 160 width=self.width,
154 161 height=self.height,
155 162 nplots=nplots)
156 163
157 164 if xmin == None: self.xmin = numpy.min(x)
158 165 if xmax == None: self.xmax = numpy.max(x)
159 166 if ymin == None: self.ymin = numpy.min(y)
160 167 if ymax == None: self.ymax = numpy.max(y)
161 168
162 169 self.__isConfig = True
163 170
164 171
165 172
166 173 thisDatetime = datetime.datetime.fromtimestamp(dataOut.utctime)
167 174 dateTime = "%s"%(thisDatetime.strftime("%d-%b-%Y %H:%M:%S"))
168 175 date = "%s"%(thisDatetime.strftime("%d-%b-%Y"))
169 176 title = "Scope: " + dateTime
170 177
171 178 self.setWinTitle(title)
172 179
173 180 ylabel = "Intensity"
174 181
175 182 xlabel = "Range [km]"
176 183
177 184 for i in range(len(self.axesList)):
178 title = "Channel %d"%i
185 title = "Channel %d" %i
179 186 axes = self.axesList[i]
180 187 y2 = y[i,:]
181 188 axes.pline(x, y2, self.xmin, self.xmax, self.ymin, self.ymax, xlabel, ylabel, title)
182 189
183 190 self.draw()
184 191
185 192
186 193 No newline at end of file
@@ -1,588 +1,638
1 1 '''
2 2
3 3 $Author: dsuarez $
4 4 $Id: Processor.py 1 2012-11-12 18:56:07Z dsuarez $
5 5 '''
6 6 import os
7 7 import numpy
8 8 import datetime
9 9 import time
10 10
11 11 from jrodata import *
12 12 from jrodataIO import *
13 13 from jroplot import *
14 14
15 15 class ProcessingUnit:
16 16
17 17 """
18 18 This is the base class for data processing.
19 19 
20 20 It provides the "call" method for running operations. Operations can be:
21 21 - internal methods (callMethod)
22 22 - objects of type Operation (callObject). Before being called, these objects
23 23 must have been added with the "addOperation" method.
24 24
25 25 """
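A minimal usage sketch of this contract, assuming VoltageProc and CohInt (defined further down this file) as representative pieces; internal methods are reached by name through callMethod, while Operation objects are first registered with addOperation and then reached by id through callObject:

    # hypothetical wiring of a processing unit (someVoltage is an assumed input data object)
    procUnit = VoltageProc()
    procUnit.setInput(someVoltage)

    opId = procUnit.addOperation(CohInt(), objId=1)

    procUnit.callMethod('init')           # internal method, i.e. operation type "self"
    procUnit.callObject(opId, nCohInt=8)  # registered Operation, i.e. operation type "other"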
26 26 # input data object (Voltage, Spectra or Correlation)
27 27 dataIn = None
28 28
29 29 # output data object (Voltage, Spectra or Correlation)
30 30 dataOut = None
31 31
32 32
33 33 objectDict = None
34 34
35 35 def __init__(self):
36 36
37 37 self.objectDict = {}
38 38
39 39 def init(self):
40 40
41 41 raise ValueError, "Not implemented"
42 42
43 43 def addOperation(self, object, objId):
44 44
45 45 """
46 46 Adds the object "object" to the object dictionary "self.objectDict" and returns the
47 47 identifier associated with this object.
48 48
49 49 Input:
50 50
51 51 object : an object of the "Operation" class
52 52
53 53 Return:
54 54
55 55 objId : object identifier, needed to execute the operation
56 56 """
57 57
58 58 self.objectDict[objId] = object
59 59
60 60 return objId
61 61
62 62 def operation(self, **kwargs):
63 63
64 64 """
65 65 Direct operation on the data (dataOut.data). The attribute values of the
66 66 dataOut object must be updated.
67 67
68 68 Input:
69 69
70 70 **kwargs : dictionary of arguments for the function to be executed
71 71 """
72 72
73 73 raise ValueError, "Not implemented"
74 74
75 75 def callMethod(self, name, **kwargs):
76 76
77 77 """
78 78 Executes the method named "name" of this class, with the **kwargs arguments.
79 79
80 80 Input:
81 81 name : name of the method to execute
82 82
83 83 **kwargs : dictionary with the argument names and values for the function to be executed.
84 84
85 85 """
86 86 if name != 'run':
87 87
88 88 if name == 'init' and self.dataIn.isEmpty():
89 return
89 self.dataOut.flagNoData = True
90 return False
90 91
91 92 if name != 'init' and self.dataOut.isEmpty():
92 return
93 return False
93 94
94 95 methodToCall = getattr(self, name)
95 96
96 97 methodToCall(**kwargs)
97 98
99 if name != 'run':
100 return True
101
102 if self.dataOut.isEmpty():
103 return False
104
105 return True
106
98 107 def callObject(self, objId, **kwargs):
99 108
100 109 """
101 110 Executes the operation associated with the object identifier "objId".
102 111
103 112 Input:
104 113
105 114 objId : identifier of the object to execute
106 115
107 116 **kwargs : dictionary with the argument names and values for the function to be executed.
108 117
109 118 Return:
110 119
111 120 None
112 121 """
113 122
114 123 if self.dataOut.isEmpty():
115 return
124 return False
116 125
117 126 object = self.objectDict[objId]
118 127
119 128 object.run(self.dataOut, **kwargs)
129
130 return True
120 131
121 132 def call(self, operationConf, **kwargs):
122 133
123 134 """
124 Executes the operation "operationConf.name" with the "**kwargs" arguments. The operation can
125 be of two types:
135 Returns True if the operation "operationConf.name" was executed with the
136 "**kwargs" arguments, or False if the operation was not executed.
137 The operation can be of two types:
126 138
127 139 1. A method belonging to this class itself:
128 140
129 141 operation.type = "self"
130 142
131 143 2. The "run" method of an Operation object, or of a class derived from it:
132 144 operation.type = "other".
133 145
134 146 This Operation object must have been added beforehand with the method
135 147 "addOperation" and identified with operation.id,
136 148
137 149
138 150 that is, with the id of the operation.
139 151
140 152 Input:
141 153
142 154 Operation : an operation-type object with the attributes: name, type and id.
143 155
144 156 """
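A sketch of how a controller-side configuration maps onto this dispatch; FakeOpConf below is a hypothetical stand-in for the OperationConf objects built by the controller:

    # hypothetical stand-in for an OperationConf object
    class FakeOpConf:
        id = 1
        name = 'selectChannels'
        type = 'self'        # 'self' -> callMethod(name), 'other' -> callObject(id)

    sts = procUnit.call(FakeOpConf(), channelList=[0, 1])   # procUnit: any ProcessingUnit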
145 157
146 158 if operationConf.type == 'self':
147 self.callMethod(operationConf.name, **kwargs)
148 return
159 sts = self.callMethod(operationConf.name, **kwargs)
149 160
150 161 if operationConf.type == 'other':
151 self.callObject(operationConf.id, **kwargs)
152 return
162 sts = self.callObject(operationConf.id, **kwargs)
163
164 return sts
153 165
154 166 def setInput(self, dataIn):
155 167
156 168 self.dataIn = dataIn
157 169
158 170 def getOutput(self):
159 171
160 172 return self.dataOut
161 173
162 174 class Operation():
163 175
164 176 """
165 177 Base class for defining the additional operations that can be added to the ProcessingUnit class
166 178 and that need to accumulate prior information about the data to be processed. Preferably use an
167 179 accumulation buffer inside this class.
168 180 
169 181 Example: coherent integration, which needs the information from the n previous profiles (buffer).
170 182
171 183 """
172 184
173 185 __buffer = None
174 186 __isConfig = False
175 187
176 188 def __init__(self):
177 189
178 190 pass
179 191
180 192 def run(self, dataIn, **kwargs):
181 193
182 194 """
183 195 Performs the required operations on dataIn.data and updates the attributes of the dataIn object.
184 196
185 197 Input:
186 198
187 199 dataIn : an object of type JROData
188 200
189 201 Return:
190 202
191 203 None
192 204
193 205 Affected:
194 206 __buffer : data reception buffer.
195 207
196 208 """
197 209
198 210 raise ValueError, "Not implemented"
199 211
200 212 class VoltageProc(ProcessingUnit):
201 213
202 214
203 215 def __init__(self):
204 216
205 217 self.objectDict = {}
206 218 self.dataOut = Voltage()
207 219
208 220 def init(self):
209 221
210 222 self.dataOut.copy(self.dataIn)
211 223 # There is no need to copy the dataIn attributes on every init();
212 224 # the copy should be made once per new data block
213 225
214 226 def selectChannels(self, channelList):
215 227
216 if self.dataIn.isEmpty():
217 return 0
218
219 228 self.selectChannelsByIndex(channelList)
220 229
221 230 def selectChannelsByIndex(self, channelIndexList):
222 231 """
223 232 Selects a block of data by channel, according to channelIndexList.
224 233
225 234 Input:
226 235 channelIndexList : simple list of channel indexes to select, e.g. [2,3,7]
227 236
228 237 Affected:
229 238 self.dataOut.data
230 239 self.dataOut.channelIndexList
231 240 self.dataOut.nChannels
232 241 self.dataOut.m_ProcessingHeader.totalSpectra
233 242 self.dataOut.systemHeaderObj.numChannels
234 243 self.dataOut.m_ProcessingHeader.blockSize
235 244
236 245 Return:
237 246 None
238 247 """
239 248
240 249 for channel in channelIndexList:
241 250 if channel not in self.dataOut.channelIndexList:
242 251 print channelIndexList
243 252 raise ValueError, "The value %d in channelIndexList is not valid" %channel
244 253
245 254 nChannels = len(channelIndexList)
246 255
247 256 data = self.dataOut.data[channelIndexList,:]
248 257
249 258 self.dataOut.data = data
250 259 self.dataOut.channelIndexList = channelIndexList
251 260 self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
252 261 self.dataOut.nChannels = nChannels
253 262
254 263 return 1
255 264
256 265 class CohInt(Operation):
257 266
258 267 __profIndex = 0
259 268 __withOverapping = False
260 269
261 270 __byTime = False
262 271 __initime = None
263 272 __lastdatatime = None
264 273 __integrationtime = None
265 274
266 275 __buffer = None
267 276
268 277 __dataReady = False
269 278
270 279 nCohInt = None
271 280
272 281
273 282 def __init__(self):
274 283
275 284 self.__isConfig = False
276 285
277 286 def setup(self, nCohInt=None, timeInterval=None, overlapping=False):
278 287 """
279 288 Set the parameters of the integration class.
280 289
281 290 Inputs:
282 291
283 292 nCohInt : Number of coherent integrations
284 293 timeInterval : Time of integration. If the parameter "nCohInt" is selected this one does not work
285 294 overlapping :
286 295
287 296 """
288 297
289 298 self.__initime = None
290 299 self.__lastdatatime = 0
291 300 self.__buffer = None
292 301 self.__dataReady = False
293 302
294 303
295 304 if nCohInt == None and timeInterval == None:
296 305 raise ValueError, "nCohInt or timeInterval should be specified ..."
297 306
298 307 if nCohInt != None:
299 308 self.nCohInt = nCohInt
300 309 self.__byTime = False
301 310 else:
302 311 self.__integrationtime = timeInterval * 60. #if (type(timeInterval)!=integer) -> change this line
303 312 self.nCohInt = 9999
304 313 self.__byTime = True
305 314
306 315 if overlapping:
307 316 self.__withOverapping = True
308 317 self.__buffer = None
309 318 else:
310 319 self.__withOverapping = False
311 320 self.__buffer = 0
312 321
313 322 self.__profIndex = 0
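The two configuration modes described in the setup docstring translate into run() calls like the following sketch (dataOut and the numeric values are hypothetical):

    # profile-count mode: sum every 8 profiles
    cohInt = CohInt()
    cohInt.run(dataOut, nCohInt=8)

    # time mode: sum all profiles arriving within 1 minute (timeInterval is in minutes)
    cohIntByTime = CohInt()
    cohIntByTime.run(dataOut, timeInterval=1, overlapping=False)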
314 323
315 324 def putData(self, data):
316 325
317 326 """
318 327 Add a profile to the __buffer and increase in one the __profileIndex
319 328
320 329 """
321 330
322 331 if not self.__withOverapping:
323 332 self.__buffer += data
324 333 self.__profIndex += 1
325 334 return
326 335
327 336 #Overlapping data
328 337 nChannels, nHeis = data.shape
329 338 data = numpy.reshape(data, (1, nChannels, nHeis))
330 339
331 340 #If the buffer is empty then it takes the data value
332 341 if self.__buffer == None:
333 342 self.__buffer = data
334 343 self.__profIndex += 1
335 344 return
336 345
337 346 #If the buffer length is lower than nCohInt then stack the data value
338 347 if self.__profIndex < self.nCohInt:
339 348 self.__buffer = numpy.vstack((self.__buffer, data))
340 349 self.__profIndex += 1
341 350 return
342 351
343 352 #If the buffer length is equal to nCohInt then replace the last buffer value with the data value
344 353 self.__buffer = numpy.roll(self.__buffer, -1, axis=0)
345 354 self.__buffer[self.nCohInt-1] = data
346 355 self.__profIndex = self.nCohInt
347 356 return
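The overlapping branch above keeps only the last nCohInt profiles in a rolling stack, so once the stack is full every new profile yields a fresh sum. A standalone numpy sketch of that sliding-window behaviour (all shapes are hypothetical):

    import numpy

    nCohInt, nChannels, nHeis = 4, 2, 8
    buffer = None
    for k in range(6):                                   # six incoming profiles
        profile = numpy.full((nChannels, nHeis), float(k))
        if buffer is None:                               # first profile starts the stack
            buffer = profile.reshape(1, nChannels, nHeis)
        elif buffer.shape[0] < nCohInt:                  # still filling up
            buffer = numpy.vstack((buffer, profile.reshape(1, nChannels, nHeis)))
        else:                                            # full: drop the oldest, append the newest
            buffer = numpy.roll(buffer, -1, axis=0)
            buffer[nCohInt - 1] = profile
    integrated = numpy.sum(buffer, axis=0)               # sum over the last nCohInt profiles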
348 357
349 358
350 359 def pushData(self):
351 360 """
352 361 Return the sum of the last profiles and the profiles used in the sum.
353 362
354 363 Affected:
355 364
356 365 self.__profileIndex
357 366
358 367 """
359 368
360 369 if not self.__withOverapping:
361 370 data = self.__buffer
362 371 nCohInt = self.__profIndex
363 372
364 373 self.__buffer = 0
365 374 self.__profIndex = 0
366 375
367 376 return data, nCohInt
368 377
369 378 #Integration with Overlapping
370 379 data = numpy.sum(self.__buffer, axis=0)
371 380 nCohInt = self.__profIndex
372 381
373 382 return data, nCohInt
374 383
375 384 def byProfiles(self, data):
376 385
377 386 self.__dataReady = False
378 387 avgdata = None
379 388 nCohInt = None
380 389
381 390 self.putData(data)
382 391
383 392 if self.__profIndex == self.nCohInt:
384 393
385 394 avgdata, nCohInt = self.pushData()
386 395 self.__dataReady = True
387 396
388 397 return avgdata
389 398
390 399 def byTime(self, data, datatime):
391 400
392 401 self.__dataReady = False
393 402 avgdata = None
394 403 nCohInt = None
395 404
396 405 self.putData(data)
397 406
398 407 if (datatime - self.__initime) >= self.__integrationtime:
399 408 avgdata, nCohInt = self.pushData()
400 409 self.nCohInt = nCohInt
401 410 self.__dataReady = True
402 411
403 412 return avgdata
404 413
405 414 def integrate(self, data, datatime=None):
406 415
407 416 if self.__initime == None:
408 417 self.__initime = datatime
409 418
410 419 if self.__byTime:
411 420 avgdata = self.byTime(data, datatime)
412 421 else:
413 422 avgdata = self.byProfiles(data)
414 423
415 424
416 425 self.__lastdatatime = datatime
417 426
418 427 if avgdata == None:
419 428 return None, None
420 429
421 430 avgdatatime = self.__initime
422 431
423 432 deltatime = datatime -self.__lastdatatime
424 433
425 434 if not self.__withOverapping:
426 435 self.__initime = datatime
427 436 else:
428 437 self.__initime += deltatime
429 438
430 439 return avgdata, avgdatatime
431 440
432 441 def run(self, dataOut, nCohInt=None, timeInterval=None, overlapping=False):
433 442
434 443 if not self.__isConfig:
435 444 self.setup(nCohInt, timeInterval, overlapping)
436 445 self.__isConfig = True
437 446
438 447 avgdata, avgdatatime = self.integrate(dataOut.data, dataOut.utctime)
439 448
440 449 # dataOut.timeInterval *= nCohInt
441 450 dataOut.flagNoData = True
442 451
443 452 if self.__dataReady:
444 453 dataOut.data = avgdata
445 454 dataOut.timeInterval *= self.nCohInt
446 455 dataOut.nCohInt *= self.nCohInt
447 456 dataOut.utctime = avgdatatime
448 457 dataOut.flagNoData = False
449 458
450 459
451 460 class SpectraProc(ProcessingUnit):
452 461
453 462 def __init__(self):
463
454 464 self.objectDict = {}
455 465 self.buffer = None
456 466 self.firstdatatime = None
457 467 self.profIndex = 0
458 468 self.dataOut = Spectra()
459 469
460 def init(self, nFFTPoints=None, pairsList=None):
461 if self.dataIn.type == "Spectra":
462 self.dataOut.copy(self.dataIn)
463 return
464
465 if self.dataIn.type == "Voltage":
466
467 if nFFTPoints == None:
468 raise ValueError, "This SpectraProc.setup() need nFFTPoints input variable"
469
470 if pairsList == None:
471 nPairs = 0
472 else:
473 nPairs = len(pairsList)
474
475 self.dataOut.nFFTPoints = nFFTPoints
476 self.dataOut.pairsList = pairsList
477 self.dataOut.nPairs = nPairs
478
479 if self.buffer == None:
480 self.buffer = numpy.zeros((self.dataIn.nChannels,
481 self.dataOut.nFFTPoints,
482 self.dataIn.nHeights),
483 dtype='complex')
484
485
486 self.buffer[:,self.profIndex,:] = self.dataIn.data
487 self.profIndex += 1
488
489 if self.firstdatatime == None:
490 self.firstdatatime = self.dataIn.utctime
491
492 if self.profIndex == self.dataOut.nFFTPoints:
493 self.__updateObjFromInput()
494 self.__getFft()
495
496 self.dataOut.flagNoData = False
497
498 self.buffer = None
499 self.firstdatatime = None
500 self.profIndex = 0
501
502 return
503
504 raise ValueError, "The type object %s is not valid"%(self.dataIn.type)
505
506 470 def __updateObjFromInput(self):
507 471
508 472 self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()
509 473 self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()
510 474 self.dataOut.channelList = self.dataIn.channelList
511 475 self.dataOut.heightList = self.dataIn.heightList
512 476 self.dataOut.dtype = self.dataIn.dtype
513 477 self.dataOut.nHeights = self.dataIn.nHeights
514 478 self.dataOut.nChannels = self.dataIn.nChannels
515 479 self.dataOut.nBaud = self.dataIn.nBaud
516 480 self.dataOut.nCode = self.dataIn.nCode
517 481 self.dataOut.code = self.dataIn.code
518 482 self.dataOut.nProfiles = self.dataOut.nFFTPoints
519 483 self.dataOut.channelIndexList = self.dataIn.channelIndexList
520 484 self.dataOut.flagTimeBlock = self.dataIn.flagTimeBlock
521 485 self.dataOut.utctime = self.firstdatatime
522 486 self.dataOut.flagDecodeData = self.dataIn.flagDecodeData #assuming the data is already decoded
523 487 self.dataOut.flagDeflipData = self.dataIn.flagDeflipData #assuming the data has no flip applied
524 488 self.dataOut.flagShiftFFT = self.dataIn.flagShiftFFT
525 489 self.dataOut.nCohInt = self.dataIn.nCohInt
526 490 self.dataOut.nIncohInt = 1
527 491 self.dataOut.ippSeconds = self.dataIn.ippSeconds
528 492 self.dataOut.timeInterval = self.dataIn.timeInterval*self.dataOut.nFFTPoints
529
493
530 494 def __getFft(self):
531 495 """
532 496 Converts Voltage values into Spectra.
533 497
534 498 Affected:
535 499 self.dataOut.data_spc
536 500 self.dataOut.data_cspc
537 501 self.dataOut.data_dc
538 502 self.dataOut.heightList
539 503 self.dataOut.m_BasicHeader
540 504 self.dataOut.m_ProcessingHeader
541 505 self.dataOut.radarControllerHeaderObj
542 506 self.dataOut.systemHeaderObj
543 507 self.profIndex
544 508 self.buffer
545 509 self.dataOut.flagNoData
546 510 self.dataOut.dtype
547 511 self.dataOut.nPairs
548 512 self.dataOut.nChannels
549 513 self.dataOut.nProfiles
550 514 self.dataOut.systemHeaderObj.numChannels
551 515 self.dataOut.m_ProcessingHeader.totalSpectra
552 516 self.dataOut.m_ProcessingHeader.profilesPerBlock
553 517 self.dataOut.m_ProcessingHeader.numHeights
554 518 self.dataOut.m_ProcessingHeader.spectraComb
555 519 self.dataOut.m_ProcessingHeader.shif_fft
556 520 """
557 521 fft_volt = numpy.fft.fft(self.buffer,axis=1)
558 522 dc = fft_volt[:,0,:]
559 523
560 524 #self-spectra computation
561 525 fft_volt = numpy.fft.fftshift(fft_volt,axes=(1,))
562 526 spc = fft_volt * numpy.conjugate(fft_volt)
563 527 spc = spc.real
564 528
565 529 blocksize = 0
566 530 blocksize += dc.size
567 531 blocksize += spc.size
568 532
569 533 cspc = None
570 534 pairIndex = 0
571 535 if self.dataOut.pairsList != None:
572 536 #cross-spectra computation
573 537 cspc = numpy.zeros((self.dataOut.nPairs, self.dataOut.nFFTPoints, self.dataOut.nHeights), dtype='complex')
574 538 for pair in self.dataOut.pairsList:
575 539 cspc[pairIndex,:,:] = numpy.abs(fft_volt[pair[0],:,:] * numpy.conjugate(fft_volt[pair[1],:,:]))
576 540 pairIndex += 1
577 541 blocksize += cspc.size
578 542
579 543 self.dataOut.data_spc = spc
580 544 self.dataOut.data_cspc = cspc
581 545 self.dataOut.data_dc = dc
582 546 self.dataOut.blockSize = blocksize
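A standalone numpy sketch of the spectral products computed above, for one buffered block with hypothetical dimensions: the self-spectra are the magnitude squared of the shifted FFT of each channel, and the cross-spectra combine channel pairs (here, as in __getFft, only the magnitude of the cross product is kept):

    import numpy

    nChannels, nFFTPoints, nHeights = 2, 16, 10
    buf = (numpy.random.randn(nChannels, nFFTPoints, nHeights) +
           1j * numpy.random.randn(nChannels, nFFTPoints, nHeights))

    fft_volt = numpy.fft.fft(buf, axis=1)
    dc = fft_volt[:, 0, :]                                 # zero-frequency (DC) bin
    fft_volt = numpy.fft.fftshift(fft_volt, axes=(1,))
    spc = (fft_volt * numpy.conjugate(fft_volt)).real      # self-spectra, one per channel

    pairsList = [(0, 1)]                                   # hypothetical channel pair
    cspc = numpy.array([numpy.abs(fft_volt[p0] * numpy.conjugate(fft_volt[p1]))
                        for p0, p1 in pairsList])          # cross-spectra, one per pair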
547
548 def init(self, nFFTPoints=None, pairsList=None):
549
550 if self.dataIn.type == "Spectra":
551 self.dataOut.copy(self.dataIn)
552 return
553
554 if self.dataIn.type == "Voltage":
555
556 if nFFTPoints == None:
557 raise ValueError, "This SpectraProc.init() needs the nFFTPoints input variable"
558
559 if pairsList == None:
560 nPairs = 0
561 else:
562 nPairs = len(pairsList)
563
564 self.dataOut.nFFTPoints = nFFTPoints
565 self.dataOut.pairsList = pairsList
566 self.dataOut.nPairs = nPairs
567
568 if self.buffer == None:
569 self.buffer = numpy.zeros((self.dataIn.nChannels,
570 self.dataOut.nFFTPoints,
571 self.dataIn.nHeights),
572 dtype='complex')
573
574
575 self.buffer[:,self.profIndex,:] = self.dataIn.data
576 self.profIndex += 1
577
578 if self.firstdatatime == None:
579 self.firstdatatime = self.dataIn.utctime
580
581 if self.profIndex == self.dataOut.nFFTPoints:
582 self.__updateObjFromInput()
583 self.__getFft()
584
585 self.dataOut.flagNoData = False
586
587 self.buffer = None
588 self.firstdatatime = None
589 self.profIndex = 0
590
591 return
592
593 raise ValueError, "The type object %s is not valid"%(self.dataIn.type)
594
595 def selectChannels(self, channelList):
596
597 self.selectChannelsByIndex(channelList)
598
599 def selectChannelsByIndex(self, channelIndexList):
600 """
601 Selecciona un bloque de datos en base a canales segun el channelIndexList
602
603 Input:
604 channelIndexList : lista sencilla de canales a seleccionar por ej. [2,3,7]
605
606 Affected:
607 self.dataOut.data
608 self.dataOut.channelIndexList
609 self.dataOut.nChannels
610 self.dataOut.m_ProcessingHeader.totalSpectra
611 self.dataOut.systemHeaderObj.numChannels
612 self.dataOut.m_ProcessingHeader.blockSize
613
614 Return:
615 None
616 """
617
618 for channel in channelIndexList:
619 if channel not in self.dataOut.channelIndexList:
620 print channelIndexList
621 raise ValueError, "The value %d in channelIndexList is not valid" %channel
622
623 nChannels = len(channelIndexList)
624
625 data = self.dataOut.data_spc[channelIndexList,:]
626
627 self.dataOut.data_spc = data
628 self.dataOut.channelIndexList = channelIndexList
629 self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
630 self.dataOut.nChannels = nChannels
631
632 return 1
583 633
584 634
585 635 class IncohInt(Operation):
586 636
587 637 def __init__(self):
588 638 pass No newline at end of file