##// END OF EJS Templates
Controlador de Signal Chain finalizado....
Miguel Valdez -
r197:77188df611db
parent child
Show More
This diff has been collapsed as it changes many lines, (609 lines changed) Show them Hide them
@@ -0,0 +1,609
1 '''
2 Created on September , 2012
3 @author:
4 '''
5 from xml.etree.ElementTree import Element, SubElement, ElementTree
6 from xml.etree import ElementTree as ET
7 from xml.dom import minidom
8
9 import sys
10 import datetime
11 from model.jrodataIO import *
12 from model.jroprocessing import *
13
def prettify(elem):
    """Serialize *elem* and return it re-indented as a human-readable XML string."""
    raw = ET.tostring(elem, 'utf-8')
    dom = minidom.parseString(raw)
    return dom.toprettyxml(indent="  ")
20
class ParameterConf():
    """Configuration holder for a single operation parameter.

    The value is always stored as a string so it can round-trip through
    XML attributes; getValue() converts it back to the declared type.
    """

    id = None
    name = None
    value = None
    type = None

    ELEMENTNAME = 'Parameter'

    def __init__(self):

        self.type = 'str'

    def getElementName(self):
        """Return the XML tag name used for this element."""
        return self.ELEMENTNAME

    def getValue(self):
        """Convert the stored string back to the declared type and return it.

        Supported types: str, int, float, bool, list, intlist, floatlist,
        date ('YYYY/MM/DD') and time ('HH:MM:SS').

        Raises:
            ValueError for an unknown type name.
        """
        if self.type == 'list':
            return self.value.split(',')

        if self.type == 'intlist':
            return [int(x) for x in self.value.split(',')]

        if self.type == 'floatlist':
            return [float(x) for x in self.value.split(',')]

        if self.type == 'date':
            year, month, day = [int(x) for x in self.value.split('/')]
            return datetime.date(year, month, day)

        if self.type == 'time':
            hour, minute, second = [int(x) for x in self.value.split(':')]
            return datetime.time(hour, minute, second)

        if self.type == 'bool':
            # BUG FIX: bool("0") is True in Python, so the previous
            # eval-based conversion returned True for value "0".
            # Parse the string explicitly instead.
            return self.value.strip().lower() not in ('', '0', 'false', 'none')

        # Explicit converter table instead of eval(self.type): safer, and
        # raises a clear error for unknown type names.
        converters = {'str': str, 'int': int, 'float': float}
        try:
            func = converters[self.type]
        except KeyError:
            raise ValueError("Unknown parameter type: %s" % self.type)

        return func(self.value)

    def setup(self, id, name, value, type='str'):
        """Initialize the parameter; *value* is always stored as a string."""
        self.id = id
        self.name = name
        self.value = str(value)
        self.type = type

    def makeXml(self, opElement):
        """Append this parameter as a child <Parameter> of *opElement*."""
        parmElement = SubElement(opElement, self.ELEMENTNAME)
        parmElement.set('id', str(self.id))
        parmElement.set('name', self.name)
        parmElement.set('value', self.value)
        parmElement.set('type', self.type)

    def readXml(self, parmElement):
        """Restore this parameter's attributes from a <Parameter> element."""
        self.id = parmElement.get('id')
        self.name = parmElement.get('name')
        self.value = parmElement.get('value')
        self.type = parmElement.get('type')

    def printattr(self):
        """Print a one-line, human-readable summary of this parameter."""
        print("Parameter[%s]: name = %s, value = %s, type = %s" % (self.id, self.name, self.value, self.type))
95
class OperationConf():
    """Configuration of one operation inside a processing unit.

    Holds the operation's id, name, execution priority and type
    ('self' = method of the processing unit, 'other' = standalone
    Operation object), plus its list of ParameterConf children.
    """

    id = None
    name = None
    priority = None
    type = None

    parmConfObjList = []

    ELEMENTNAME = 'Operation'

    def __init__(self):
        # BUG FIX: the original bound bare locals (id = 0, name = None, ...)
        # inside __init__, leaving the instance attributes untouched.
        self.id = 0
        self.name = None
        self.priority = None
        self.type = 'self'
        # fresh per-instance list; the class-level one is shared
        self.parmConfObjList = []

    def __getNewId(self):
        # child parameter ids derive from this operation's id: parent*10 + ordinal
        return int(self.id)*10 + len(self.parmConfObjList) + 1

    def getElementName(self):
        """Return the XML tag name used for this element."""
        return self.ELEMENTNAME

    def getParameterObjList(self):
        """Return the list of ParameterConf objects of this operation."""
        return self.parmConfObjList

    def setup(self, id, name, priority, type):
        """Initialize the operation and clear its parameter list."""
        self.id = id
        self.name = name
        self.type = type
        self.priority = priority

        self.parmConfObjList = []

    def addParameter(self, name, value, type='str'):
        """Create a ParameterConf child, append it and return it."""
        id = self.__getNewId()

        parmConfObj = ParameterConf()
        parmConfObj.setup(id, name, value, type)

        self.parmConfObjList.append(parmConfObj)

        return parmConfObj

    def makeXml(self, upElement):
        """Append this operation (and its parameters) under *upElement*."""
        opElement = SubElement(upElement, self.ELEMENTNAME)
        opElement.set('id', str(self.id))
        opElement.set('name', self.name)
        opElement.set('type', self.type)
        opElement.set('priority', str(self.priority))

        for parmConfObj in self.parmConfObjList:
            parmConfObj.makeXml(opElement)

    def readXml(self, opElement):
        """Restore this operation and its parameters from an <Operation> element."""
        self.id = opElement.get('id')
        self.name = opElement.get('name')
        self.type = opElement.get('type')
        self.priority = opElement.get('priority')

        self.parmConfObjList = []

        parmElementList = opElement.getiterator(ParameterConf().getElementName())

        for parmElement in parmElementList:
            parmConfObj = ParameterConf()
            parmConfObj.readXml(parmElement)
            self.parmConfObjList.append(parmConfObj)

    def printattr(self):
        """Print this operation and all of its parameters."""
        print("%s[%s]: name = %s, type = %s, priority = %s" % (self.ELEMENTNAME,
                                                               self.id,
                                                               self.name,
                                                               self.type,
                                                               self.priority))

        for parmConfObj in self.parmConfObjList:
            parmConfObj.printattr()

    def createObject(self):
        """Instantiate and return the Operation class named by self.name.

        Only 'other' operations map to standalone objects; 'self'
        operations are methods of the owning processing unit.

        Raises:
            ValueError for 'self' or unknown operation types.
        """
        if self.type == 'self':
            raise ValueError("This operation type cannot be created")

        if self.type == 'other':
            # eval() looks the class name up in this module's namespace
            # (populated by the star-imports at the top of the file).
            className = eval(self.name)
            return className()

        # BUG FIX: the original fell through and returned an undefined
        # local (NameError) for unknown types.
        raise ValueError("Unknown operation type: %s" % self.type)
195
class ProcUnitConf():
    """Configuration of one processing unit: the class that implements it
    (name), the unit it takes input from (inputId) and the ordered list
    of operations applied on every call.
    """

    id = None
    name = None
    type = None
    inputId = None

    opConfObjList = []

    procUnitObj = None
    # CONSISTENCY FIX: maps operation-conf id -> instantiated Operation
    # object; the original declared an unused "opObjList" here while
    # __init__/createObjects actually use opObjDict.
    opObjDict = {}

    ELEMENTNAME = 'ProcUnit'

    def __init__(self):

        self.id = None
        self.type = None
        self.name = None
        self.inputId = None

        self.opConfObjList = []

        self.procUnitObj = None
        self.opObjDict = {}

    def __getPriority(self):
        # operations run in insertion order: next priority = count + 1
        return len(self.opConfObjList)+1

    def __getNewId(self):
        # child operation ids derive from this unit's id: parent*10 + ordinal
        return int(self.id)*10 + len(self.opConfObjList) + 1

    def getElementName(self):
        """Return the XML tag name used for this element."""
        return self.ELEMENTNAME

    def getId(self):
        """Return this unit's id as a string."""
        return str(self.id)

    def getInputId(self):
        """Return the id of the unit feeding this one, as a string."""
        return str(self.inputId)

    def getOperationObjList(self):
        """Return the list of OperationConf objects of this unit."""
        return self.opConfObjList

    def getProcUnitObj(self):
        """Return the instantiated processing-unit object (set by createObjects)."""
        return self.procUnitObj

    def setup(self, id, name, type, inputId):
        """Initialize the unit and register the implicit 'init' operation."""
        self.id = id
        self.name = name
        self.type = type
        self.inputId = inputId

        self.opConfObjList = []

        # every unit starts with an 'init' self-operation
        self.addOperation(name='init', optype='self')

    def addOperation(self, name, optype='self'):
        """Create an OperationConf, append it and return it.

        optype 'self' means a method of the processing unit; 'other'
        means a standalone Operation object created later.
        """
        id = self.__getNewId()
        priority = self.__getPriority()

        opConfObj = OperationConf()
        opConfObj.setup(id, name=name, priority=priority, type=optype)

        self.opConfObjList.append(opConfObj)

        return opConfObj

    def makeXml(self, procUnitElement):
        """Append this unit (and its operations) under *procUnitElement*."""
        upElement = SubElement(procUnitElement, self.ELEMENTNAME)
        upElement.set('id', str(self.id))
        upElement.set('name', self.name)
        upElement.set('type', self.type)
        upElement.set('inputId', str(self.inputId))

        for opConfObj in self.opConfObjList:
            opConfObj.makeXml(upElement)

    def readXml(self, upElement):
        """Restore this unit and its operations from a <ProcUnit> element."""
        self.id = upElement.get('id')
        self.name = upElement.get('name')
        self.type = upElement.get('type')
        self.inputId = upElement.get('inputId')

        self.opConfObjList = []

        opElementList = upElement.getiterator(OperationConf().getElementName())

        for opElement in opElementList:
            opConfObj = OperationConf()
            opConfObj.readXml(opElement)
            self.opConfObjList.append(opConfObj)

    def printattr(self):
        """Print this unit and all of its operations."""
        print("%s[%s]: name = %s, type = %s, inputId = %s" % (self.ELEMENTNAME,
                                                              self.id,
                                                              self.name,
                                                              self.type,
                                                              self.inputId))

        for opConfObj in self.opConfObjList:
            opConfObj.printattr()

    def createObjects(self):
        """Instantiate this unit's class and its 'other' operation objects.

        The unit class is looked up by name in this module's namespace
        (populated by the star-imports at the top of the file).
        """
        className = eval(self.name)
        procUnitObj = className()

        for opConfObj in self.opConfObjList:

            # 'self' operations are methods of procUnitObj: nothing to build
            if opConfObj.type == 'self':
                continue

            opObj = opConfObj.createObject()

            self.opObjDict[opConfObj.id] = opObj
            procUnitObj.addOperation(opObj, opConfObj.id)

        self.procUnitObj = procUnitObj

        return procUnitObj

    def run(self):
        """Execute every configured operation once, passing its parameters."""
        for opConfObj in self.opConfObjList:
            kwargs = {}
            for parmConfObj in opConfObj.getParameterObjList():
                kwargs[parmConfObj.name] = parmConfObj.getValue()

            self.procUnitObj.call(opConfObj, **kwargs)
338
339
340
class ReadUnitConf(ProcUnitConf):
    """Configuration of a data-reading unit (a ProcUnit with no input).

    Stores the search window (path, dates, times) and registers a single
    'run' operation carrying those values as parameters.
    """

    path = None
    startDate = None
    endDate = None
    startTime = None
    endTime = None
    online = None
    expLabel = None
    delay = None

    ELEMENTNAME = 'ReadUnit'

    def __init__(self):

        self.id = None
        self.type = None
        self.name = None
        # readers are data sources: inputId 0 means "no input unit"
        self.inputId = 0

        self.opConfObjList = []
        self.opObjList = []
        # BUG FIX: the inherited createObjects() writes to opObjDict, which
        # the original __init__ never created; initialize it as the parent does.
        self.opObjDict = {}

    def getElementName(self):
        """Return the XML tag name used for this element."""
        return self.ELEMENTNAME

    def setup(self, id, name, type, path, startDate, endDate, startTime, endTime, online=0, expLabel='', delay=60):
        """Store the read window and register the implicit 'run' operation."""
        self.id = id
        self.name = name
        self.type = type

        self.path = path
        self.startDate = startDate
        self.endDate = endDate
        self.startTime = startTime
        self.endTime = endTime
        self.online = online
        self.expLabel = expLabel
        self.delay = delay

        self.addRunOperation()

    def addRunOperation(self):
        """Create the 'run' operation and attach the search parameters to it."""
        opObj = self.addOperation(name='run', optype='self')

        opObj.addParameter(name='path', value=self.path, type='str')
        opObj.addParameter(name='startDate', value=self.startDate, type='date')
        opObj.addParameter(name='endDate', value=self.endDate, type='date')
        opObj.addParameter(name='startTime', value=self.startTime, type='time')
        opObj.addParameter(name='endTime', value=self.endTime, type='time')
        opObj.addParameter(name='expLabel', value=self.expLabel, type='str')
        opObj.addParameter(name='online', value=self.online, type='bool')
        opObj.addParameter(name='delay', value=self.delay, type='float')

        return opObj
400
401
class Controller():
    """Top-level configuration of a signal-chain project.

    Builds read/processing unit configurations, serializes the whole
    project to XML (writeXml) and back (readXml), instantiates the
    processing objects, connects them by inputId and runs the chain.
    """

    id = None
    name = None
    description = None
#     readUnitConfObjList = None
    procUnitConfObjDict = None

    ELEMENTNAME = 'Controller'

    def __init__(self):

        self.id = None
        self.name = None
        self.description = None

#         self.readUnitConfObjList = []
        self.procUnitConfObjDict = {}

    def __getNewId(self):
        # unit ids derive from the project id: project*10 + ordinal
        id = int(self.id)*10 + len(self.procUnitConfObjDict) + 1

        return str(id)

    def getElementName(self):
        """Return the XML tag name used for this element."""
        return self.ELEMENTNAME

    def setup(self, id, name, description):
        """Initialize the project attributes (id, name, description)."""
        self.id = id
        self.name = name
        self.description = description

    def addReadUnit(self, type, path, startDate='', endDate='', startTime='', endTime='', online=0, expLabel='', delay=60):
        """Create a ReadUnitConf named '<type>Reader', register it and return it."""
        id = self.__getNewId()
        name = '%sReader' %(type)

        readUnitConfObj = ReadUnitConf()
        readUnitConfObj.setup(id, name, type, path, startDate, endDate, startTime, endTime, online, expLabel, delay)

        self.procUnitConfObjDict[readUnitConfObj.getId()] = readUnitConfObj

        return readUnitConfObj

    def addProcUnit(self, type, inputId):
        """Create a ProcUnitConf named '<type>Proc' fed by *inputId*, register and return it."""
        id = self.__getNewId()
        name = '%sProc' %(type)

        procUnitConfObj = ProcUnitConf()
        procUnitConfObj.setup(id, name, type, inputId)

        self.procUnitConfObjDict[procUnitConfObj.getId()] = procUnitConfObj

        return procUnitConfObj

    def makeXml(self):
        """Build the project XML tree and store it in self.projectElement."""
        projectElement = Element('Controller')
        projectElement.set('id', str(self.id))
        projectElement.set('name', self.name)
        projectElement.set('description', self.description)

#         for readUnitConfObj in self.readUnitConfObjList:
#             readUnitConfObj.makeXml(projectElement)

        for procUnitConfObj in self.procUnitConfObjDict.values():
            procUnitConfObj.makeXml(projectElement)

        self.projectElement = projectElement

    def writeXml(self, filename):
        """Serialize the project to *filename* (also echoes it to stdout)."""
        self.makeXml()

        print prettify(self.projectElement)

        ElementTree(self.projectElement).write(filename, method='xml')

    def readXml(self, filename):
        """Load a project from *filename*, rebuilding all unit configurations."""
        #tree = ET.parse(filename)
        self.projectElement = None
#         self.readUnitConfObjList = []
        self.procUnitConfObjDict = {}

        self.projectElement = ElementTree().parse(filename)

        self.project = self.projectElement.tag

        self.id = self.projectElement.get('id')
        self.name = self.projectElement.get('name')
        self.description = self.projectElement.get('description')

        # read units first (<ReadUnit>), then processing units (<ProcUnit>)
        readUnitElementList = self.projectElement.getiterator(ReadUnitConf().getElementName())

        for readUnitElement in readUnitElementList:
            readUnitConfObj = ReadUnitConf()
            readUnitConfObj.readXml(readUnitElement)

            self.procUnitConfObjDict[readUnitConfObj.getId()] = readUnitConfObj

        procUnitElementList = self.projectElement.getiterator(ProcUnitConf().getElementName())

        for procUnitElement in procUnitElementList:
            procUnitConfObj = ProcUnitConf()
            procUnitConfObj.readXml(procUnitElement)

            self.procUnitConfObjDict[procUnitConfObj.getId()] = procUnitConfObj

    def printattr(self):
        """Print the project header and every unit configuration."""
        print "Controller[%s]: name = %s, description = %s" %(self.id,
                                                              self.name,
                                                              self.description)

#         for readUnitConfObj in self.readUnitConfObjList:
#             readUnitConfObj.printattr()

        for procUnitConfObj in self.procUnitConfObjDict.values():
            procUnitConfObj.printattr()

    def createObjects(self):
        """Instantiate the processing objects of every unit configuration."""
#         for readUnitConfObj in self.readUnitConfObjList:
#             readUnitConfObj.createObjects()

        for procUnitConfObj in self.procUnitConfObjDict.values():
            procUnitConfObj.createObjects()

    def __connect(self, objIN, obj):
        # feed obj with the output data object of objIN
        obj.setInput(objIN.getOutput())

    def connectObjects(self):
        """Wire each unit's input to the output of the unit named by inputId."""
        for puConfObj in self.procUnitConfObjDict.values():

            inputId = puConfObj.getInputId()

            # inputId == 0 marks a data source (read unit): nothing to connect
            if int(inputId) == 0:
                continue

            puConfINObj = self.procUnitConfObjDict[inputId]

            puObj = puConfObj.getProcUnitObj()
            puINObj = puConfINObj.getProcUnitObj()

            self.__connect(puINObj, puObj)

    def run(self):
        """Run the whole chain forever.

        NOTE(review): there is no stop condition; the loop never exits.
        """
#         for readUnitConfObj in self.readUnitConfObjList:
#             readUnitConfObj.run()
        while(True):
            for procUnitConfObj in self.procUnitConfObjDict.values():
                procUnitConfObj.run()
562
if __name__ == '__main__':

    # Demo: build a two-stage voltage-processing chain, write it to XML,
    # read it back, instantiate and run it.
    desc = "Segundo Test"
    filename = "schain.xml"

    controllerObj = Controller()

    controllerObj.setup(id = '191', name='test01', description=desc)

    # data source: raw voltage files in the given date/time window
    readUnitConfObj = controllerObj.addReadUnit(type='Voltage',
                                                path='/home/roj-idl71/Data/RAWDATA/Meteors',
                                                startDate='2012/01/01',
                                                endDate='2012/12/31',
                                                startTime='00:00:00',
                                                endTime='23:59:59',
                                                online=0)

    # two chained processing units: reader -> proc1 -> proc2
    procUnitConfObj1 = controllerObj.addProcUnit(type='Voltage', inputId=readUnitConfObj.getId())

    procUnitConfObj2 = controllerObj.addProcUnit(type='Voltage', inputId=procUnitConfObj1.getId())

    opObj11 = procUnitConfObj1.addOperation(name='selectChannels')
    opObj11.addParameter(name='channelList', value='1,2', type='intlist')

#     opObj12 = procUnitConfObj1.addOperation(name='decoder')
#     opObj12.addParameter(name='ncode', value='2', type='int')
#     opObj12.addParameter(name='nbauds', value='8', type='int')
#     opObj12.addParameter(name='code0', value='001110011', type='int')
#     opObj12.addParameter(name='code1', value='001110011', type='int')

    # coherent integration runs as a standalone ('other') operation object
    opObj21 = procUnitConfObj2.addOperation(name='CohInt', optype='other')
    opObj21.addParameter(name='nCohInt', value='10', type='int')


    print "Escribiendo el archivo XML"

    controllerObj.writeXml(filename)

    print "Leyendo el archivo XML"
    controllerObj.readXml(filename)
    #controllerObj.printattr()

    controllerObj.createObjects()
    controllerObj.connectObjects()
    # NOTE(review): run() loops forever, so this script never terminates.
    controllerObj.run()
609 No newline at end of file
@@ -0,0 +1,1
1 <Controller description="Segundo Test" id="191" name="test01"><ReadUnit id="1911" inputId="0" name="VoltageReader" type="Voltage"><Operation id="19111" name="run" priority="1" type="self"><Parameter id="191111" name="path" type="str" value="/home/roj-idl71/Data/RAWDATA/Meteors" /><Parameter id="191112" name="startDate" type="date" value="2012/01/01" /><Parameter id="191113" name="endDate" type="date" value="2012/12/31" /><Parameter id="191114" name="startTime" type="time" value="00:00:00" /><Parameter id="191115" name="endTime" type="time" value="23:59:59" /><Parameter id="191116" name="expLabel" type="str" value="" /><Parameter id="191117" name="online" type="bool" value="0" /><Parameter id="191118" name="delay" type="float" value="60" /></Operation></ReadUnit><ProcUnit id="1913" inputId="1912" name="VoltageProc" type="Voltage"><Operation id="19131" name="init" priority="1" type="self" /><Operation id="19132" name="CohInt" priority="2" type="other"><Parameter id="191321" name="nCohInt" type="int" value="10" /></Operation></ProcUnit><ProcUnit id="1912" inputId="1911" name="VoltageProc" type="Voltage"><Operation id="19121" name="init" priority="1" type="self" /><Operation id="19122" name="selectChannels" priority="2" type="self"><Parameter id="191221" name="channelList" type="intlist" value="1,2" /></Operation></ProcUnit></Controller> No newline at end of file
1 NO CONTENT: file renamed from schainpy/graphics/__init__.py to schainpy/model/graphics/__init__.py
1 NO CONTENT: file renamed from schainpy/graphics/figure.py to schainpy/model/graphics/figure.py
@@ -1,80 +1,80
1 1 import matplotlib
2 matplotlib.use("TKAgg")
2 matplotlib.use("Agg")
3 3 import matplotlib.pyplot
4 import scitools.numpyutils
4 #import scitools.numpyutils
5 5 from mpl_toolkits.axes_grid1 import make_axes_locatable
6 6
def init(idfigure, wintitle, width, height):
    """Create (or fetch) pyplot figure *idfigure*, set its window title and
    size, and return it. Interactive mode is switched off during creation
    and re-enabled afterwards.
    """
    matplotlib.pyplot.ioff()
    # FIX: the original called "matplotlib.pyplot.matplotlib.pyplot.figure"
    # (pyplot re-exports matplotlib, so the chain resolved to the same
    # function); use the direct call.
    fig = matplotlib.pyplot.figure(num=idfigure, facecolor="w")
    fig.canvas.manager.set_window_title(wintitle)
    fig.canvas.manager.resize(width, height)
    matplotlib.pyplot.ion()
    return fig
14 14
def setWinTitle(fig, title):
    """Set the window title of an existing figure."""
    manager = fig.canvas.manager
    manager.set_window_title(title)
17 17
def setTextFromAxes(idfigure, ax, title):
    """Annotate *title* near the top-left corner (figure fraction coords).

    NOTE(review): the figure lookup only makes *idfigure* current; the
    annotation itself is drawn through *ax*.
    """
    fig = matplotlib.pyplot.figure(idfigure)
    ax.annotate(title, xy=(.1, .99),
                xycoords='figure fraction',
                horizontalalignment='left', verticalalignment='top',
                fontsize=10)
24 24
def setTitle(idfigure, title):
    """Set the super-title of figure *idfigure*."""
    fig = matplotlib.pyplot.figure(idfigure)
    fig.suptitle(title)
28 28
def makeAxes(idfigure, nrow, ncol, xpos, ypos, colspan, rowspan):
    """Create and return a subplot axes on figure *idfigure*, placed at grid
    cell (xpos, ypos) of an nrow x ncol grid and spanning rowspan x colspan
    cells.
    """
    fig = matplotlib.pyplot.figure(idfigure)
    ax = matplotlib.pyplot.subplot2grid((nrow, ncol), (xpos, ypos), colspan=colspan, rowspan=rowspan)
    return ax
33 33
def pline(ax, x, y, xmin, xmax, ymin, ymax, xlabel, ylabel, title, firsttime):
    """Draw (firsttime=True) or update (firsttime=False) a line plot on *ax*.

    On the first call the limits, labels and title are configured; later
    calls only replace the data of the existing line artist, which is
    much faster for real-time updating.
    """
    if firsttime:
        ax.plot(x, y)
        ax.set_xlim([xmin,xmax])
        ax.set_ylim([ymin,ymax])
        ax.set_xlabel(xlabel, size=8)
        ax.set_ylabel(ylabel, size=8)
        ax.set_title(title, size=10)
        matplotlib.pyplot.tight_layout()
    else:
        # reuse the first line artist instead of re-plotting
        ax.lines[0].set_data(x,y)
45 45
def draw(idfigure):
    """Force a canvas redraw of figure *idfigure*."""
    fig = matplotlib.pyplot.figure(idfigure)
    fig.canvas.draw()
49 49
def pcolor(ax, x, y, z, xmin, xmax, ymin, ymax, zmin, zmax, xlabel, ylabel, title, firsttime, mesh):
    """Draw (firsttime=True) or update (firsttime=False) a pcolormesh on *ax*.

    First call: configures limits/labels, creates the mesh and a colorbar
    axis on the right, and returns the new mesh. Later calls: write the
    new values into the existing *mesh* and return it.

    NOTE(review): z is transposed before plotting, so x appears to index
    the first axis of z and y the second — confirm against callers.
    """
    if firsttime:
        # reserve a narrow column on the right of ax for the colorbar
        divider = make_axes_locatable(ax)
        ax_cb = divider.new_horizontal(size="5%", pad=0.05)
        fig1 = ax.get_figure()
        fig1.add_axes(ax_cb)

        ax.set_xlim([xmin,xmax])
        ax.set_ylim([ymin,ymax])
        ax.set_xlabel(xlabel)
        ax.set_ylabel(ylabel)
        ax.set_title(title)

        imesh=ax.pcolormesh(x,y,z.T,vmin=zmin,vmax=zmax)
        matplotlib.pyplot.colorbar(imesh, cax=ax_cb)
        ax_cb.yaxis.tick_right()
        for tl in ax_cb.get_yticklabels():
            tl.set_visible(True)
        ax_cb.yaxis.tick_right()
        matplotlib.pyplot.tight_layout()
        return imesh
    else:
        z = z.T
        # pcolormesh stores one value per cell, i.e. (M-1)x(N-1) for an
        # MxN grid: drop the last row/column before updating in place
        z = z[0:-1,0:-1]
        mesh.set_array(z.ravel())

        return mesh
77 77
78 78
79 79
80 80 No newline at end of file
@@ -1,2473 +1,2476
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JRODataIO.py 169 2012-11-19 21:57:03Z murco $
5 5 '''
6 6
7 7 import os, sys
8 8 import glob
9 9 import time
10 10 import numpy
11 11 import fnmatch
12 12 import time, datetime
13 13
14 14 from jrodata import *
15 15 from jroheaderIO import *
16 from jroprocessing import *
16 17
def isNumber(str):
    """
    Return True when *str* can be converted to a float, False otherwise.

    Input:
        str : the value to test (the parameter name shadows the builtin
              str; kept for backward compatibility with existing callers).

    Return:
        True  : the value is numeric
        False : it is not
    """
    try:
        float( str )
        return True
    except (TypeError, ValueError):
        # FIX: narrowed from a bare except — only conversion failures mean
        # "not a number"; anything else should propagate.
        return False
35 36
def isThisFileinRange(filename, startUTSeconds, endUTSeconds):
    """
    Determine whether a data file falls inside the given date range.

    Inputs:
        filename       : full path of a Jicamarca-format data file (.r)
        startUTSeconds : start of the selected range, in seconds since 01/01/1970
        endUTSeconds   : end of the selected range, in seconds since 01/01/1970

    Return:
        Boolean : 1 (True) if the file contains data inside the range,
                  0 (False) otherwise.

    Exceptions:
        IOError if the file does not exist or cannot be opened.
    """
    basicHeaderObj = BasicHeader()

    try:
        fp = open(filename,'rb')
    except:
        raise IOError, "The file %s can't be opened" %(filename)

    sts = basicHeaderObj.read(fp)
    fp.close()

    # unreadable header: skip the file rather than fail
    if not(sts):
        print "Skipping the file %s because it has not a valid header" %(filename)
        return 0

    # only the first block's timestamp (basic header utc) is tested;
    # the range is half-open: startUTSeconds <= utc < endUTSeconds
    if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)):
        return 0

    return 1
75 76
def getlastFileFromPath(path, ext):
    """
    Keep only the directory entries matching the "PYYYYDDDSSS.ext" naming
    convention and return the last one in case-insensitive lexicographic
    order (without its path), or None when nothing matches.

    Input:
        path : directory containing the candidate files
        ext  : expected extension (compared case-insensitively)

    Return:
        The last matching file name of the folder, path not included.
    """
    candidates = []

    # name layout:  0 1234 567 89A BCDE
    #               H YYYY DDD SSS .ext
    for entry in os.listdir(path):
        try:
            int(entry[1:5])     # year field must be numeric
            int(entry[5:8])     # day-of-year field must be numeric

            if (os.path.splitext(entry)[-1].upper() != ext.upper()):
                continue
        except:
            continue

        candidates.append(entry)

    if not candidates:
        return None

    candidates.sort(key=str.lower)
    return candidates[-1]
110 111
def checkForRealPath(path, year, doy, set, ext):
    """
    Linux is case sensitive, so probe the four upper/lower-case letter
    combinations of the day directory and the file prefix to locate the
    real file on disk.

    Example:
        the real file is .../.../D2009307/P2009307367.ext

        The function then tries, in order:
            .../.../x2009307/y2009307367.ext
            .../.../x2009307/Y2009307367.ext
            .../.../X2009307/y2009307367.ext
            .../.../X2009307/Y2009307367.ext

    Return:
        (filepath, filename) when one combination exists on disk;
        (None, lastTriedName) when none does;
        (None, None) for an unsupported extension.
    """
    lowered = ext.lower()
    if lowered == ".r":            # voltage
        dirLetters, fileLetters = "dD", "dD"
    elif lowered == ".pdata":      # spectra
        dirLetters, fileLetters = "dD", "pP"
    else:
        return None, None

    filename = None
    for dirLetter in dirLetters:
        for fileLetter in fileLetters:
            # directory xYYYYDDD and file xYYYYDDDSSS.ext
            doypath = "%s%04d%03d" % (dirLetter, year, doy)
            filename = "%s%04d%03d%03d%s" % (fileLetter, year, doy, set, ext)
            filepath = os.path.join(path, doypath, filename)
            if os.path.exists(filepath):
                return filepath, filename

    return None, filename
160 161
class JRODataIO:
    """Common state and interface for Jicamarca radar data readers/writers.

    Holds the four header objects, the file/block bookkeeping counters and
    the current data block. Subclasses must implement __init__ and run.
    """

    # speed of light (m/s)
    c = 3E8

    isConfig = False

    basicHeaderObj = BasicHeader()

    systemHeaderObj = SystemHeader()

    radarControllerHeaderObj = RadarControllerHeader()

    processingHeaderObj = ProcessingHeader()

    # 1 when reading in real time, 0 for offline files
    online = 0

    dtype = None

    pathList = []

    filenameList = []

    filename = None

    ext = None

    flagNoMoreFiles = 0

    flagIsNewFile = 1

    flagTimeBlock = 0

    flagIsNewBlock = 0

    # current open file object
    fp = None

    firstHeaderSize = 0

    # fixed size (bytes) of the basic header
    basicHeaderSize = 24

    versionFile = 1103

    fileSize = None

    ippSeconds = None

    fileSizeByHeader = None

    fileIndex = None

    profileIndex = None

    blockIndex = None

    nTotalBlocks = None

    # maximum gap (seconds) between blocks before flagTimeBlock is raised
    maxTimeStep = 30

    lastUTTime = None

    datablock = None

    # data object handed to the next unit in the chain (see getOutput)
    dataOut = None

    blocksize = None

    def __init__(self):
        # abstract: concrete readers/writers must provide their own __init__
        raise ValueError, "Not implemented"

    def run(self):
        # abstract: concrete readers/writers must provide their own run
        raise ValueError, "Not implemented"

    def getOutput(self):
        """Return the data object produced by the last processed block."""
        return self.dataOut
238 239
239 class JRODataReader(JRODataIO):
240 class JRODataReader(JRODataIO, ProcessingUnit):
240 241
241 242 nReadBlocks = 0
242 243
243 244 delay = 60 #number of seconds waiting a new file
244 245
245 246 nTries = 3 #quantity tries
246 247
247 248 nFiles = 3 #number of files for searching
248 249
249 250
    def __init__(self):

        """
        Abstract: each concrete reader defines its own initialization.
        """

        raise ValueError, "This method has not been implemented"
257 258
258 259
    def createObjByDefault(self):
        """
        Abstract: build and return this reader's default output data object.
        """
        raise ValueError, "This method has not been implemented"
264 265
    def getBlockDimension(self):
        # abstract: compute the shape/size of one data block from the headers
        raise ValueError, "No implemented"
268 269
    def __searchFilesOffLine(self,
                            path,
                            startDate,
                            endDate,
                            startTime=datetime.time(0,0,0),
                            endTime=datetime.time(23,59,59),
                            set=None,
                            expLabel="",
                            ext=".r"):
        """Scan *path* for day directories ("?YYYYDDD") between startDate and
        endDate and collect the data files whose header timestamp falls in
        [startTime, endTime) of each day (see isThisFileinRange).

        Returns (pathList, filenameList), or (None, None) when nothing
        matches. The file list is also stored in self.filenameList.
        NOTE(review): the *set* argument is currently unused.
        """
        dirList = []
        for thisPath in os.listdir(path):
            if os.path.isdir(os.path.join(path,thisPath)):
                dirList.append(thisPath)

        if not(dirList):
            return None, None

        pathList = []
        dateList = []

        thisDate = startDate

        while(thisDate <= endDate):
            year = thisDate.timetuple().tm_year
            doy = thisDate.timetuple().tm_yday

            # day directories are named "?YYYYDDD" (any first letter)
            match = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy))
            if len(match) == 0:
                thisDate += datetime.timedelta(1)
                continue

            pathList.append(os.path.join(path,match[0],expLabel))
            dateList.append(thisDate)
            thisDate += datetime.timedelta(1)

        filenameList = []
        for index in range(len(pathList)):

            thisPath = pathList[index]
            fileList = glob.glob1(thisPath, "*%s" %ext)
            fileList.sort()

            # keep only the files whose data falls in the requested hour
            # range of this particular day
            thisDate = dateList[index]
            startDT = datetime.datetime.combine(thisDate, startTime)
            endDT = datetime.datetime.combine(thisDate, endTime)

            startUtSeconds = time.mktime(startDT.timetuple())
            endUtSeconds = time.mktime(endDT.timetuple())

            for file in fileList:

                filename = os.path.join(thisPath,file)

                if isThisFileinRange(filename, startUtSeconds, endUtSeconds):
                    filenameList.append(filename)

        if not(filenameList):
            return None, None

        self.filenameList = filenameList

        return pathList, filenameList
332 333
    def __searchFilesOnLine(self, path, startDate=None, endDate=None, startTime=None, endTime=None, expLabel = "", ext = None):

        """
        Look in the last folder (optionally constrained by startDate) for the
        most recent data file and return it together with its date fields.

        Input:
            path      : folder containing the data directories

            startDate : start date; directories whose file end time is
                        before it are rejected (datetime.date object)

            endDate   : end date; directories whose file start time is
                        after it are rejected (datetime.date object)

            startTime : start time; files whose end time is before it
                        are rejected (datetime.time object)

            endTime   : end time; files whose start time is after it
                        are rejected (datetime.time object)

            expLabel  : name of the sub-experiment (subfolder)

            ext       : file extension

        Return:
            directory : the directory where the file was found
            filename  : the last file of that directory
            year      : the year
            doy       : the day of year
            set       : the file set number
        """
        dirList = []
        pathList = []
        directory = None

        # keep only the sub-directories of path
        for thisPath in os.listdir(path):
            if os.path.isdir(os.path.join(path, thisPath)):
                dirList.append(thisPath)

        if not(dirList):
            return None, None, None, None, None

        dirList = sorted( dirList, key=str.lower )

        if startDate:
            startDateTime = datetime.datetime.combine(startDate, startTime)
            thisDateTime = startDateTime
            if endDate == None: endDateTime = startDateTime
            else: endDateTime = datetime.datetime.combine(endDate, endTime)

            while(thisDateTime <= endDateTime):
                year = thisDateTime.timetuple().tm_year
                doy = thisDateTime.timetuple().tm_yday

                # day directories are named "?YYYYDDD" (any first letter)
                match = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy))
                if len(match) == 0:
                    thisDateTime += datetime.timedelta(1)
                    continue

                pathList.append(os.path.join(path,match[0], expLabel))
                thisDateTime += datetime.timedelta(1)

            if not(pathList):
                print "\tNo files in range: %s - %s" %(startDateTime.ctime(), endDateTime.ctime())
                return None, None, None, None, None

            directory = pathList[0]

        else:
            # no date filter: take the (lexicographically) last directory
            directory = dirList[-1]
            directory = os.path.join(path,directory)

        filename = getlastFileFromPath(directory, ext)

        if not(filename):
            return None, None, None, None, None

        if not(self.__verifyFile(os.path.join(directory, filename))):
            return None, None, None, None, None

        # filename layout: H YYYY DDD SSS .ext
        year = int( filename[1:5] )
        doy = int( filename[5:8] )
        set = int( filename[8:11] )

        return directory, filename, year, doy, set
422 423
423 def setup(self,
424 path=None,
425 startDate=None,
426 endDate=None,
427 startTime=datetime.time(0,0,0),
428 endTime=datetime.time(23,59,59),
429 set=0,
430 expLabel = "",
431 ext = None,
432 online = False,
433 delay = 60):
434 424
435 if path == None:
436 raise ValueError, "The path is not valid"
437
438 if ext == None:
439 ext = self.ext
440
441 if online:
442 print "Searching files in online mode..."
443 doypath, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext)
444
445 if not(doypath):
446 for nTries in range( self.nTries ):
447 print '\tWaiting %0.2f sec for an valid file in %s: try %02d ...' % (self.delay, path, nTries+1)
448 time.sleep( self.delay )
449 doypath, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=exp)
450 if doypath:
451 break
452
453 if not(doypath):
454 print "There 'isn't valied files in %s" % path
455 return None
456
457 self.year = year
458 self.doy = doy
459 self.set = set - 1
460 self.path = path
461
462 else:
463 print "Searching files in offline mode ..."
464 pathList, filenameList = self.__searchFilesOffLine(path, startDate, endDate, startTime, endTime, set, expLabel, ext)
465
466 if not(pathList):
467 print "No *%s files into the folder %s \nfor the range: %s - %s"%(ext, path,
468 datetime.datetime.combine(startDate,startTime).ctime(),
469 datetime.datetime.combine(endDate,endTime).ctime())
470
471 sys.exit(-1)
472
473
474 self.fileIndex = -1
475 self.pathList = pathList
476 self.filenameList = filenameList
477
478 self.online = online
479 self.delay = delay
480 ext = ext.lower()
481 self.ext = ext
482
483 if not(self.setNextFile()):
484 if (startDate!=None) and (endDate!=None):
485 print "No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
486 elif startDate != None:
487 print "No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
488 else:
489 print "No files"
490
491 sys.exit(-1)
492
493 # self.updateDataHeader()
494
495 return self.dataOut
496 425
def __setNextFileOffline(self):
    """
    Open the next readable file from self.filenameList.

    Candidates that fail self.__verifyFile() are skipped.  On success the
    file is opened in binary mode and the reader bookkeeping is updated.

    Affected:
        self.flagIsNewFile, self.fileIndex, self.filename,
        self.fileSize, self.fp, self.flagNoMoreFiles

    Return:
        1 if a file was opened, 0 when the list is exhausted.
    """
    idFile = self.fileIndex

    while (True):
        idFile += 1
        if not(idFile < len(self.filenameList)):
            # Ran past the end of the list: nothing left to read.
            self.flagNoMoreFiles = 1
            print "No more Files"
            return 0

        filename = self.filenameList[idFile]

        if not(self.__verifyFile(filename)):
            continue

        fileSize = os.path.getsize(filename)
        fp = open(filename,'rb')
        break

    self.flagIsNewFile = 1
    self.fileIndex = idFile
    self.filename = filename
    self.fileSize = fileSize
    self.fp = fp

    print "Setting the file: %s"%self.filename

    return 1
526 455
def __setNextFileOnline(self):
    """
    Look for the next file with enough data to be read inside the current
    folder; if no valid file is found, wait a fixed delay and retry over
    the next candidate files.

    Affected:
        self.flagIsNewFile
        self.filename
        self.fileSize
        self.fp
        self.set
        self.flagNoMoreFiles

    Return:
        0 : no valid file could be found after the search
        1 : the file was opened successfully and is ready to be read

    Exceptions:
        If a given file cannot be opened
    """
    nFiles = 0
    fileOk_flag = False
    firstTime_flag = True

    self.set += 1

    # Look for the first available file.
    file, filename = checkForRealPath( self.path, self.year, self.doy, self.set, self.ext )
    if file:
        if self.__verifyFile(file, False):
            fileOk_flag = True

    # If no file was found, wait and search again.
    if not(fileOk_flag):
        for nFiles in range(self.nFiles+1): # try the next self.nFiles+1 candidate files

            if firstTime_flag: # on the first pass retry self.nTries times
                tries = self.nTries
            else:
                tries = 1 # afterwards only one try per candidate

            for nTries in range( tries ):
                if firstTime_flag:
                    print "\tWaiting %0.2f sec for the file \"%s\" , try %03d ..." % ( self.delay, filename, nTries+1 )
                    time.sleep( self.delay )
                else:
                    print "\tSearching next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext)

                file, filename = checkForRealPath( self.path, self.year, self.doy, self.set, self.ext )
                if file:
                    if self.__verifyFile(file):
                        fileOk_flag = True
                        break

            if fileOk_flag:
                break

            firstTime_flag = False

            print "\tSkipping the file \"%s\" due to this file doesn't exist" % filename
            self.set += 1

            if nFiles == (self.nFiles-1): # still nothing: move on to the next day folder
                self.set = 0
                self.doy += 1

    if fileOk_flag:
        self.fileSize = os.path.getsize( file )
        self.filename = file
        self.flagIsNewFile = 1
        if self.fp != None: self.fp.close()
        self.fp = open(file)
        self.flagNoMoreFiles = 0
        print 'Setting the file: %s' % file
    else:
        # Nothing found: clear the current-file state.
        self.fileSize = 0
        self.filename = None
        self.flagIsNewFile = 0
        self.fp = None
        self.flagNoMoreFiles = 1
        print 'No more Files'

    return fileOk_flag
611 540
612 541
def setNextFile(self):
    """Close the current file (if any), open the next one and read its
    first header.

    Return:
        1 on success, 0 when no further file could be opened.
    """
    if self.fp != None:
        self.fp.close()

    opened = self.__setNextFileOnline() if self.online else self.__setNextFileOffline()

    if not opened:
        return 0

    self.__readFirstHeader()
    self.nReadBlocks = 0
    return 1
628 557
def __setNewBlock(self):
    """
    Position the file pointer at the next data block, switching to the
    next file when the current one has no complete block left.

    Sets self.flagTimeBlock = 1 when the UTC gap between consecutive
    basic headers exceeds self.maxTimeStep (a time discontinuity).

    Return:
        1 if a new block is available, 0 otherwise.
    """
    if self.fp == None:
        return 0

    if self.flagIsNewFile:
        # The first block of a new file is already positioned.
        return 1

    self.lastUTTime = self.basicHeaderObj.utc
    currentSize = self.fileSize - self.fp.tell()
    neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize

    # Enough bytes left in this file for a basic header + one block.
    if (currentSize >= neededSize):
        self.__rdBasicHeader()
        return 1

    if not(self.setNextFile()):
        return 0

    deltaTime = self.basicHeaderObj.utc - self.lastUTTime

    self.flagTimeBlock = 0

    if deltaTime > self.maxTimeStep:
        self.flagTimeBlock = 1

    return 1
655 584
656 585
def readNextBlock(self):
    """Advance to the next data block and read it.

    Return:
        1 if a block was positioned and read, 0 otherwise.
    """
    # Short-circuit keeps the original order: readBlock() is only
    # attempted after __setNewBlock() succeeds.
    if self.__setNewBlock() and self.readBlock():
        return 1
    return 0
665 594
def __rdProcessingHeader(self, fp=None):
    """Read a Processing Header from *fp* (defaults to the reader's own
    file pointer self.fp)."""
    source = self.fp if fp == None else fp
    self.processingHeaderObj.read(source)
671 600
def __rdRadarControllerHeader(self, fp=None):
    """Read a Radar Controller Header from *fp* (defaults to the reader's
    own file pointer self.fp)."""
    source = self.fp if fp == None else fp
    self.radarControllerHeaderObj.read(source)
677 606
def __rdSystemHeader(self, fp=None):
    """Read a System Header from *fp* (defaults to the reader's own file
    pointer self.fp)."""
    source = self.fp if fp == None else fp
    self.systemHeaderObj.read(source)
683 612
def __rdBasicHeader(self, fp=None):
    """Read a Basic Header from *fp* (defaults to the reader's own file
    pointer self.fp)."""
    source = self.fp if fp == None else fp
    self.basicHeaderObj.read(source)
689 618
690 619
def __readFirstHeader(self):
    """
    Read the four headers (basic, system, radar controller, processing)
    at the start of a newly opened file, then derive the complex sample
    dtype, the IPP in seconds and the expected file size.

    Raises:
        ValueError when the data-type code decoded from the processing
        header is outside the range 0..5.
    """
    self.__rdBasicHeader()
    self.__rdSystemHeader()
    self.__rdRadarControllerHeader()
    self.__rdProcessingHeader()

    self.firstHeaderSize = self.basicHeaderObj.size

    # Decode the datatype bit in processFlags (position relative to
    # DATATYPE_CHAR) into an index 0..5.
    datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
    if datatype == 0:
        datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
    elif datatype == 1:
        datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')])
    elif datatype == 2:
        datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')])
    elif datatype == 3:
        datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')])
    elif datatype == 4:
        datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')])
    elif datatype == 5:
        datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')])
    else:
        raise ValueError, 'Data type was not defined'

    self.dtype = datatype_str
    # NOTE(review): assumes ipp is a distance and self.c the propagation
    # speed, giving the two-way inter-pulse period in seconds — confirm units.
    self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
    # Expected size: all blocks + first (long) header + one basic header
    # per remaining block.
    self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1)
    self.getBlockDimension()
721 650
722 651
def __verifyFile(self, filename, msgFlag=True):
    """
    Check that *filename* can be opened and holds at least one complete
    data block after the headers.

    When the processing header has not been filled in yet
    (neededSize == 0), the headers are read from the candidate file
    itself to compute the required size.

    Inputs:
        filename : full path of the file to check
        msgFlag  : when True, print diagnostic messages

    Return:
        True if the file is usable, False otherwise.
    """
    msg = None
    try:
        fp = open(filename, 'rb')
        currentPosition = fp.tell()
    except:
        if msgFlag:
            print "The file %s can't be opened" % (filename)
        return False

    neededSize = self.processingHeaderObj.blockSize + self.firstHeaderSize

    if neededSize == 0:
        # Headers not known yet: read them from this very file.
        basicHeaderObj = BasicHeader()
        systemHeaderObj = SystemHeader()
        radarControllerHeaderObj = RadarControllerHeader()
        processingHeaderObj = ProcessingHeader()

        try:
            if not( basicHeaderObj.read(fp) ): raise ValueError
            if not( systemHeaderObj.read(fp) ): raise ValueError
            if not( radarControllerHeaderObj.read(fp) ): raise ValueError
            if not( processingHeaderObj.read(fp) ): raise ValueError
            data_type = int(numpy.log2((processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))

            neededSize = processingHeaderObj.blockSize + basicHeaderObj.size

        except:
            if msgFlag:
                print "\tThe file %s is empty or it hasn't enough data" % filename

            fp.close()
            return False
    else:
        msg = "\tSkipping the file %s due to it hasn't enough data" %filename

    fp.close()
    fileSize = os.path.getsize(filename)
    currentSize = fileSize - currentPosition
    if currentSize < neededSize:
        if msgFlag and (msg != None):
            print msg
        return False

    return True
768 697
698 def setup(self,
699 path=None,
700 startDate=None,
701 endDate=None,
702 startTime=datetime.time(0,0,0),
703 endTime=datetime.time(23,59,59),
704 set=0,
705 expLabel = "",
706 ext = None,
707 online = False,
708 delay = 60):
709
710 if path == None:
711 raise ValueError, "The path is not valid"
712
713 if ext == None:
714 ext = self.ext
715
716 if online:
717 print "Searching files in online mode..."
718 doypath, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext)
719
720 if not(doypath):
721 for nTries in range( self.nTries ):
722 print '\tWaiting %0.2f sec for an valid file in %s: try %02d ...' % (self.delay, path, nTries+1)
723 time.sleep( self.delay )
724 doypath, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=exp)
725 if doypath:
726 break
727
728 if not(doypath):
729 print "There 'isn't valied files in %s" % path
730 return None
731
732 self.year = year
733 self.doy = doy
734 self.set = set - 1
735 self.path = path
736
737 else:
738 print "Searching files in offline mode ..."
739 pathList, filenameList = self.__searchFilesOffLine(path, startDate, endDate, startTime, endTime, set, expLabel, ext)
740
741 if not(pathList):
742 print "No *%s files into the folder %s \nfor the range: %s - %s"%(ext, path,
743 datetime.datetime.combine(startDate,startTime).ctime(),
744 datetime.datetime.combine(endDate,endTime).ctime())
745
746 sys.exit(-1)
747
748
749 self.fileIndex = -1
750 self.pathList = pathList
751 self.filenameList = filenameList
752
753 self.online = online
754 self.delay = delay
755 ext = ext.lower()
756 self.ext = ext
757
758 if not(self.setNextFile()):
759 if (startDate!=None) and (endDate!=None):
760 print "No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime())
761 elif startDate != None:
762 print "No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime())
763 else:
764 print "No files"
765
766 sys.exit(-1)
767
768 # self.updateDataHeader()
769
770 return self.dataOut
771
def getData(self):
    """Abstract hook: deliver one data unit from the reader buffer.

    Bugfix: the stub was declared without *self*, so any call through an
    instance (self.getData() in run) raised TypeError.  Subclasses
    (e.g. VoltageReader) override this method.
    """
    pass
771 774
def hasNotDataInBuffer(self):
    """Abstract hook: report whether the read buffer is exhausted.

    Bugfix: the stub was declared without *self*, so any call through an
    instance raised TypeError.  Subclasses override this method.
    """
    pass
774 777
def readBlock(self):
    """Abstract hook: read one data block from the current file.

    Bugfix: the stub was declared without *self*, so any call through an
    instance raised TypeError.  Subclasses override this method.
    """
    pass
777 780
def run(self, **kwargs):
    """Operation entry point: configure the reader on the first call,
    then fetch one unit of data."""
    if not self.isConfig:
        self.setup(**kwargs)
        self.isConfig = True

    self.getData()
787 790
788 class JRODataWriter(JRODataIO):
791 class JRODataWriter(JRODataIO, Operation):
789 792
"""
This class writes processed data files (.r or .pdata).  Data is always
written block by block.
"""

blockIndex = 0          # blocks written into the current file

path = None             # destination root directory

setFile = None          # set number of the current output file

profilesPerBlock = None # profiles accumulated per block

blocksPerFile = None    # blocks written per output file

nWriteBlocks = 0        # total blocks written so far
def __init__(self, dataOut=None):
    # Abstract base: concrete writers (e.g. VoltageWriter) implement __init__.
    raise ValueError, "Not implemented"
809 812
810 813
def hasAllDataInBuffer(self):
    # Abstract: return 1 when the buffer holds one complete block.
    raise ValueError, "Not implemented"
813 816
814 817
def setBlockDimension(self):
    # Abstract: define the shapes of the write buffers.
    raise ValueError, "Not implemented"
817 820
818 821
def writeBlock(self):
    # Abstract: write the buffered block to the output file.
    raise ValueError, "No implemented"
821 824
822 825
def putData(self):
    # Abstract: push one data unit into the buffer, flushing full blocks.
    raise ValueError, "No implemented"
825 828
def getDataHeader(self):
    """
    Obtain a copy of the First Header (abstract; subclasses implement it).

    Affected:
        self.basicHeaderObj
        self.systemHeaderObj
        self.radarControllerHeaderObj
        self.processingHeaderObj

    Return:
        None
    """

    raise ValueError, "No implemented"
842 845
def getBasicHeader(self):
    """Fill the basic header with the current block counter and the
    timestamp of the outgoing data."""
    header = self.basicHeaderObj

    header.size = self.basicHeaderSize # bytes
    header.version = self.versionFile
    header.dataBlock = self.nTotalBlocks

    # Split the UTC timestamp into whole seconds + milliseconds.
    wholeSeconds = numpy.floor(self.dataOut.utctime)
    header.utc = wholeSeconds
    header.miliSecond = (self.dataOut.utctime - wholeSeconds) * 1000.0
    header.timeZone = 0
    header.dstFlag = 0
    header.errorCount = 0
857 860
def __writeFirstHeader(self):
    """
    Write the first header of the file: the Basic Header followed by the
    Long Header (SystemHeader, RadarControllerHeader, ProcessingHeader).

    Affected:
        self.dtype

    Return:
        None
    """

    # The basic header's size field covers itself plus the long header.
    sizeLongHeader = self.systemHeaderObj.size + self.radarControllerHeaderObj.size + self.processingHeaderObj.size
    self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader

    self.basicHeaderObj.write(self.fp)
    self.systemHeaderObj.write(self.fp)
    self.radarControllerHeaderObj.write(self.fp)
    self.processingHeaderObj.write(self.fp)

    self.dtype = self.dataOut.dtype
880 883
def __setNewBlock(self):
    """
    Write the First Header when starting a new file; otherwise write just
    a Basic Header for the next block.

    Return:
        0 : if nothing could be written
        1 : if the Basic or the First Header was written
    """
    if self.fp == None:
        self.setNextFile()

    if self.flagIsNewFile:
        return 1

    # Still room in the current file: only a basic header is needed.
    if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile:
        self.basicHeaderObj.write(self.fp)
        return 1

    if not( self.setNextFile() ):
        return 0

    return 1
903 906
904 907
def writeNextBlock(self):
    """
    Select the next block of data and write it to the current file.

    Return:
        0 : if the data block could not be written
        1 : if the data block was written successfully
    """
    if not( self.__setNewBlock() ):
        return 0

    self.writeBlock()

    return 1
919 922
def setNextFile(self):
    """
    Determine and open the next file to be written.

    Affected:
        self.filename
        self.subfolder
        self.fp
        self.setFile
        self.flagIsNewFile

    Return:
        0 : if the file cannot be written
        1 : if the file is ready to be written
    """
    ext = self.ext
    path = self.path

    if self.fp != None:
        self.fp.close()

    # NOTE(review): uses dataOut.dataUtcTime while the reader side uses
    # dataOut.utctime — confirm the attribute name on the data object.
    timeTuple = time.localtime( self.dataOut.dataUtcTime)
    subfolder = 'D%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday)

    doypath = os.path.join( path, subfolder )
    if not( os.path.exists(doypath) ):
        os.mkdir(doypath)
        self.setFile = -1 # initialize the set counter
    else:
        filesList = os.listdir( doypath )
        if len( filesList ) > 0:
            filesList = sorted( filesList, key=str.lower )
            filen = filesList[-1]
            # Filenames are expected to have the format:
            #     0 1234 567 89A BCDE (hex)
            #     x YYYY DDD SSS .ext
            if isNumber( filen[8:11] ):
                self.setFile = int( filen[8:11] ) # continue from the last file's set
            else:
                self.setFile = -1
        else:
            self.setFile = -1 # initialize the set counter

    setFile = self.setFile
    setFile += 1

    file = '%s%4.4d%3.3d%3.3d%s' % (self.optchar,
                                    timeTuple.tm_year,
                                    timeTuple.tm_yday,
                                    setFile,
                                    ext )

    filename = os.path.join( path, subfolder, file )

    fp = open( filename,'wb' )

    self.blockIndex = 0

    # Save attributes.
    self.filename = filename
    self.subfolder = subfolder
    self.fp = fp
    self.setFile = setFile
    self.flagIsNewFile = 1

    self.getDataHeader()

    print 'Writing the file: %s'%self.filename

    self.__writeFirstHeader()

    return 1
992 995
def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=None, set=0, ext=None):
    """
    Configure the output format and write the First Header.

    Inputs:
        dataOut          : data object whose contents will be written
        path             : destination directory for the created files
        blocksPerFile    : number of blocks written per file
        profilesPerBlock : profiles per block (may be None)
        set              : starting set number for the files
        ext              : file extension; defaults to self.ext

    Return:
        0 : if the setup failed
        1 : if the setup succeeded
    """

    if ext == None:
        ext = self.ext

    ext = ext.lower()

    self.ext = ext

    self.path = path

    self.setFile = set - 1

    self.blocksPerFile = blocksPerFile

    self.profilesPerBlock = profilesPerBlock

    self.dataOut = dataOut

    if not(self.setNextFile()):
        print "There isn't a next file"
        return 0

    self.setBlockDimension()

    return 1
1031 1034
def run(self, dataOut, **kwargs):
    """Operation entry point: configure the writer on the first call,
    then push the current data unit into the output buffer."""
    if not self.isConfig:
        self.setup(dataOut, **kwargs)
        self.isConfig = True

    self.putData()
1040 1043
1041 1044 class VoltageReader(JRODataReader):
"""
This class reads voltage data from raw-data files (.r).  Reading is
always performed block by block; the data read (a 3-D array:
profiles * heights * channels) is stored in the "datablock" buffer.

It holds instances of BasicHeader, SystemHeader, RadarControllerHeader
and Voltage.  The first three store the data header (metadata); the
fourth (Voltage) receives one profile from the buffer on every
getData() call.

Example:

    dpath = "/home/myuser/data"

    startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)

    endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)

    readerObj = VoltageReader()

    readerObj.setup(dpath, startTime, endTime)

    while(True):

        #to get one profile
        profile = readerObj.getData()

        #print the profile
        print profile

        #If you want to see all datablock
        print readerObj.datablock

        if readerObj.flagNoMoreFiles:
            break

"""

ext = ".r"     # raw voltage file extension

optchar = "D"  # first character of data file names
dataOut = None # Voltage object filled by getData()
1087 1090
def __init__(self):
    """
    Initializer of the VoltageReader class.

    A Voltage object is created internally (self.dataOut); it stores one
    profile of data every time getData() is requested.  The profile is
    taken from the data buffer; when the buffer is empty a new block is
    read from file.

    Affected:
        self.dataOut

    Return:
        None
    """

    self.isConfig = False
    self.datablock = None            # current block: (channels, profiles, heights)
    self.utc = 0
    self.ext = ".r"
    self.optchar = "D"

    # Header containers filled by __readFirstHeader().
    self.basicHeaderObj = BasicHeader()
    self.systemHeaderObj = SystemHeader()
    self.radarControllerHeaderObj = RadarControllerHeader()
    self.processingHeaderObj = ProcessingHeader()

    self.online = 0
    self.fp = None
    self.idFile = None
    self.dtype = None
    self.fileSizeByHeader = None
    self.filenameList = []
    self.filename = None
    self.fileSize = None
    self.firstHeaderSize = 0
    self.basicHeaderSize = 24
    self.pathList = []
    # NOTE(review): filenameList is initialized twice (also above);
    # harmless but redundant.
    self.filenameList = []
    self.lastUTTime = 0
    self.maxTimeStep = 30            # seconds; larger gaps set flagTimeBlock
    self.flagNoMoreFiles = 0
    self.set = 0
    self.path = None
    self.profileIndex = 9999         # forces a block read on first getData()
    self.delay = 3 #seconds
    self.nTries = 3 #quantity tries
    self.nFiles = 3 #number of files for searching
    self.nReadBlocks = 0
    self.flagIsNewFile = 1
    self.ippSeconds = 0
    self.flagTimeBlock = 0
    self.flagIsNewBlock = 0
    self.nTotalBlocks = 0
    self.blocksize = 0
    self.dataOut = self.createObjByDefault()
1182 1185
def createObjByDefault(self):
    """Build and return the default output container (a Voltage object)
    for this reader."""
    return Voltage()
1188 1191
def __hasNotDataInBuffer(self):
    # 1 when every profile of the current block has been consumed, else 0.
    return int(self.profileIndex >= self.processingHeaderObj.profilesPerBlock)
1193 1196
1194 1197
def getBlockDimension(self):
    """
    Compute how many samples make up one data block.

    Affected:
        self.blocksize

    Return:
        None
    """
    profiles = self.processingHeaderObj.profilesPerBlock
    heights = self.processingHeaderObj.nHeights
    channels = self.systemHeaderObj.nChannels
    self.blocksize = profiles * heights * channels
1207 1210
1208 1211
def readBlock(self):
    """
    Read one data block from the current position of the file pointer
    (self.fp) and update the block-related state.  The data read is
    stored in self.datablock and the profile counter is reset to 0.

    Inputs:
        None

    Return:
        1 on success, 0 when the block is incomplete.

    Affected:
        self.profileIndex
        self.datablock
        self.flagIsNewFile
        self.flagIsNewBlock
        self.nTotalBlocks

    Exceptions:
        A read block that cannot be reshaped is reported as invalid.
    """

    junk = numpy.fromfile( self.fp, self.dtype, self.blocksize )

    try:
        # On-disk layout is (profiles, heights, channels).
        junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
    except:
        print "The read block (%3d) has not enough data" %self.nReadBlocks
        return 0

    # Reorder to (channels, profiles, heights) and build complex samples.
    junk = numpy.transpose(junk, (2,0,1))
    self.datablock = junk['real'] + junk['imag']*1j

    self.profileIndex = 0

    self.flagIsNewFile = 0
    self.flagIsNewBlock = 1

    self.nTotalBlocks += 1
    self.nReadBlocks += 1

    return 1
1253 1256
1254 1257
1255 1258 def getData(self):
1256 1259 """
1257 1260 getData obtiene una unidad de datos del buffer de lectura y la copia a la clase "Voltage"
1258 1261 con todos los parametros asociados a este (metadata). cuando no hay datos en el buffer de
1259 1262 lectura es necesario hacer una nueva lectura de los bloques de datos usando "readNextBlock"
1260 1263
1261 1264 Ademas incrementa el contador del buffer en 1.
1262 1265
1263 1266 Return:
1264 1267 data : retorna un perfil de voltages (alturas * canales) copiados desde el
1265 1268 buffer. Si no hay mas archivos a leer retorna None.
1266 1269
1267 1270 Variables afectadas:
1268 1271 self.dataOut
1269 1272 self.profileIndex
1270 1273
1271 1274 Affected:
1272 1275 self.dataOut
1273 1276 self.profileIndex
1274 1277 self.flagTimeBlock
1275 1278 self.flagIsNewBlock
1276 1279 """
1277 1280 if self.flagNoMoreFiles: return 0
1278 1281
1279 1282 self.flagTimeBlock = 0
1280 1283 self.flagIsNewBlock = 0
1281 1284
1282 1285 if self.__hasNotDataInBuffer():
1283 1286
1284 1287 if not( self.readNextBlock() ):
1285 1288 return 0
1286 1289
1287 1290 # self.updateDataHeader()
1288 1291
1289 1292 if self.flagNoMoreFiles == 1:
1290 1293 print 'Process finished'
1291 1294 return 0
1292 1295
1293 1296 #data es un numpy array de 3 dmensiones (perfiles, alturas y canales)
1294 1297
1295 1298 if self.datablock == None:
1296 1299 self.dataOut.flagNoData = True
1297 1300 return 0
1298 1301
1299 1302 self.dataOut.data = self.datablock[:,self.profileIndex,:]
1300 1303
1301 1304 self.dataOut.dtype = self.dtype
1302 1305
1303 1306 self.dataOut.nChannels = self.systemHeaderObj.nChannels
1304 1307
1305 1308 self.dataOut.nHeights = self.processingHeaderObj.nHeights
1306 1309
1307 1310 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
1308 1311
1309 1312 xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight
1310 1313
1311 1314 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)
1312 1315
1313 1316 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
1314 1317
1315 1318 self.dataOut.channelIndexList = range(self.systemHeaderObj.nChannels)
1316 1319
1317 1320 self.dataOut.flagTimeBlock = self.flagTimeBlock
1318 1321
1319 1322 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000. + self.profileIndex * self.ippSeconds
1320 1323
1321 1324 self.dataOut.ippSeconds = self.ippSeconds
1322 1325
1323 1326 self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt
1324 1327
1325 1328 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
1326 1329
1327 1330 self.dataOut.flagShiftFFT = False
1328 1331
1329 1332 if self.processingHeaderObj.code != None:
1330 1333 self.dataOut.nCode = self.processingHeaderObj.nCode
1331 1334
1332 1335 self.dataOut.nBaud = self.processingHeaderObj.nBaud
1333 1336
1334 1337 self.dataOut.code = self.processingHeaderObj.code
1335 1338
1336 1339 self.profileIndex += 1
1337 1340
1338 1341 self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()
1339 1342
1340 1343 self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()
1341 1344
1342 1345 self.dataOut.flagNoData = False
1343 1346
1344 1347 # print self.profileIndex, self.dataOut.utctime
1345 1348 # if self.profileIndex == 800:
1346 1349 # a=1
1347 1350
1348 1351 return self.dataOut.data
1349 1352
1350 1353
1351 1354 class VoltageWriter(JRODataWriter):
1352 1355 """
1353 1356 Esta clase permite escribir datos de voltajes a archivos procesados (.r). La escritura
1354 1357 de los datos siempre se realiza por bloques.
1355 1358 """
1356 1359
1357 1360 ext = ".r"
1358 1361
1359 1362 optchar = "D"
1360 1363
1361 1364 shapeBuffer = None
1362 1365
1363 1366
def __init__(self):
    """
    Initializer of the VoltageWriter class.

    Affected:
        self.dataOut

    Return: None
    """

    self.nTotalBlocks = 0

    self.profileIndex = 0

    self.isConfig = False

    self.fp = None

    self.flagIsNewFile = 1

    # NOTE(review): nTotalBlocks is assigned twice (also above);
    # harmless but redundant.
    self.nTotalBlocks = 0

    self.flagIsNewBlock = 0

    self.flagNoMoreFiles = 0

    self.setFile = None

    self.dtype = None

    self.path = None

    self.noMoreFiles = 0

    self.filename = None

    # Header containers written into every output file.
    self.basicHeaderObj = BasicHeader()

    self.systemHeaderObj = SystemHeader()

    self.radarControllerHeaderObj = RadarControllerHeader()

    self.processingHeaderObj = ProcessingHeader()
1407 1410
def hasAllDataInBuffer(self):
    """Return 1 when the buffer holds one complete block of profiles,
    0 otherwise."""
    if self.profileIndex < self.processingHeaderObj.profilesPerBlock:
        return 0
    return 1
1412 1415
1413 1416
def setBlockDimension(self):
    """
    Define the shapes of the sub-buffers that make up one output block.

    Affected:
        self.shapeBuffer
        self.datablock

    Return: None
    """
    profiles = self.processingHeaderObj.profilesPerBlock
    heights = self.processingHeaderObj.nHeights
    channels = self.systemHeaderObj.nChannels

    # On-disk layout: (profiles, heights, channels).
    self.shapeBuffer = (profiles, heights, channels)

    # In-memory accumulation buffer: (channels, profiles, heights).
    self.datablock = numpy.zeros((channels, profiles, heights),
                                 dtype=numpy.dtype('complex'))
1433 1436
1434 1437
1435 1438 def writeBlock(self):
1436 1439 """
1437 1440 Escribe el buffer en el file designado
1438 1441
1439 1442 Affected:
1440 1443 self.profileIndex
1441 1444 self.flagIsNewFile
1442 1445 self.flagIsNewBlock
1443 1446 self.nTotalBlocks
1444 1447 self.blockIndex
1445 1448
1446 1449 Return: None
1447 1450 """
1448 1451 data = numpy.zeros( self.shapeBuffer, self.dtype )
1449 1452
1450 1453 junk = numpy.transpose(self.datablock, (1,2,0))
1451 1454
1452 1455 data['real'] = junk.real
1453 1456 data['imag'] = junk.imag
1454 1457
1455 1458 data = data.reshape( (-1) )
1456 1459
1457 1460 data.tofile( self.fp )
1458 1461
1459 1462 self.datablock.fill(0)
1460 1463
1461 1464 self.profileIndex = 0
1462 1465 self.flagIsNewFile = 0
1463 1466 self.flagIsNewBlock = 1
1464 1467
1465 1468 self.blockIndex += 1
1466 1469 self.nTotalBlocks += 1
1467 1470
    def putData(self):
        """
        Buffer one profile from dataOut and write the block to file when full.

        Affected:
            self.flagIsNewBlock
            self.profileIndex

        Return:
            0 : if there is no data or no more files can be written
            1 : if a block of data was written to a file
        """
        if self.dataOut.flagNoData:
            return 0

        self.flagIsNewBlock = 0

        if self.dataOut.flagTimeBlock:
            # Time discontinuity: drop the partial block and open a new file.
            self.datablock.fill(0)
            self.profileIndex = 0
            self.setNextFile()

        if self.profileIndex == 0:
            # First profile of a block: refresh the basic (timestamp) header.
            self.getBasicHeader()

        self.datablock[:,self.profileIndex,:] = self.dataOut.data

        self.profileIndex += 1

        if self.hasAllDataInBuffer():
            #if self.flagIsNewFile:
            self.writeNextBlock()
#            self.getDataHeader()

        if self.flagNoMoreFiles:
            #print 'Process finished'
            return 0

        return 1
1508 1511
1509 1512 def __getProcessFlags(self):
1510 1513
1511 1514 processFlags = 0
1512 1515
1513 1516 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
1514 1517 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
1515 1518 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
1516 1519 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
1517 1520 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
1518 1521 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
1519 1522
1520 1523 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
1521 1524
1522 1525
1523 1526
1524 1527 datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
1525 1528 PROCFLAG.DATATYPE_SHORT,
1526 1529 PROCFLAG.DATATYPE_LONG,
1527 1530 PROCFLAG.DATATYPE_INT64,
1528 1531 PROCFLAG.DATATYPE_FLOAT,
1529 1532 PROCFLAG.DATATYPE_DOUBLE]
1530 1533
1531 1534
1532 1535 for index in range(len(dtypeList)):
1533 1536 if self.dataOut.dtype == dtypeList[index]:
1534 1537 dtypeValue = datatypeValueList[index]
1535 1538 break
1536 1539
1537 1540 processFlags += dtypeValue
1538 1541
1539 1542 if self.dataOut.flagDecodeData:
1540 1543 processFlags += PROCFLAG.DECODE_DATA
1541 1544
1542 1545 if self.dataOut.flagDeflipData:
1543 1546 processFlags += PROCFLAG.DEFLIP_DATA
1544 1547
1545 1548 if self.dataOut.code != None:
1546 1549 processFlags += PROCFLAG.DEFINE_PROCESS_CODE
1547 1550
1548 1551 if self.dataOut.nCohInt > 1:
1549 1552 processFlags += PROCFLAG.COHERENT_INTEGRATION
1550 1553
1551 1554 return processFlags
1552 1555
1553 1556
1554 1557 def __getBlockSize(self):
1555 1558 '''
1556 1559 Este metodos determina el cantidad de bytes para un bloque de datos de tipo Voltage
1557 1560 '''
1558 1561
1559 1562 dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
1560 1563 dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
1561 1564 dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
1562 1565 dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
1563 1566 dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
1564 1567 dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])
1565 1568
1566 1569 dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
1567 1570 datatypeValueList = [1,2,4,8,4,8]
1568 1571 for index in range(len(dtypeList)):
1569 1572 if self.dataOut.dtype == dtypeList[index]:
1570 1573 datatypeValue = datatypeValueList[index]
1571 1574 break
1572 1575
1573 1576 blocksize = int(self.dataOut.nHeights * self.dataOut.nChannels * self.dataOut.nProfiles * datatypeValue * 2)
1574 1577
1575 1578 return blocksize
1576 1579
1577 1580 def getDataHeader(self):
1578 1581
1579 1582 """
1580 1583 Obtiene una copia del First Header
1581 1584
1582 1585 Affected:
1583 1586 self.systemHeaderObj
1584 1587 self.radarControllerHeaderObj
1585 1588 self.dtype
1586 1589
1587 1590 Return:
1588 1591 None
1589 1592 """
1590 1593
1591 1594 self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
1592 1595 self.systemHeaderObj.nChannels = self.dataOut.nChannels
1593 1596 self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()
1594 1597
1595 1598 self.getBasicHeader()
1596 1599
1597 1600 processingHeaderSize = 40 # bytes
1598 1601 self.processingHeaderObj.dtype = 0 # Voltage
1599 1602 self.processingHeaderObj.blockSize = self.__getBlockSize()
1600 1603 self.processingHeaderObj.profilesPerBlock = self.profilesPerBlock
1601 1604 self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
1602 1605 self.processingHeaderObj.nWindows = 1 #podria ser 1 o self.dataOut.processingHeaderObj.nWindows
1603 1606 self.processingHeaderObj.processFlags = self.__getProcessFlags()
1604 1607 self.processingHeaderObj.nCohInt = self.dataOut.nCohInt
1605 1608 self.processingHeaderObj.nIncohInt = 1 # Cuando la data de origen es de tipo Voltage
1606 1609 self.processingHeaderObj.totalSpectra = 0 # Cuando la data de origen es de tipo Voltage
1607 1610
1608 1611 if self.dataOut.code != None:
1609 1612 self.processingHeaderObj.code = self.dataOut.code
1610 1613 self.processingHeaderObj.nCode = self.dataOut.nCode
1611 1614 self.processingHeaderObj.nBaud = self.dataOut.nBaud
1612 1615 codesize = int(8 + 4 * self.dataOut.nCode * self.dataOut.nBaud)
1613 1616 processingHeaderSize += codesize
1614 1617
1615 1618 if self.processingHeaderObj.nWindows != 0:
1616 1619 self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
1617 1620 self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
1618 1621 self.processingHeaderObj.nHeights = self.dataOut.nHeights
1619 1622 self.processingHeaderObj.samplesWin = self.dataOut.nHeights
1620 1623 processingHeaderSize += 12
1621 1624
1622 1625 self.processingHeaderObj.size = processingHeaderSize
1623 1626
class SpectraReader(JRODataReader):
    """
    Reads spectra data from processed (".pdata") files. Data is always read
    one block at a time; the data read (a 3-D array) is stored in three
    buffers, one each for the Self Spectra, the Cross Spectra and the DC
    channels:

        pairsOfEqualChannels * heights * profiles     (Self Spectra)
        pairsOfDifferentChannels * heights * profiles (Cross Spectra)
        channels * heights                            (DC Channels)

    This class holds instances of BasicHeader, SystemHeader,
    RadarControllerHeader and Spectra. The first three store the data header
    (metadata); the fourth (Spectra) receives one block of data from the
    buffer each time the "getData" method is called.

    Example:
        dpath = "/home/myuser/data"

        startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0)

        endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0)

        readerObj = SpectraReader()

        readerObj.setup(dpath, startTime, endTime)

        while(True):

            readerObj.getData()

            print readerObj.data_spc

            print readerObj.data_cspc

            print readerObj.data_dc

            if readerObj.flagNoMoreFiles:
                break

    """

    # Points to read per block for each sub-buffer; computed in
    # getBlockDimension().
    pts2read_SelfSpectra = 0

    pts2read_CrossSpectra = 0

    pts2read_DCchannels = 0

    # Processed spectra files use the ".pdata" extension and the "P"
    # filename type character.
    ext = ".pdata"

    optchar = "P"

    dataOut = None

    # Channel/pair bookkeeping derived from the spectraComb header field.
    nRdChannels = None

    nRdPairs = None

    # NOTE(review): mutable class-level default; shadowed by the instance
    # attribute assigned in getBlockDimension().
    rdPairList = []


    def __init__(self):
        """
        Initialize the SpectraReader for reading spectra data.

        Inputs:
            dataOut : Spectra object used to store one profile of data each
                      time a request (getData) is made. The profile is taken
                      from the data buffer; if the buffer is empty a new
                      block of data is read. If this parameter is not given,
                      one is created internally.

        Affected:
            self.dataOut

        Return : None
        """

        self.isConfig = False

        self.pts2read_SelfSpectra = 0

        self.pts2read_CrossSpectra = 0

        self.pts2read_DCchannels = 0

        self.datablock = None

        self.utc = None

        self.ext = ".pdata"

        self.optchar = "P"

        # Header objects filled from each file as it is read.
        self.basicHeaderObj = BasicHeader()

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()

        self.online = 0

        self.fp = None

        self.idFile = None

        self.dtype = None

        self.fileSizeByHeader = None

        self.filenameList = []

        self.filename = None

        self.fileSize = None

        self.firstHeaderSize = 0

        self.basicHeaderSize = 24

        self.pathList = []

        self.lastUTTime = 0

        self.maxTimeStep = 30

        self.flagNoMoreFiles = 0

        self.set = 0

        self.path = None

        self.delay = 3 # seconds between retries (online mode)

        self.nTries = 3 # quantity of tries

        self.nFiles = 3 # number of files for searching

        self.nReadBlocks = 0

        self.flagIsNewFile = 1

        self.ippSeconds = 0

        self.flagTimeBlock = 0

        self.flagIsNewBlock = 0

        self.nTotalBlocks = 0

        self.blocksize = 0

        self.dataOut = self.createObjByDefault()


    def createObjByDefault(self):
        # Default output container used when the caller supplies none.
        dataObj = Spectra()

        return dataObj

    def __hasNotDataInBuffer(self):
        # Spectra data is consumed one whole block per getData() call, so
        # the buffer never holds leftover data between calls.
        return 1


    def getBlockDimension(self):
        """
        Compute the number of points to read for each sub-block of a data
        block.

        Affected:
            self.nRdChannels
            self.nRdPairs
            self.pts2read_SelfSpectra
            self.pts2read_CrossSpectra
            self.pts2read_DCchannels
            self.blocksize
            self.dataOut.nChannels
            self.dataOut.nPairs

        Return:
            None
        """
        self.nRdChannels = 0
        self.nRdPairs = 0
        self.rdPairList = []

        # spectraComb lists channel indices in consecutive pairs: an equal
        # pair is a self-spectrum, a different pair is a cross-spectrum.
        for i in range(0, self.processingHeaderObj.totalSpectra*2, 2):
            if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i+1]:
                self.nRdChannels = self.nRdChannels + 1 # pair of equal channels
            else:
                self.nRdPairs = self.nRdPairs + 1 # pair of different channels
                self.rdPairList.append((self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1]))

        pts2read = self.processingHeaderObj.nHeights * self.processingHeaderObj.profilesPerBlock

        self.pts2read_SelfSpectra = int(self.nRdChannels * pts2read)
        self.blocksize = self.pts2read_SelfSpectra

        if self.processingHeaderObj.flag_cspc:
            self.pts2read_CrossSpectra = int(self.nRdPairs * pts2read)
            self.blocksize += self.pts2read_CrossSpectra

        if self.processingHeaderObj.flag_dc:
            self.pts2read_DCchannels = int(self.systemHeaderObj.nChannels * self.processingHeaderObj.nHeights)
            self.blocksize += self.pts2read_DCchannels

#        self.blocksize = self.pts2read_SelfSpectra + self.pts2read_CrossSpectra + self.pts2read_DCchannels


    def readBlock(self):
        """
        Read a data block from the current file-pointer position (self.fp)
        and update all the block-related attributes (metadata + data). The
        data read is stored in the buffer and the buffer counter is reset
        to 0.

        Return: None

        Affected:

            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
            self.data_spc
            self.data_cspc
            self.data_dc

        Exceptions:
            If a block read is not a valid block
        """
        # NOTE(review): blockOk_flag and fpointer are set but never used in
        # this method.
        blockOk_flag = False
        fpointer = self.fp.tell()

        # Self spectra are stored as real values only (dtype[0]); cross
        # spectra and DC channels keep the structured real/imag dtype.
        spc = numpy.fromfile( self.fp, self.dtype[0], self.pts2read_SelfSpectra )
        spc = spc.reshape( (self.nRdChannels, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) # reshape to a 3-D array

        if self.processingHeaderObj.flag_cspc:
            cspc = numpy.fromfile( self.fp, self.dtype, self.pts2read_CrossSpectra )
            cspc = cspc.reshape( (self.nRdPairs, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) # reshape to a 3-D array

        if self.processingHeaderObj.flag_dc:
            dc = numpy.fromfile( self.fp, self.dtype, self.pts2read_DCchannels ) #int(self.processingHeaderObj.nHeights*self.systemHeaderObj.nChannels) )
            dc = dc.reshape( (self.systemHeaderObj.nChannels, self.processingHeaderObj.nHeights) ) # reshape to a 2-D array


        if not(self.processingHeaderObj.shif_fft):
            # Data on disk is not FFT-shifted: roll along axis 2 to center it.
            spc = numpy.roll( spc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) # shift along axis 2

            if self.processingHeaderObj.flag_cspc:
                cspc = numpy.roll( cspc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) # shift along axis 2


        spc = numpy.transpose( spc, (0,2,1) )
        self.data_spc = spc

        if self.processingHeaderObj.flag_cspc:
            cspc = numpy.transpose( cspc, (0,2,1) )
            self.data_cspc = cspc['real'] + cspc['imag']*1j
        else:
            self.data_cspc = None

        if self.processingHeaderObj.flag_dc:
            self.data_dc = dc['real'] + dc['imag']*1j
        else:
            self.data_dc = None

        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1

        self.nTotalBlocks += 1
        self.nReadBlocks += 1

        return 1


    def getData(self):
        """
        Copy the read buffer into the "Spectra" object (self.dataOut)
        together with all its associated metadata. When there is no data in
        the read buffer a new block is read from file using "readNextBlock".

        Return:
            0 : if no more files are available
            1 : if the buffer was copied successfully

        Affected:
            self.dataOut

            self.flagTimeBlock
            self.flagIsNewBlock
        """

        if self.flagNoMoreFiles: return 0

        self.flagTimeBlock = 0
        self.flagIsNewBlock = 0

        if self.__hasNotDataInBuffer():

            if not( self.readNextBlock() ):
                return 0

#            self.updateDataHeader()

        if self.flagNoMoreFiles == 1:
            print 'Process finished'
            return 0

        # data is a 3-dimensional numpy array (profiles, heights and channels)

        # NOTE(review): "== None" on a numpy array performs an elementwise
        # comparison; "is None" would be safer. Also, this skips the whole
        # block whenever the file carries no DC channels — confirm intended.
        if self.data_dc == None:
            self.dataOut.flagNoData = True
            return 0


        self.dataOut.data_spc = self.data_spc

        self.dataOut.data_cspc = self.data_cspc

        self.dataOut.data_dc = self.data_dc

        self.dataOut.flagTimeBlock = self.flagTimeBlock

        self.dataOut.flagNoData = False

        self.dataOut.dtype = self.dtype

        self.dataOut.nChannels = self.nRdChannels

        self.dataOut.nPairs = self.nRdPairs

        self.dataOut.pairsList = self.rdPairList

        self.dataOut.nHeights = self.processingHeaderObj.nHeights

        self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock

        self.dataOut.nFFTPoints = self.processingHeaderObj.profilesPerBlock

        self.dataOut.nIncohInt = self.processingHeaderObj.nIncohInt


        # Height axis reconstructed from first height + uniform spacing.
        xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight

        self.dataOut.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight)

        self.dataOut.channelList = range(self.systemHeaderObj.nChannels)

        self.dataOut.channelIndexList = range(self.systemHeaderObj.nChannels)

        self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000.#+ self.profileIndex * self.ippSeconds

        self.dataOut.ippSeconds = self.ippSeconds

        self.dataOut.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt * self.processingHeaderObj.nIncohInt * self.dataOut.nFFTPoints

        self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft

#        self.profileIndex += 1

        self.dataOut.systemHeaderObj = self.systemHeaderObj.copy()

        self.dataOut.radarControllerHeaderObj = self.radarControllerHeaderObj.copy()

        return self.dataOut.data_spc
1991 1994
1992 1995
class SpectraWriter(JRODataWriter):

    """
    Writes spectra data to processed (".pdata") files. Data is always
    written one block at a time.
    """

    # Processed spectra files use the ".pdata" extension and the "P"
    # filename type character.
    ext = ".pdata"

    optchar = "P"

    # Shapes of the three sub-blocks of one data block; set in
    # setBlockDimension().
    shape_spc_Buffer = None

    shape_cspc_Buffer = None

    shape_dc_Buffer = None

    # Buffers holding the data to write; taken from dataOut in putData().
    data_spc = None

    data_cspc = None

    data_dc = None

#    dataOut = None

    def __init__(self):
        """
        Initialize the SpectraWriter for writing spectra data.

        Affected:
            self.dataOut
            self.basicHeaderObj
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.processingHeaderObj

        Return: None
        """

        self.isConfig = False

        self.nTotalBlocks = 0

        self.data_spc = None

        self.data_cspc = None

        self.data_dc = None

        self.fp = None

        self.flagIsNewFile = 1

        # NOTE(review): nTotalBlocks is initialized twice in this method.
        self.nTotalBlocks = 0

        self.flagIsNewBlock = 0

        self.flagNoMoreFiles = 0

        self.setFile = None

        self.dtype = None

        self.path = None

        self.noMoreFiles = 0

        self.filename = None

        # Header objects written at the start of every file/block.
        self.basicHeaderObj = BasicHeader()

        self.systemHeaderObj = SystemHeader()

        self.radarControllerHeaderObj = RadarControllerHeader()

        self.processingHeaderObj = ProcessingHeader()


    def hasAllDataInBuffer(self):
        # Each putData() call receives one complete spectra block.
        return 1


    def setBlockDimension(self):
        """
        Compute the dimensional shapes of the sub-blocks that make up one
        data block.

        Affected:
            self.shape_spc_Buffer
            self.shape_cspc_Buffer
            self.shape_dc_Buffer

        Return: None
        """
        self.shape_spc_Buffer = (self.dataOut.nChannels,
                                 self.processingHeaderObj.nHeights,
                                 self.processingHeaderObj.profilesPerBlock)

        self.shape_cspc_Buffer = (self.dataOut.nPairs,
                                  self.processingHeaderObj.nHeights,
                                  self.processingHeaderObj.profilesPerBlock)

        self.shape_dc_Buffer = (self.dataOut.nChannels,
                                self.processingHeaderObj.nHeights)


    def writeBlock(self):
        """
        Write the buffered block to the designated output file.

        Affected:
            self.data_spc
            self.data_cspc
            self.data_dc
            self.flagIsNewFile
            self.flagIsNewBlock
            self.nTotalBlocks
            self.nWriteBlocks

        Return: None
        """

        spc = numpy.transpose( self.data_spc, (0,2,1) )
        if not( self.processingHeaderObj.shif_fft ):
            spc = numpy.roll( spc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) # shift along axis 2 to match the on-disk layout
        data = spc.reshape((-1))
        data.tofile(self.fp)

        # NOTE(review): "!= None" on a numpy array performs an elementwise
        # comparison; "is not None" would be safer here and below.
        if self.data_cspc != None:
            data = numpy.zeros( self.shape_cspc_Buffer, self.dtype )
            cspc = numpy.transpose( self.data_cspc, (0,2,1) )
            if not( self.processingHeaderObj.shif_fft ):
                cspc = numpy.roll( cspc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) # shift along axis 2 to match the on-disk layout
            data['real'] = cspc.real
            data['imag'] = cspc.imag
            data = data.reshape((-1))
            data.tofile(self.fp)

        if self.data_dc != None:
            data = numpy.zeros( self.shape_dc_Buffer, self.dtype )
            dc = self.data_dc
            data['real'] = dc.real
            data['imag'] = dc.imag
            data = data.reshape((-1))
            data.tofile(self.fp)

        self.data_spc.fill(0)
        # NOTE(review): data_dc is cleared unconditionally although its write
        # above is guarded; this raises AttributeError when data_dc is None.
        self.data_dc.fill(0)
        if self.data_cspc != None:
            self.data_cspc.fill(0)

        self.flagIsNewFile = 0
        self.flagIsNewBlock = 1
        self.nTotalBlocks += 1
        self.nWriteBlocks += 1
        self.blockIndex += 1


    def putData(self):
        """
        Load one block of data from dataOut and write it to file.

        Affected:
            self.data_spc
            self.data_cspc
            self.data_dc

        Return:
            0 : if there is no data or no more files can be written
            1 : if a block of data was written to a file
        """

        if self.dataOut.flagNoData:
            return 0

        self.flagIsNewBlock = 0

        if self.dataOut.flagTimeBlock:
            # Time discontinuity: drop the buffered data and open a new file.
            # NOTE(review): data_spc/data_cspc/data_dc may still be None
            # here, in which case fill(0) raises AttributeError.
            self.data_spc.fill(0)
            self.data_cspc.fill(0)
            self.data_dc.fill(0)
            self.setNextFile()

        if self.flagIsNewFile == 0:
            self.getBasicHeader()

        self.data_spc = self.dataOut.data_spc
        self.data_cspc = self.dataOut.data_cspc
        self.data_dc = self.dataOut.data_dc

        # #self.processingHeaderObj.dataBlocksPerFile)
        if self.hasAllDataInBuffer():
#            self.getDataHeader()
            self.writeNextBlock()

        if self.flagNoMoreFiles:
            #print 'Process finished'
            return 0

        return 1


    def __getProcessFlags(self):
        """
        Build the PROCFLAG bit mask describing how dataOut was processed.

        Return:
            processFlags : integer sum of the applicable PROCFLAG values
        """

        processFlags = 0

        # Structured complex dtypes supported by the on-disk format.
        dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
        dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
        dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
        dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
        dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
        dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]



        datatypeValueList = [PROCFLAG.DATATYPE_CHAR,
                             PROCFLAG.DATATYPE_SHORT,
                             PROCFLAG.DATATYPE_LONG,
                             PROCFLAG.DATATYPE_INT64,
                             PROCFLAG.DATATYPE_FLOAT,
                             PROCFLAG.DATATYPE_DOUBLE]


        # NOTE(review): dtypeValue stays unbound (NameError) if no dtype
        # matches dataOut.dtype.
        for index in range(len(dtypeList)):
            if self.dataOut.dtype == dtypeList[index]:
                dtypeValue = datatypeValueList[index]
                break

        processFlags += dtypeValue

        if self.dataOut.flagDecodeData:
            processFlags += PROCFLAG.DECODE_DATA

        if self.dataOut.flagDeflipData:
            processFlags += PROCFLAG.DEFLIP_DATA

        # NOTE(review): "!= None" on a numpy array performs an elementwise
        # comparison; "is not None" would be safer here and below.
        if self.dataOut.code != None:
            processFlags += PROCFLAG.DEFINE_PROCESS_CODE

        if self.dataOut.nIncohInt > 1:
            processFlags += PROCFLAG.INCOHERENT_INTEGRATION

        if self.dataOut.data_dc != None:
            processFlags += PROCFLAG.SAVE_CHANNELS_DC

        return processFlags


    def __getBlockSize(self):
        '''
        Return the number of bytes in one Spectra data block.
        '''

        dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')])
        dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')])
        dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')])
        dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')])
        dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')])
        dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')])

        dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5]
        datatypeValueList = [1,2,4,8,4,8] # bytes per component for each dtype
        # NOTE(review): datatypeValue stays unbound (NameError) if no dtype
        # matches dataOut.dtype.
        for index in range(len(dtypeList)):
            if self.dataOut.dtype == dtypeList[index]:
                datatypeValue = datatypeValueList[index]
                break


        pts2write = self.dataOut.nHeights * self.dataOut.nFFTPoints

        # Self spectra are stored as real values only (no factor of 2).
        pts2write_SelfSpectra = int(self.dataOut.nChannels * pts2write)
        blocksize = (pts2write_SelfSpectra*datatypeValue)

        if self.dataOut.data_cspc != None:
            pts2write_CrossSpectra = int(self.dataOut.nPairs * pts2write)
            blocksize += (pts2write_CrossSpectra*datatypeValue*2)

        if self.dataOut.data_dc != None:
            pts2write_DCchannels = int(self.dataOut.nChannels * self.dataOut.nHeights)
            blocksize += (pts2write_DCchannels*datatypeValue*2)

        blocksize = blocksize #* datatypeValue * 2 #FIX THIS

        return blocksize

    def getDataHeader(self):

        """
        Build a copy of the First Header objects from dataOut.

        Affected:
            self.systemHeaderObj
            self.radarControllerHeaderObj
            self.dtype

        Return:
            None
        """

        self.systemHeaderObj = self.dataOut.systemHeaderObj.copy()
        self.systemHeaderObj.nChannels = self.dataOut.nChannels
        self.radarControllerHeaderObj = self.dataOut.radarControllerHeaderObj.copy()

        self.getBasicHeader()

        processingHeaderSize = 40 # bytes: fixed part of the processing header
        self.processingHeaderObj.dtype = 0 # Voltage
        self.processingHeaderObj.blockSize = self.__getBlockSize()
        self.processingHeaderObj.profilesPerBlock = self.dataOut.nFFTPoints
        self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
        self.processingHeaderObj.nWindows = 1 # could be 1 or self.dataOut.processingHeaderObj.nWindows
        self.processingHeaderObj.processFlags = self.__getProcessFlags()
        self.processingHeaderObj.nCohInt = self.dataOut.nCohInt# needed to compute timeInterval
        self.processingHeaderObj.nIncohInt = self.dataOut.nIncohInt
        self.processingHeaderObj.totalSpectra = self.dataOut.nPairs + self.dataOut.nChannels

        if self.processingHeaderObj.totalSpectra > 0:
            # Each self-spectrum is encoded in spectraComb as a repeated
            # channel index; each cross-spectrum as its two channel indices.
            channelList = []
            for channel in range(self.dataOut.nChannels):
                channelList.append(channel)
                channelList.append(channel)

            pairsList = []
            for pair in self.dataOut.pairsList:
                pairsList.append(pair[0])
                pairsList.append(pair[1])
            spectraComb = channelList + pairsList
            spectraComb = numpy.array(spectraComb,dtype="u1")
            self.processingHeaderObj.spectraComb = spectraComb
            sizeOfSpcComb = len(spectraComb)
            processingHeaderSize += sizeOfSpcComb

        # NOTE(review): "!= None" on a numpy code array performs an
        # elementwise comparison; "is not None" would be safer.
        if self.dataOut.code != None:
            self.processingHeaderObj.code = self.dataOut.code
            self.processingHeaderObj.nCode = self.dataOut.nCode
            self.processingHeaderObj.nBaud = self.dataOut.nBaud
            nCodeSize = 4 # bytes
            nBaudSize = 4 # bytes
            codeSize = 4 # bytes
            sizeOfCode = int(nCodeSize + nBaudSize + codeSize * self.dataOut.nCode * self.dataOut.nBaud)
            processingHeaderSize += sizeOfCode

        if self.processingHeaderObj.nWindows != 0:
            self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
            self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
            self.processingHeaderObj.nHeights = self.dataOut.nHeights
            self.processingHeaderObj.samplesWin = self.dataOut.nHeights
            sizeOfFirstHeight = 4
            sizeOfdeltaHeight = 4
            sizeOfnHeights = 4
            sizeOfWindows = (sizeOfFirstHeight + sizeOfdeltaHeight + sizeOfnHeights)*self.processingHeaderObj.nWindows
            processingHeaderSize += sizeOfWindows

        self.processingHeaderObj.size = processingHeaderSize
2348 2351
class SpectraHeisWriter():
    """Writes Spectra-Heis blocks to FITS files, one file per call to putData."""

    # sequential counter kept for backward compatibility (not used internally)
    i = 0

    def __init__(self, dataOut):

        self.wrObj = FITS()
        self.dataOut = dataOut

    @staticmethod
    def isNumber(str):
        """
        Check whether the characters of a string can be converted to a number.

        Input:
            str : string to analyze

        Return:
            True  : the string is numeric
            False : the string is not numeric

        NOTE(review): the original declared this as an instance method without
        'self', so instance calls bound the instance itself to 'str' and always
        returned False. Declared as a staticmethod so both
        SpectraHeisWriter.isNumber(x) and instance.isNumber(x) work.
        """
        try:
            float(str)
            return True
        except:
            return False

    def setup(self, wrpath):
        """Set (and create if missing) the output directory; reset the file counter."""

        if not os.path.exists(wrpath):
            os.mkdir(wrpath)

        self.wrpath = wrpath
        self.setFile = 0

    def putData(self):
        """
        Write the current dataOut spectra to a new FITS file under
        wrpath/DyyyyDDD/, naming files DyyyyDDDsss.fits with an incrementing
        set number.
        """
        # build the DyyyyDDD subfolder name from the data timestamp
        name = time.localtime(self.dataOut.utctime)
        ext = ".fits"
        subfolder = 'D%4.4d%3.3d' % (name.tm_year, name.tm_yday)

        doypath = os.path.join(self.wrpath, subfolder)
        if not os.path.exists(doypath):
            os.mkdir(doypath)
        self.setFile += 1
        file = 'D%4.4d%3.3d%3.3d%s' % (name.tm_year, name.tm_yday, self.setFile, ext)

        filename = os.path.join(self.wrpath, subfolder, file)

        # frequency axis; NOTE(review): built from nHeights while the data
        # columns are sized with nFFTPoints -- confirm these are meant to match
        freq = numpy.arange(-1*self.dataOut.nHeights/2., self.dataOut.nHeights/2.)/(2*self.dataOut.ippSeconds)

        col1 = self.wrObj.setColF(name="freq", format=str(self.dataOut.nFFTPoints)+'E', array=freq)
        col2 = self.wrObj.writeData(name="P_Ch1", format=str(self.dataOut.nFFTPoints)+'E', data=10*numpy.log10(self.dataOut.data_spc[0,:]))
        col3 = self.wrObj.writeData(name="P_Ch2", format=str(self.dataOut.nFFTPoints)+'E', data=10*numpy.log10(self.dataOut.data_spc[1,:]))
        col4 = self.wrObj.writeData(name="P_Ch3", format=str(self.dataOut.nFFTPoints)+'E', data=10*numpy.log10(self.dataOut.data_spc[2,:]))
        col5 = self.wrObj.writeData(name="P_Ch4", format=str(self.dataOut.nFFTPoints)+'E', data=10*numpy.log10(self.dataOut.data_spc[3,:]))
        col6 = self.wrObj.writeData(name="P_Ch5", format=str(self.dataOut.nFFTPoints)+'E', data=10*numpy.log10(self.dataOut.data_spc[4,:]))
        col7 = self.wrObj.writeData(name="P_Ch6", format=str(self.dataOut.nFFTPoints)+'E', data=10*numpy.log10(self.dataOut.data_spc[5,:]))
        col8 = self.wrObj.writeData(name="P_Ch7", format=str(self.dataOut.nFFTPoints)+'E', data=10*numpy.log10(self.dataOut.data_spc[6,:]))
        col9 = self.wrObj.writeData(name="P_Ch8", format=str(self.dataOut.nFFTPoints)+'E', data=10*numpy.log10(self.dataOut.data_spc[7,:]))

        # channel 6 power is used as the primary HDU image
        n = self.dataOut.data_spc[6,:]
        a = self.wrObj.cFImage(n)
        b = self.wrObj.Ctable(col1, col2, col3, col4, col5, col6, col7, col8, col9)
        self.wrObj.CFile(a, b)
        self.wrObj.wFile(filename)
        return 1
2420 2423
class FITS:
    """Thin wrapper around pyfits for building and writing a FITS file."""

    # metadata/handles of the most recently built column (class-level defaults)
    name = None
    format = None
    array = None
    data = None
    thdulist = None

    def __init__(self):
        pass

    def setColF(self, name, format, array):
        """Build and return a float32 pyfits Column from *array*."""
        self.name = name
        self.format = format
        self.array = array
        packed = numpy.array([self.array], dtype=numpy.float32)
        self.col1 = pyfits.Column(name=self.name, format=self.format, array=packed)
        return self.col1

    def writeHeader(self):
        """Placeholder: header writing is not implemented."""
        pass

    def writeData(self, name, format, data):
        """Build and return a float32 pyfits Column from *data*."""
        self.name = name
        self.format = format
        self.data = data
        packed = numpy.array([self.data], dtype=numpy.float32)
        self.col2 = pyfits.Column(name=self.name, format=self.format, array=packed)
        return self.col2

    def cFImage(self, n):
        """Create the primary HDU holding image data *n*."""
        self.hdu = pyfits.PrimaryHDU(n)
        return self.hdu

    def Ctable(self, col1, col2, col3, col4, col5, col6, col7, col8, col9):
        """Create a binary table HDU from the nine given columns."""
        self.cols = pyfits.ColDefs([col1, col2, col3, col4, col5, col6, col7, col8, col9])
        self.tbhdu = pyfits.new_table(self.cols)
        return self.tbhdu

    def CFile(self, hdu, tbhdu):
        """Assemble the HDU list (primary image + table)."""
        self.thdulist = pyfits.HDUList([hdu, tbhdu])

    def wFile(self, filename):
        """Write the assembled HDU list to *filename*."""
        self.thdulist.writeto(filename)
@@ -1,575 +1,588
1 1 '''
2 2
3 3 $Author: dsuarez $
4 4 $Id: Processor.py 1 2012-11-12 18:56:07Z dsuarez $
5 5 '''
6 6 import os
7 7 import numpy
8 8 import datetime
9 9 import time
10 10
11 11 from jrodata import *
12 12 from jrodataIO import *
13 13 from jroplot import *
14 14
class ProcessingUnit:

    """
    Base class for data processing units.

    Provides the "call" method to execute operations, which can be:
        - internal methods (callMethod)
        - objects of type Operation (callObject); these objects must be
          registered beforehand with the "add" / "addOperation" method.
    """
    # input data object (Voltage, Spectra or Correlation)
    dataIn = None

    # output data object (Voltage, Spectra or Correlation)
    dataOut = None

    # registered Operation objects, keyed by their id
    objectDict = None

    def __init__(self):

        self.objectDict = {}

    def init(self):
        # Subclasses must derive/copy dataOut from dataIn here.
        raise ValueError("Not implemented")

    def addOperation(self, object, objId):

        """
        Register *object* in "self.objectDict" and return the identifier
        associated with it.

        Input:

            object : an instance of class "Operation"

        Return:

            objId : identifier of the object, needed to execute the operation
        """

        self.objectDict[objId] = object

        return objId

    def operation(self, **kwargs):

        """
        Direct operation on the data (dataOut.data). The dataOut attributes
        must be updated by the implementation.

        Input:

            **kwargs : keyword arguments of the function to execute
        """

        raise ValueError("Not implemented")

    def callMethod(self, name, **kwargs):

        """
        Execute the internal method *name* with arguments **kwargs.
        Execution is skipped while the relevant data object is still empty.

        Input:
            name : name of the method to execute

            **kwargs : dictionary with the names and values of the arguments.

        """
        if name != 'run':

            # 'init' consumes dataIn; everything else operates on dataOut
            if name == 'init' and self.dataIn.isEmpty():
                return

            if name != 'init' and self.dataOut.isEmpty():
                return

        methodToCall = getattr(self, name)

        methodToCall(**kwargs)

    def callObject(self, objId, **kwargs):

        """
        Execute the operation associated with the object identifier *objId*.

        Input:

            objId : identifier of the object to execute

            **kwargs : dictionary with the names and values of the arguments.

        Return:

            None
        """

        if self.dataOut.isEmpty():
            return

        object = self.objectDict[objId]

        object.run(self.dataOut, **kwargs)

    def call(self, operationConf, **kwargs):

        """
        Execute the operation "operationConf.name" with arguments **kwargs.
        The operation can be of two types:

        1. A method of this class:

            operation.type = "self"

        2. The "run" method of an Operation object (or a subclass):
            operation.type = "other".

            Such an object must have been registered beforehand with
            "addOperation", identified by operation.id.

        Input:

            operationConf : operation object with attributes: name, type and id.

        """

        if operationConf.type == 'self':
            self.callMethod(operationConf.name, **kwargs)
            return

        if operationConf.type == 'other':
            self.callObject(operationConf.id, **kwargs)
            return

    def setInput(self, dataIn):

        self.dataIn = dataIn

    def getOutput(self):

        return self.dataOut
149 161
class Operation():

    """
    Base class for additional operations that can be attached to a
    ProcessingUnit and that need to accumulate state across calls (preferably
    an internal buffer inside the subclass).

    Example: coherent integration, which needs the previous n profiles (buffer)

    """

    __buffer = None
    __isConfig = False

    def __init__(self):

        pass

    def run(self, dataIn, **kwargs):

        """
        Perform the required operations on dataIn.data and update the
        attributes of dataIn in place.

        Input:

            dataIn : a JROData object

        Return:

            None

        Affected:
            __buffer : data reception buffer.

        """

        raise ValueError("Not implemented")
187 199
class VoltageProc(ProcessingUnit):
    """Processing unit whose input and output are Voltage objects."""

    def __init__(self):

        self.objectDict = {}
        self.dataOut = Voltage()

    def init(self):

        self.dataOut.copy(self.dataIn)
        # dataIn attributes need not be copied on every init();
        # the copy should happen once per new data block

    def selectChannels(self, channelList):
        # NOTE(review): the guard tests dataIn while the selection acts on
        # dataOut -- confirm whether dataOut.isEmpty() was intended
        if self.dataIn.isEmpty():
            return 0

        self.selectChannelsByIndex(channelList)

    def selectChannelsByIndex(self, channelIndexList):
        """
        Select a block of data by channel index, according to channelIndexList.

        Input:
            channelIndexList : plain list of channel indexes to select, e.g. [2,3,7]

        Affected:
            self.dataOut.data
            self.dataOut.channelIndexList
            self.dataOut.nChannels
            self.dataOut.m_ProcessingHeader.totalSpectra
            self.dataOut.systemHeaderObj.numChannels
            self.dataOut.m_ProcessingHeader.blockSize

        Return:
            1 on success. Raises ValueError when an index is not present in
            self.dataOut.channelIndexList.
        """

        for channel in channelIndexList:
            if channel not in self.dataOut.channelIndexList:
                print(channelIndexList)
                raise ValueError("The value %d in channelIndexList is not valid" % channel)

        nChannels = len(channelIndexList)

        data = self.dataOut.data[channelIndexList, :]

        self.dataOut.data = data
        self.dataOut.channelIndexList = channelIndexList
        self.dataOut.channelList = [self.dataOut.channelList[i] for i in channelIndexList]
        self.dataOut.nChannels = nChannels

        return 1
242 255
class CohInt(Operation):
    """Coherent integration of voltage profiles, by profile count or by time,
    with optional overlapping (sliding window)."""

    __profIndex = 0
    __withOverapping = False

    __byTime = False
    __initime = None
    __lastdatatime = None
    __integrationtime = None

    __buffer = None

    __dataReady = False

    nCohInt = None

    def __init__(self):

        self.__isConfig = False

    def setup(self, nCohInt=None, timeInterval=None, overlapping=False):
        """
        Set the parameters of the integration class.

        Inputs:

            nCohInt : Number of coherent integrations
            timeInterval : Time of integration (minutes). If the parameter "nCohInt" is selected this one does not work
            overlapping : keep a sliding window of profiles instead of disjoint sums

        """

        self.__initime = None
        self.__lastdatatime = 0
        self.__buffer = None
        self.__dataReady = False

        if nCohInt is None and timeInterval is None:
            raise ValueError("nCohInt or timeInterval should be specified ...")

        if nCohInt is not None:
            self.nCohInt = nCohInt
            self.__byTime = False
        else:
            self.__integrationtime = timeInterval * 60.  # minutes -> seconds; if (type(timeInterval)!=integer) -> change this line
            self.nCohInt = 9999
            self.__byTime = True

        if overlapping:
            self.__withOverapping = True
            self.__buffer = None  # filled lazily with the first profile
        else:
            self.__withOverapping = False
            self.__buffer = 0  # running sum

        self.__profIndex = 0

    def putData(self, data):

        """
        Add a profile to the __buffer and increase in one the __profileIndex

        """

        if not self.__withOverapping:
            # disjoint integration: plain running sum
            self.__buffer += data
            self.__profIndex += 1
            return

        # Overlapping data
        nChannels, nHeis = data.shape
        data = numpy.reshape(data, (1, nChannels, nHeis))

        # If the buffer is empty then it takes the data value.
        # Fixed: 'is None' -- '== None' is an elementwise comparison on ndarrays.
        if self.__buffer is None:
            self.__buffer = data
            self.__profIndex += 1
            return

        # If the buffer length is lower than nCohInt then stack the data value
        if self.__profIndex < self.nCohInt:
            self.__buffer = numpy.vstack((self.__buffer, data))
            self.__profIndex += 1
            return

        # If the buffer length is equal to nCohInt then replace the oldest
        # buffer value with the data value (sliding window)
        self.__buffer = numpy.roll(self.__buffer, -1, axis=0)
        self.__buffer[self.nCohInt-1] = data
        self.__profIndex = self.nCohInt
        return

    def pushData(self):
        """
        Return the sum of the last profiles and the number of profiles used in the sum.

        Affected:

            self.__profIndex
            self.__buffer (reset when not overlapping)

        """

        if not self.__withOverapping:
            data = self.__buffer
            nCohInt = self.__profIndex

            self.__buffer = 0
            self.__profIndex = 0

            return data, nCohInt

        # Integration with Overlapping: sum the window, keep the buffer
        data = numpy.sum(self.__buffer, axis=0)
        nCohInt = self.__profIndex

        return data, nCohInt

    def byProfiles(self, data):
        """Integrate by profile count; returns the sum once nCohInt profiles
        have been accumulated, None otherwise (see __dataReady)."""

        self.__dataReady = False
        avgdata = None
        nCohInt = None

        self.putData(data)

        if self.__profIndex == self.nCohInt:

            avgdata, nCohInt = self.pushData()
            self.__dataReady = True

        return avgdata

    def byTime(self, data, datatime):
        """Integrate by elapsed time; returns the sum once __integrationtime
        seconds have passed since __initime, None otherwise."""

        self.__dataReady = False
        avgdata = None
        nCohInt = None

        self.putData(data)

        if (datatime - self.__initime) >= self.__integrationtime:
            avgdata, nCohInt = self.pushData()
            self.nCohInt = nCohInt
            self.__dataReady = True

        return avgdata

    def integrate(self, data, datatime=None):
        """Feed one profile in; return (avgdata, avgdatatime) when an
        integration completes, (None, None) otherwise."""

        if self.__initime is None:
            self.__initime = datatime

        if self.__byTime:
            avgdata = self.byTime(data, datatime)
        else:
            avgdata = self.byProfiles(data)

        self.__lastdatatime = datatime

        # Fixed: 'is None' -- avgdata may be an ndarray, for which '== None'
        # is elementwise and cannot be used in a boolean context.
        if avgdata is None:
            return None, None

        avgdatatime = self.__initime

        deltatime = datatime - self.__lastdatatime

        if not self.__withOverapping:
            self.__initime = datatime
        else:
            self.__initime += deltatime

        return avgdata, avgdatatime

    def run(self, dataOut, nCohInt=None, timeInterval=None, overlapping=False):
        """Entry point called by the processing unit: configure on first call,
        then integrate dataOut.data in place, flagging dataOut accordingly."""

        if not self.__isConfig:
            self.setup(nCohInt, timeInterval, overlapping)
            self.__isConfig = True

        avgdata, avgdatatime = self.integrate(dataOut.data, dataOut.utctime)

        dataOut.flagNoData = True

        if self.__dataReady:
            dataOut.data = avgdata
            dataOut.timeInterval *= self.nCohInt
            dataOut.nCohInt *= self.nCohInt
            dataOut.utctime = avgdatatime
            dataOut.flagNoData = False
436 449
437 450
class SpectraProc(ProcessingUnit):
    """Processing unit that turns buffered Voltage profiles into Spectra via FFT,
    or passes through an already-computed Spectra input."""

    def __init__(self):
        self.objectDict = {}
        self.buffer = None          # (nChannels, nFFTPoints, nHeights) complex accumulator
        self.firstdatatime = None   # utctime of the first buffered profile
        self.profIndex = 0          # number of profiles currently buffered
        self.dataOut = Spectra()

    def init(self, nFFTPoints=None, pairsList=None):
        if self.dataIn.type == "Spectra":
            self.dataOut.copy(self.dataIn)
            return

        if self.dataIn.type == "Voltage":

            if nFFTPoints is None:
                raise ValueError("This SpectraProc.init() needs the nFFTPoints input variable")

            if pairsList is None:
                nPairs = 0
            else:
                nPairs = len(pairsList)

            self.dataOut.nFFTPoints = nFFTPoints
            self.dataOut.pairsList = pairsList
            self.dataOut.nPairs = nPairs

            # Fixed: 'is None' -- once allocated, buffer is an ndarray and
            # '== None' would be an elementwise comparison.
            if self.buffer is None:
                self.buffer = numpy.zeros((self.dataIn.nChannels,
                                           self.dataOut.nFFTPoints,
                                           self.dataIn.nHeights),
                                          dtype='complex')

            self.buffer[:, self.profIndex, :] = self.dataIn.data
            self.profIndex += 1

            if self.firstdatatime is None:
                self.firstdatatime = self.dataIn.utctime

            # once nFFTPoints profiles are buffered, compute the spectra
            if self.profIndex == self.dataOut.nFFTPoints:
                self.__updateObjFromInput()
                self.__getFft()

                self.dataOut.flagNoData = False

                self.buffer = None
                self.firstdatatime = None
                self.profIndex = 0

            return

        # fixed typo: was 'ValuError' (a NameError when this branch was hit)
        raise ValueError("The type object %s is not valid" % (self.dataIn.type))

    def __updateObjFromInput(self):
        """Copy per-block metadata from dataIn to dataOut before the FFT."""

        self.dataOut.radarControllerHeaderObj = self.dataIn.radarControllerHeaderObj.copy()
        self.dataOut.systemHeaderObj = self.dataIn.systemHeaderObj.copy()
        self.dataOut.channelList = self.dataIn.channelList
        self.dataOut.heightList = self.dataIn.heightList
        self.dataOut.dtype = self.dataIn.dtype
        self.dataOut.nHeights = self.dataIn.nHeights
        self.dataOut.nChannels = self.dataIn.nChannels
        self.dataOut.nBaud = self.dataIn.nBaud
        self.dataOut.nCode = self.dataIn.nCode
        self.dataOut.code = self.dataIn.code
        self.dataOut.nProfiles = self.dataOut.nFFTPoints
        self.dataOut.channelIndexList = self.dataIn.channelIndexList
        self.dataOut.flagTimeBlock = self.dataIn.flagTimeBlock
        self.dataOut.utctime = self.firstdatatime
        self.dataOut.flagDecodeData = self.dataIn.flagDecodeData  # assume data is already decoded
        self.dataOut.flagDeflipData = self.dataIn.flagDeflipData  # assume data is not flipped
        self.dataOut.flagShiftFFT = self.dataIn.flagShiftFFT
        self.dataOut.nCohInt = self.dataIn.nCohInt
        self.dataOut.nIncohInt = 1
        self.dataOut.ippSeconds = self.dataIn.ippSeconds
        self.dataOut.timeInterval = self.dataIn.timeInterval*self.dataOut.nFFTPoints

    def __getFft(self):
        """
        Convert Voltage values to Spectra.

        Affected:
            self.dataOut.data_spc
            self.dataOut.data_cspc
            self.dataOut.data_dc
            self.dataOut.blockSize
        """
        fft_volt = numpy.fft.fft(self.buffer, axis=1)
        dc = fft_volt[:, 0, :]  # DC component taken before the shift

        # self-spectra
        fft_volt = numpy.fft.fftshift(fft_volt, axes=(1,))
        spc = fft_volt * numpy.conjugate(fft_volt)
        spc = spc.real

        blocksize = 0
        blocksize += dc.size
        blocksize += spc.size

        cspc = None
        pairIndex = 0
        if self.dataOut.pairsList is not None:
            # cross-spectra for each requested channel pair
            cspc = numpy.zeros((self.dataOut.nPairs, self.dataOut.nFFTPoints, self.dataOut.nHeights), dtype='complex')
            for pair in self.dataOut.pairsList:
                cspc[pairIndex, :, :] = numpy.abs(fft_volt[pair[0], :, :] * numpy.conjugate(fft_volt[pair[1], :, :]))
                pairIndex += 1
            blocksize += cspc.size

        self.dataOut.data_spc = spc
        self.dataOut.data_cspc = cspc
        self.dataOut.data_dc = dc
        self.dataOut.blockSize = blocksize
570 583
571 584
class IncohInt(Operation):
    """Placeholder for incoherent integration; not implemented yet."""

    def __init__(self):
        pass
General Comments 0
You need to be logged in to leave comments. Login now