@@ -0,0 +1,370 | |||
|
1 | ''' | |
|
2 | Created on September, 2012 | |
|
3 | @author: | |
|
4 | ''' | |
|
5 | from xml.etree.ElementTree import Element, SubElement, ElementTree | |
|
6 | from element import prettify | |
|
7 | from xml.etree import ElementTree as ET | |
|
8 | import sys | |
|
9 | ||
|
10 | ||
|
11 | #def save(a, b): | |
|
12 | # | |
|
13 | # nameP = "Alexnder" | |
|
14 | # descripcion = self.projectWindow.Text() | |
|
15 | # id = 1 | |
|
16 | # x = self.data.projectWindow.cmbbox.value() | |
|
17 | # | |
|
18 | # projectObj = Project(id, name, description) | |
|
19 | # | |
|
20 | # projectObj.setup(id, name, description) | |
|
21 | ||
|
22 | class Project(): | |
|
23 | ||
|
24 | id = None | |
|
25 | name = None | |
|
26 | description = None | |
|
27 | readBranchObjList = None | |
|
28 | procBranchObjList = None | |
|
29 | ||
|
30 | def __init__(self): | |
|
31 | ||
|
32 | # self.id = id | |
|
33 | # self.name = name | |
|
34 | # self.description = description | |
|
35 | ||
|
36 | self.readBranchObjList = [] | |
|
37 | self.procBranchObjList = [] | |
|
38 | ||
|
39 | def setParms(self, id, name, description): | |
|
40 | ||
|
41 | self.id = id | |
|
42 | self.name = name | |
|
43 | self.description = description | |
|
44 | ||
|
45 | def addReadBranch(self, dpath, dataformat, readMode, startDate='', endDate='', startTime='', endTime=''): | |
|
46 | ||
|
47 | id = len(self.readBranchObjList) + 1 | |
|
48 | ||
|
49 | readBranchObj = ReadBranch(id, dpath, dataformat, readMode, startDate, endDate, startTime, endTime) | |
|
50 | ||
|
51 | self.readBranchObjList.append(readBranchObj) | |
|
52 | ||
|
53 | return readBranchObj | |
|
54 | ||
|
55 | def addProcBranch(self, name): | |
|
56 | ||
|
57 | id = len(self.procBranchObjList) + 1 | |
|
58 | ||
|
59 | procBranchObj = ProcBranch(id, name) | |
|
60 | ||
|
61 | self.procBranchObjList.append(procBranchObj) | |
|
62 | ||
|
63 | return procBranchObj | |
|
64 | ||
|
65 | def makeXml(self): | |
|
66 | ||
|
67 | projectElement = Element('Project') | |
|
68 | projectElement.set('id', str(self.id)) | |
|
69 | projectElement.set('name', self.name) | |
|
70 | #projectElement.set('description', self.description) | |
|
71 | ||
|
72 | se = SubElement(projectElement, 'description', description=self.description) #THIS IS THE LAST THING THAT WAS WORKED ON | |
|
73 | #se.text = self.description #LAST MODIFICATION, TO PULL THE DESCRIPTION OUT AS A SUB-ELEMENT | |
|
74 | ||
|
75 | for readBranchObj in self.readBranchObjList: | |
|
76 | readBranchObj.makeXml(projectElement) | |
|
77 | ||
|
78 | for procBranchObj in self.procBranchObjList: | |
|
79 | procBranchObj.makeXml(projectElement) | |
|
80 | ||
|
81 | self.projectElement = projectElement | |
|
82 | ||
|
83 | def writeXml(self, filename): | |
|
84 | ||
|
85 | self.makeXml() | |
|
86 | ElementTree(self.projectElement).write(filename, method='xml') | |
|
87 | print prettify(self.projectElement) | |
|
88 | ||
|
89 | def readXml(self,workspace): | |
|
90 | print "Aqui estoy leyendo" | |
|
91 | tree=ET.parse(workspace) | |
|
92 | root=tree.getroot() | |
|
93 | self.project=root.tag | |
|
94 | self.idProyect= root.attrib.get('id') | |
|
95 | self.nameProyect= root.attrib.get('name') | |
|
96 | for description in root.findall('description'): | |
|
97 | description = description.get('description') | |
|
98 | ||
|
99 | self.description= description | |
|
100 | ||
|
101 | for readBranch in root.findall('readBranch'): | |
|
102 | id = readBranch.get('id') | |
|
103 | self.idrb=id | |
|
104 | ||
|
105 | for procBranch in root.findall('procBranch'): | |
|
106 | id = procBranch.get('id') | |
|
107 | name = procBranch.get('name') | |
|
108 | self.idpb=id | |
|
109 | self.nameBranch=name | |
|
110 | # | |
|
111 | # | |
|
112 | print self.project | |
|
113 | print self.idProyect | |
|
114 | print self.nameProyect | |
|
115 | print self.description | |
|
116 | print self.idrb | |
|
117 | print self.idpb | |
|
118 | print self.nameBranch | |
|
119 | # | |
|
120 | ####THE MIDDLE PART BELOW USED TO BE COMMENTED OUT | |
|
121 | # print root.tag , root.attrib | |
|
122 | # | |
|
123 | # print root.attrib.get('id') | |
|
124 | # print root.attrib.get('name') | |
|
125 | ||
|
126 | ||
|
127 | # for description in root.findall('description'): | |
|
128 | # description = root.find('description').text | |
|
129 | # name = root.get('name') | |
|
130 | # print name, description | |
|
131 | ||
|
132 | # description=root.find('description').text | |
|
133 | # print description | |
|
134 | # EVERYTHING FROM HERE DOWN WAS WORKING | |
|
135 | print "Otra forma " | |
|
136 | root=tree.getroot() | |
|
137 | print root.tag , root.attrib | |
|
138 | for child in root: | |
|
139 | print child.tag ,child.attrib | |
|
140 | for subchild in child: | |
|
141 | print subchild.tag, subchild.attrib | |
|
142 | for subsubchild in subchild: | |
|
143 | print subsubchild.tag, subsubchild.attrib | |
|
144 | for subsubsubchild in subsubchild: | |
|
145 | print subsubsubchild.tag, subsubsubchild.attrib | |
|
146 | # | |
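A note on the traversal above: the nested loops print the tree level by level and need one extra loop per level of nesting. A depth-independent alternative (a sketch, not part of the original file; it assumes a file written by Project.writeXml) is Element.iter(), which visits every element in document order:

    from xml.etree import ElementTree as ET

    tree = ET.parse("test.xml")              # any file produced by Project.writeXml
    for element in tree.getroot().iter():    # use getiterator() on Python < 2.7
        print element.tag, element.attrib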
|
147 | class ReadBranch(): | |
|
148 | ||
|
149 | id = None | |
|
150 | dpath = None | |
|
151 | dataformat = None | |
|
152 | readMode = None | |
|
153 | startDate = None | |
|
154 | endDate = None | |
|
155 | startTime = None | |
|
156 | endTime = None | |
|
157 | ||
|
158 | def __init__(self, id, dpath, dataformat, readMode, startDate, endDate, startTime, endTime): | |
|
159 | ||
|
160 | self.id = id | |
|
161 | self.dpath = dpath | |
|
162 | self.dataformat = dataformat | |
|
163 | self.readMode = readMode | |
|
164 | self.startDate = startDate | |
|
165 | self.endDate = endDate | |
|
166 | self.startTime = startTime | |
|
167 | self.endTime = endTime | |
|
168 | ||
|
169 | def makeXml(self, projectElement): | |
|
170 | ||
|
171 | readBranchElement = SubElement(projectElement, 'readBranch') | |
|
172 | readBranchElement.set('id', str(self.id)) | |
|
173 | ||
|
174 | # readBranchElement.set('dpath', self.dpath) | |
|
175 | # readBranchElement.set('dataformat', self.dataformat) | |
|
176 | # readBranchElement.set('startDate', self.startDate) | |
|
177 | # readBranchElement.set('endDate', self.endDate) | |
|
178 | # readBranchElement.set('startTime', self.startTime) | |
|
179 | # readBranchElement.set('endTime', self.endTime) | |
|
180 | # readBranchElement.set('readMode', str(self.readMode)) | |
|
181 | ||
|
182 | # se = SubElement(readBranchElement, 'dpath')#ESTO ES LO ULTIMO QUE SE TRABAJO | |
|
183 | # se.text = self.dpath | |
|
184 | # | |
|
185 | # se = SubElement(readBranchElement, 'dataformat')#ESTO ES LO ULTIMO QUE SE TRABAJO | |
|
186 | # se.text = self.dataformat | |
|
187 | # | |
|
188 | # se = SubElement(readBranchElement, 'startDate')#ESTO ES LO ULTIMO QUE SE TRABAJO | |
|
189 | # se.text = self.startDate | |
|
190 | # | |
|
191 | # se = SubElement(readBranchElement, 'endDate')#ESTO ES LO ULTIMO QUE SE TRABAJO | |
|
192 | # se.text = self.endDate | |
|
193 | # | |
|
194 | # se = SubElement(readBranchElement, 'startTime')#ESTO ES LO ULTIMO QUE SE TRABAJO | |
|
195 | # se.text = self.startTime | |
|
196 | # | |
|
197 | # se = SubElement(readBranchElement, 'endTime')#ESTO ES LO ULTIMO QUE SE TRABAJO | |
|
198 | # se.text = self.endTime | |
|
199 | # | |
|
200 | # se = SubElement(readBranchElement, 'readMode')#ESTO ES LO ULTIMO QUE SE TRABAJO | |
|
201 | # se.text = str(self.readMode) | |
|
202 | ||
|
203 | ########################################################################## | |
|
204 | se = SubElement(readBranchElement, 'parameter', name='dpath' , value=self.dpath) | |
|
205 | se = SubElement(readBranchElement, 'parameter', name='dataformat', value=self.dataformat) | |
|
206 | se = SubElement(readBranchElement, 'parameter', name='startDate' , value=self.startDate) | |
|
207 | se = SubElement(readBranchElement, 'parameter', name='endDate' , value=self.endDate) | |
|
208 | se = SubElement(readBranchElement, 'parameter', name='startTime' , value=self.startTime) | |
|
209 | se = SubElement(readBranchElement, 'parameter', name='endTime' , value=self.endTime) | |
|
210 | se = SubElement(readBranchElement, 'parameter', name='readMode' , value=str(self.readMode)) | |
|
211 | ||
|
212 | ||
|
213 | class ProcBranch(): | |
|
214 | ||
|
215 | id = None | |
|
216 | name = None | |
|
217 | ||
|
218 | upObjList = None | |
|
219 | ||
|
220 | def __init__(self, id, name): | |
|
221 | ||
|
222 | self.id = id | |
|
223 | self.name = name | |
|
224 | ||
|
225 | self.upObjList = [] | |
|
226 | ||
|
227 | def addUP(self, name, type): | |
|
228 | ||
|
229 | id = len(self.upObjList) + 1 | |
|
230 | ||
|
231 | upObj = UP(id, name, type) | |
|
232 | ||
|
233 | self.upObjList.append(upObj) | |
|
234 | ||
|
235 | return upObj | |
|
236 | ||
|
237 | def makeXml(self, projectElement): | |
|
238 | ||
|
239 | procBranchElement = SubElement(projectElement, 'procBranch') | |
|
240 | procBranchElement.set('id', str(self.id)) | |
|
241 | procBranchElement.set('name', self.name) | |
|
242 | ||
|
243 | for upObj in self.upObjList: | |
|
244 | upObj.makeXml(procBranchElement) | |
|
245 | ||
|
246 | class UP(): | |
|
247 | ||
|
248 | id = None | |
|
249 | name = None | |
|
250 | type = None | |
|
251 | ||
|
252 | opObjList = [] | |
|
253 | ||
|
254 | def __init__(self, id, name, type): | |
|
255 | ||
|
256 | self.id = id | |
|
257 | self.name = name | |
|
258 | self.type = type | |
|
259 | ||
|
260 | self.opObjList = [] | |
|
261 | ||
|
262 | def addOperation(self, name, priority): | |
|
263 | ||
|
264 | id = len(self.opObjList) + 1 | |
|
265 | ||
|
266 | opObj = Operation(id, name, priority) | |
|
267 | ||
|
268 | self.opObjList.append(opObj) | |
|
269 | ||
|
270 | return opObj | |
|
271 | ||
|
272 | def makeXml(self, procBranchElement): | |
|
273 | ||
|
274 | upElement = SubElement(procBranchElement, 'UP') | |
|
275 | upElement.set('id', str(self.id)) | |
|
276 | upElement.set('name', self.name) | |
|
277 | upElement.set('type', self.type) | |
|
278 | ||
|
279 | for opObj in self.opObjList: | |
|
280 | opObj.makeXml(upElement) | |
|
281 | ||
|
282 | class Operation(): | |
|
283 | ||
|
284 | id = 0 | |
|
285 | name = None | |
|
286 | priority = None | |
|
287 | parmObjList = [] | |
|
288 | ||
|
289 | def __init__(self, id, name, priority): | |
|
290 | ||
|
291 | self.id = id | |
|
292 | self.name = name | |
|
293 | self.priority = priority | |
|
294 | ||
|
295 | self.parmObjList = [] | |
|
296 | ||
|
297 | def addParameter(self, name, value): | |
|
298 | ||
|
299 | id = len(self.parmObjList) + 1 | |
|
300 | ||
|
301 | parmObj = Parameter(id, name, value) | |
|
302 | ||
|
303 | self.parmObjList.append(parmObj) | |
|
304 | ||
|
305 | return parmObj | |
|
306 | ||
|
307 | def makeXml(self, upElement): | |
|
308 | ||
|
309 | opElement = SubElement(upElement, 'Operation') | |
|
310 | opElement.set('id', str(self.id)) | |
|
311 | opElement.set('name', self.name) | |
|
312 | opElement.set('priority', str(self.priority)) | |
|
313 | ||
|
314 | for parmObj in self.parmObjList: | |
|
315 | parmObj.makeXml(opElement) | |
|
316 | ||
|
317 | class Parameter(): | |
|
318 | ||
|
319 | id = None | |
|
320 | name = None | |
|
321 | value = None | |
|
322 | ||
|
323 | def __init__(self, id, name, value): | |
|
324 | ||
|
325 | self.id = id | |
|
326 | self.name = name | |
|
327 | self.value = value | |
|
328 | ||
|
329 | def makeXml(self, opElement): | |
|
330 | ||
|
331 | parmElement = SubElement(opElement, 'Parameter') | |
|
332 | parmElement.set('name', self.name) | |
|
333 | parmElement.set('value', self.value) | |
|
334 | ||
|
335 | # se = SubElement(parmElement, 'value')#ESTO ES LO ULTIMO QUE SE TRABAJO | |
|
336 | # se.text = self.value | |
|
337 | ||
|
338 | if __name__ == '__main__': | |
|
339 | ||
|
340 | desc = "Este es un test" | |
|
341 | filename = "test.xml" | |
|
342 | ||
|
343 | workspace = "C:\\Users\\alex\\workspace\\GUIV2.0\\test.xml" | |
|
344 | ||
|
345 | projectObj = Project() | |
|
346 | ||
|
347 | projectObj.setParms(id = '11', name='test01', description=desc) | |
|
348 | ||
|
349 | readBranchObj = projectObj.addReadBranch(dpath='mydata', dataformat='rawdata', readMode=0, startDate='1', endDate='3', startTime='4', endTime='5') | |
|
350 | ||
|
351 | procBranchObj = projectObj.addProcBranch(name='Branch1') | |
|
352 | ||
|
353 | procBranchObj1 = projectObj.addProcBranch(name='Branch2') | |
|
354 | upObj1 = procBranchObj.addUP(name='UP1', type='Voltage') | |
|
355 | upObj2 = procBranchObj.addUP(name='UP2', type='Voltage') | |
|
356 | ||
|
357 | opObj11 = upObj1.addOperation(name='removeDC', priority=1) | |
|
358 | opObj11.addParameter(name='type', value='1') | |
|
359 | ||
|
360 | ||
|
361 | opObj12 = upObj1.addOperation(name='decodification', priority=2) | |
|
362 | opObj12.addParameter(name='ncode', value='2') | |
|
363 | opObj12.addParameter(name='nbauds', value='8') | |
|
364 | opObj12.addParameter(name='code1', value='001110011') | |
|
365 | opObj12.addParameter(name='code2', value='001110011') | |
|
366 | ||
|
367 | projectObj.writeXml(filename) | |
|
368 | ||
|
369 | projectObj.readXml(workspace) | |
|
370 | No newline at end of file |
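For reference, the test block above produces roughly the following structure in test.xml (a sketch reconstructed from the element and attribute names used in the makeXml methods; ElementTree writes it without indentation, and some parameters are elided here):

    <Project id="11" name="test01">
        <description description="Este es un test" />
        <readBranch id="1">
            <parameter name="dpath" value="mydata" />
            <parameter name="dataformat" value="rawdata" />
            ...
            <parameter name="readMode" value="0" />
        </readBranch>
        <procBranch id="1" name="Branch1">
            <UP id="1" name="UP1" type="Voltage">
                <Operation id="1" name="removeDC" priority="1">
                    <Parameter name="type" value="1" />
                </Operation>
                <Operation id="2" name="decodification" priority="2">
                    ...
                </Operation>
            </UP>
            <UP id="2" name="UP2" type="Voltage" />
        </procBranch>
        <procBranch id="2" name="Branch2" />
    </Project>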
@@ -0,0 +1,358 | |||
|
1 | ''' | |
|
2 | Created on September, 2012 | |
|
3 | @author: | |
|
4 | ''' | |
|
5 | from xml.etree.ElementTree import Element, SubElement, ElementTree | |
|
6 | from element import prettify | |
|
7 | from xml.etree import ElementTree as ET | |
|
8 | import sys | |
|
9 | ||
|
10 | class Project(): | |
|
11 | ||
|
12 | id = None | |
|
13 | name = None | |
|
14 | description = None | |
|
15 | readBranchObjList = None | |
|
16 | procBranchObjList = None | |
|
17 | ||
|
18 | def __init__(self): | |
|
19 | ||
|
20 | # self.id = id | |
|
21 | # self.name = name | |
|
22 | # self.description = description | |
|
23 | ||
|
24 | self.readBranchObjList = [] | |
|
25 | self.procBranchObjList = [] | |
|
26 | ||
|
27 | def setParms(self, id, name, description): | |
|
28 | ||
|
29 | self.id = id | |
|
30 | self.name = name | |
|
31 | self.description = description | |
|
32 | ||
|
33 | def addReadBranch(self,id, dpath, dataformat, opMode,readMode, startDate='', endDate='', startTime='', endTime=''): | |
|
34 | ||
|
35 | #id = len(self.readBranchObjList) + 1 | |
|
36 | ||
|
37 | readBranchObj = ReadBranch(id, dpath, dataformat, opMode , readMode, startDate, endDate, startTime, endTime) | |
|
38 | ||
|
39 | self.readBranchObjList.append(readBranchObj) | |
|
40 | ||
|
41 | return readBranchObj | |
|
42 | ||
|
43 | def addProcBranch(self, id,name): | |
|
44 | ||
|
45 | # id = len(self.procBranchObjList) + 1 | |
|
46 | ||
|
47 | procBranchObj = ProcBranch(id, name) | |
|
48 | ||
|
49 | self.procBranchObjList.append(procBranchObj) | |
|
50 | ||
|
51 | return procBranchObj | |
|
52 | ||
|
53 | def makeXml(self): | |
|
54 | ||
|
55 | projectElement = Element('Project') | |
|
56 | projectElement.set('id', str(self.id)) | |
|
57 | projectElement.set('name', self.name) | |
|
58 | #projectElement.set('description', self.description) | |
|
59 | ||
|
60 | se = SubElement(projectElement, 'description', description=self.description) #THIS IS THE LAST THING THAT WAS WORKED ON | |
|
61 | #se.text = self.description #LAST MODIFICATION, TO PULL THE DESCRIPTION OUT AS A SUB-ELEMENT | |
|
62 | ||
|
63 | for readBranchObj in self.readBranchObjList: | |
|
64 | readBranchObj.makeXml(projectElement) | |
|
65 | ||
|
66 | for branchObj in self.procBranchObjList: | |
|
67 | branchObj.makeXml(projectElement) | |
|
68 | ||
|
69 | self.projectElement = projectElement | |
|
70 | ||
|
71 | def writeXml(self, filename): | |
|
72 | ||
|
73 | self.makeXml() | |
|
74 | ElementTree(self.projectElement).write(filename, method='xml') | |
|
75 | #print prettify(self.projectElement) | |
|
76 | ||
|
77 | class ReadBranch(): | |
|
78 | ||
|
79 | id = None | |
|
80 | dpath = None | |
|
81 | dataformat = None | |
|
82 | opMode =None | |
|
83 | readMode = None | |
|
84 | startDate = None | |
|
85 | endDate = None | |
|
86 | startTime = None | |
|
87 | endTime = None | |
|
88 | ||
|
89 | def __init__(self, id, dpath, dataformat,opMode, readMode, startDate, endDate, startTime, endTime): | |
|
90 | ||
|
91 | self.id = id | |
|
92 | self.dpath = dpath | |
|
93 | self.dataformat = dataformat | |
|
94 | self.opMode = opMode | |
|
95 | self.readMode = readMode | |
|
96 | self.startDate = startDate | |
|
97 | self.endDate = endDate | |
|
98 | self.startTime = startTime | |
|
99 | self.endTime = endTime | |
|
100 | ||
|
101 | def makeXml(self, projectElement): | |
|
102 | ||
|
103 | readBranchElement = SubElement(projectElement, 'readBranch') | |
|
104 | readBranchElement.set('id', str(self.id)) | |
|
105 | ||
|
106 | ########################################################################## | |
|
107 | se = SubElement(readBranchElement, 'parameter', name='dpath' , value=self.dpath) | |
|
108 | se = SubElement(readBranchElement, 'parameter', name='dataformat', value=self.dataformat) | |
|
109 | se = SubElement(readBranchElement, 'parameter', name='opMode' , value=self.opMode) | |
|
110 | se = SubElement(readBranchElement, 'parameter', name='startDate' , value=self.startDate) | |
|
111 | se = SubElement(readBranchElement, 'parameter', name='endDate' , value=self.endDate) | |
|
112 | se = SubElement(readBranchElement, 'parameter', name='startTime' , value=self.startTime) | |
|
113 | se = SubElement(readBranchElement, 'parameter', name='endTime' , value=self.endTime) | |
|
114 | se = SubElement(readBranchElement, 'parameter', name='readMode' , value=str(self.readMode)) | |
|
115 | ||
|
116 | class ProcBranch(): | |
|
117 | ||
|
118 | id = None | |
|
119 | name = None | |
|
120 | ||
|
121 | upObjList = None | |
|
122 | upsubObjList=None | |
|
123 | ||
|
124 | def __init__(self, id, name): | |
|
125 | ||
|
126 | self.id = id | |
|
127 | self.name = name | |
|
128 | ||
|
129 | self.upObjList = [] | |
|
130 | self.upsubObjList = [] | |
|
131 | ||
|
132 | def addUP(self,id, name, type): | |
|
133 | ||
|
134 | #id = len(self.upObjList) + 1 | |
|
135 | ||
|
136 | upObj = UP(id, name, type) | |
|
137 | ||
|
138 | self.upObjList.append(upObj) | |
|
139 | ||
|
140 | return upObj | |
|
141 | ||
|
142 | def addUPSUB(self,id, name, type): | |
|
143 | ||
|
144 | # id = len(self.upsubObjList) + 1 | |
|
145 | ||
|
146 | upsubObj = UPSUB(id, name, type) | |
|
147 | ||
|
148 | self.upsubObjList.append(upsubObj) | |
|
149 | ||
|
150 | return upsubObj | |
|
151 | ||
|
152 | def makeXml(self, projectElement): | |
|
153 | ||
|
154 | procBranchElement = SubElement(projectElement, 'procBranch') | |
|
155 | procBranchElement.set('id', str(self.id)) | |
|
156 | procBranchElement.set('name', self.name) | |
|
157 | ||
|
158 | for upObj in self.upObjList: | |
|
159 | upObj.makeXml(procBranchElement) | |
|
160 | ||
|
161 | for upsubObj in self.upsubObjList: | |
|
162 | upsubObj.makeXml(procBranchElement) | |
|
163 | ||
|
164 | class UP(): | |
|
165 | ||
|
166 | id = None | |
|
167 | name = None | |
|
168 | type = None | |
|
169 | upsubObjList=None | |
|
170 | opObjList = None | |
|
171 | ||
|
172 | def __init__(self, id, name, type): | |
|
173 | ||
|
174 | self.id = id | |
|
175 | self.name = name | |
|
176 | self.type = type | |
|
177 | self.upsubObjList=[] | |
|
178 | self.up2subObjList=[] | |
|
179 | self.opObjList = [] | |
|
180 | ||
|
181 | def addOperation(self,id, name, priority): | |
|
182 | ||
|
183 | #id = len(self.opObjList) + 1 | |
|
184 | ||
|
185 | opObj = Operation(id, name, priority) | |
|
186 | ||
|
187 | self.opObjList.append(opObj) | |
|
188 | ||
|
189 | return opObj | |
|
190 | ||
|
191 | def addUPSUB(self,id, name, type): | |
|
192 | ||
|
193 | # id = len(self.upsubObjList) + 1 | |
|
194 | ||
|
195 | upsubObj = UPSUB(id, name, type) | |
|
196 | ||
|
197 | self.upsubObjList.append(upsubObj) | |
|
198 | ||
|
199 | return upsubObj | |
|
200 | ||
|
201 | def addUP2SUB(self,id, name, type): | |
|
202 | ||
|
203 | # id = len(self.upsubObjList) + 1 | |
|
204 | ||
|
205 | up2subObj = UP2SUB(id, name, type) | |
|
206 | ||
|
207 | self.up2subObjList.append(up2subObj) | |
|
208 | ||
|
209 | return up2subObj | |
|
210 | ||
|
211 | def makeXml(self, procBranchElement): | |
|
212 | ||
|
213 | upElement = SubElement(procBranchElement, 'UP') | |
|
214 | upElement.set('id', str(self.id)) | |
|
215 | upElement.set('name', self.name) | |
|
216 | upElement.set('type', self.type) | |
|
217 | ||
|
218 | for opObj in self.opObjList: | |
|
219 | opObj.makeXml(upElement) | |
|
220 | ||
|
221 | for upsubObj in self.upsubObjList: | |
|
222 | upsubObj.makeXml(upElement) | |
|
223 | ||
|
224 | class UPSUB(): | |
|
225 | ||
|
226 | id = None | |
|
227 | name = None | |
|
228 | type = None | |
|
229 | opObjList = None | |
|
230 | up2subObjList=None | |
|
231 | ||
|
232 | ||
|
233 | def __init__(self, id, name, type): | |
|
234 | ||
|
235 | self.id = id | |
|
236 | self.name = name | |
|
237 | self.type = type | |
|
238 | self.up2subObjList = [] | |
|
239 | self.opObjList = [] | |
|
240 | ||
|
241 | def addOperation(self, name, priority): | |
|
242 | ||
|
243 | id = len(self.opObjList) + 1 | |
|
244 | ||
|
245 | opObj = Operation(id, name, priority) | |
|
246 | ||
|
247 | self.opObjList.append(opObj) | |
|
248 | ||
|
249 | return opObj | |
|
250 | ||
|
251 | ||
|
252 | def addUP2SUB(self,id, name, type): | |
|
253 | # | |
|
254 | # id = len(self.opObjList) + 1 | |
|
255 | up2subObj = UP2SUB(id, name, type) | |
|
256 | ||
|
257 | self.up2subObjList.append(up2subObj) | |
|
258 | ||
|
259 | return up2subObj | |
|
260 | ||
|
261 | def makeXml(self, upElement): | |
|
262 | ||
|
263 | upsubElement = SubElement(upElement, 'UPSUB') | |
|
264 | upsubElement.set('id', str(self.id)) | |
|
265 | upsubElement.set('name', self.name) | |
|
266 | upsubElement.set('type', self.type) | |
|
267 | ||
|
268 | for opObj in self.opObjList: | |
|
269 | opObj.makeXml(upsubElement) | |
|
270 | ||
|
271 | for up2subObj in self.up2subObjList: | |
|
272 | up2subObj.makeXml(upsubElement) | |
|
273 | ||
|
274 | class UP2SUB(): | |
|
275 | ||
|
276 | id = None | |
|
277 | name = None | |
|
278 | type = None | |
|
279 | opObjList = None | |
|
280 | ||
|
281 | def __init__(self, id, name, type): | |
|
282 | ||
|
283 | self.id = id | |
|
284 | self.name = name | |
|
285 | self.type = type | |
|
286 | self.opObjList = [] | |
|
287 | ||
|
288 | def addOperation(self, name, priority): | |
|
289 | ||
|
290 | id = len(self.opObjList) + 1 | |
|
291 | ||
|
292 | opObj = Operation(id, name, priority) | |
|
293 | ||
|
294 | self.opObjList.append(opObj) | |
|
295 | ||
|
296 | return opObj | |
|
297 | ||
|
298 | def makeXml(self,upsubElement): | |
|
299 | up2subElement = SubElement(upsubElement, 'UP2SUB') | |
|
300 | up2subElement.set('id', str(self.id)) | |
|
301 | up2subElement.set('name', self.name) | |
|
302 | up2subElement.set('type', self.type) | |
|
303 | ||
|
304 | for opObj in self.opObjList: | |
|
305 | opObj.makeXml(up2subElement) | |
|
306 | ||
|
307 | class Operation(): | |
|
308 | ||
|
309 | id = 0 | |
|
310 | name = None | |
|
311 | priority = None | |
|
312 | parmObjList = [] | |
|
313 | ||
|
314 | def __init__(self, id, name, priority): | |
|
315 | ||
|
316 | self.id = id | |
|
317 | self.name = name | |
|
318 | self.priority = priority | |
|
319 | ||
|
320 | self.parmObjList = [] | |
|
321 | ||
|
322 | def addParameter(self, name, value): | |
|
323 | ||
|
324 | id = len(self.parmObjList) + 1 | |
|
325 | ||
|
326 | parmObj = Parameter(id, name, value) | |
|
327 | ||
|
328 | self.parmObjList.append(parmObj) | |
|
329 | ||
|
330 | return parmObj | |
|
331 | ||
|
332 | def makeXml(self, upElement): | |
|
333 | ||
|
334 | opElement = SubElement(upElement, 'Operation') | |
|
335 | opElement.set('id', str(self.id)) | |
|
336 | opElement.set('name', self.name) | |
|
337 | opElement.set('priority', str(self.priority)) | |
|
338 | ||
|
339 | for parmObj in self.parmObjList: | |
|
340 | parmObj.makeXml(opElement) | |
|
341 | ||
|
342 | class Parameter(): | |
|
343 | ||
|
344 | id = None | |
|
345 | name = None | |
|
346 | value = None | |
|
347 | ||
|
348 | def __init__(self, id, name, value): | |
|
349 | ||
|
350 | self.id = id | |
|
351 | self.name = name | |
|
352 | self.value = value | |
|
353 | ||
|
354 | def makeXml(self, opElement): | |
|
355 | ||
|
356 | parmElement = SubElement(opElement, 'Parameter') | |
|
357 | parmElement.set('name', self.name) | |
|
358 | parmElement.set('value', self.value) | |
\ No newline at end of file
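Unlike the first version of the controller, this revision expects the caller to assign every id explicitly (the auto-increment lines are commented out). A minimal usage sketch under that assumption; the ids, parameter values, and output file name here are hypothetical:

    projectObj = Project()
    projectObj.setParms(id='191', name='test01', description='Test project')

    readBranchObj = projectObj.addReadBranch(id='1', dpath='mydata', dataformat='rawdata',
                                             opMode='0', readMode='1',
                                             startDate='2012/01/01', endDate='2012/01/02')

    procBranchObj = projectObj.addProcBranch(id='2', name='Branch1')
    upObj = procBranchObj.addUP(id='1', name='UP1', type='Voltage')
    upsubObj = upObj.addUPSUB(id='2', name='UPSUB1', type='Spectra')

    opObj = upsubObj.addOperation(name='removeDC', priority=1)
    opObj.addParameter(name='type', value='1')

    projectObj.writeXml('test2.xml')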
@@ -0,0 +1,241 | |||
|
1 | ''' | |
|
2 | ||
|
3 | $Author: murco $ | |
|
4 | $Id: JROData.py 173 2012-11-20 15:06:21Z murco $ | |
|
5 | ''' | |
|
6 | ||
|
7 | import os, sys | |
|
8 | import copy | |
|
9 | import numpy | |
|
10 | ||
|
11 | from JROHeaderIO import SystemHeader, RadarControllerHeader | |
|
12 | ||
|
13 | class JROData: | |
|
14 | ||
|
15 | # m_BasicHeader = BasicHeader() | |
|
16 | # m_ProcessingHeader = ProcessingHeader() | |
|
17 | ||
|
18 | systemHeaderObj = SystemHeader() | |
|
19 | ||
|
20 | radarControllerHeaderObj = RadarControllerHeader() | |
|
21 | ||
|
22 | # data = None | |
|
23 | ||
|
24 | type = None | |
|
25 | ||
|
26 | dtype = None | |
|
27 | ||
|
28 | nChannels = None | |
|
29 | ||
|
30 | nHeights = None | |
|
31 | ||
|
32 | nProfiles = None | |
|
33 | ||
|
34 | heightList = None | |
|
35 | ||
|
36 | channelList = None | |
|
37 | ||
|
38 | channelIndexList = None | |
|
39 | ||
|
40 | flagNoData = True | |
|
41 | ||
|
42 | flagTimeBlock = False | |
|
43 | ||
|
44 | utctime = None | |
|
45 | ||
|
46 | blocksize = None | |
|
47 | ||
|
48 | nCode = None | |
|
49 | ||
|
50 | nBaud = None | |
|
51 | ||
|
52 | code = None | |
|
53 | ||
|
54 | flagDecodeData = True #assume the data is already decoded | |
|
55 | ||
|
56 | flagDeflipData = True #assume the data has no flip applied | |
|
57 | ||
|
58 | flagShiftFFT = False | |
|
59 | ||
|
60 | ippSeconds = None | |
|
61 | ||
|
62 | timeInterval = None | |
|
63 | ||
|
64 | def __init__(self): | |
|
65 | ||
|
66 | raise ValueError, "This class has not been implemented" | |
|
67 | ||
|
68 | def copy(self, inputObj=None): | |
|
69 | ||
|
70 | if inputObj == None: | |
|
71 | return copy.deepcopy(self) | |
|
72 | ||
|
73 | for key in inputObj.__dict__.keys(): | |
|
74 | self.__dict__[key] = inputObj.__dict__[key] | |
|
75 | ||
|
76 | def deepcopy(self): | |
|
77 | ||
|
78 | return copy.deepcopy(self) | |
|
79 | ||
|
80 | class Voltage(JROData): | |
|
81 | ||
|
82 | nCohInt = None | |
|
83 | ||
|
84 | #data is a 2-dimensional numpy array (channels, heights) | |
|
85 | data = None | |
|
86 | ||
|
87 | def __init__(self): | |
|
88 | ''' | |
|
89 | Constructor | |
|
90 | ''' | |
|
91 | ||
|
92 | self.radarControllerHeaderObj = RadarControllerHeader() | |
|
93 | ||
|
94 | self.systemHeaderObj = SystemHeader() | |
|
95 | ||
|
96 | self.type = "Voltage" | |
|
97 | ||
|
98 | self.data = None | |
|
99 | ||
|
100 | self.dtype = None | |
|
101 | ||
|
102 | self.nChannels = 0 | |
|
103 | ||
|
104 | self.nHeights = 0 | |
|
105 | ||
|
106 | self.nProfiles = None | |
|
107 | ||
|
108 | self.heightList = None | |
|
109 | ||
|
110 | self.channelList = None | |
|
111 | ||
|
112 | self.channelIndexList = None | |
|
113 | ||
|
114 | self.flagNoData = True | |
|
115 | ||
|
116 | self.flagTimeBlock = False | |
|
117 | ||
|
118 | self.utctime = None | |
|
119 | ||
|
120 | self.nCohInt = None | |
|
121 | ||
|
122 | self.blocksize = None | |
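A short sketch of how JROData.copy() is meant to be used, shown with the Voltage subclass defined above (JROData itself refuses to be instantiated): called without arguments it returns an independent deep copy, and called with inputObj it copies that object's attributes into self.

    voltageObj = Voltage()
    voltageObj.nChannels = 8

    newObj = voltageObj.copy()     # independent deep copy of voltageObj
    newObj.nChannels = 4           # does not affect voltageObj

    otherObj = Voltage()
    otherObj.copy(voltageObj)      # otherObj now mirrors voltageObj's attributes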
|
123 | ||
|
124 | class Spectra(JROData): | |
|
125 | ||
|
126 | #data_spc is a 3-dimensional numpy array (channels, profiles, heights) | |
|
127 | data_spc = None | |
|
128 | ||
|
129 | #data_cspc is a 3-dimensional numpy array (channels, pairs, heights) | |
|
130 | data_cspc = None | |
|
131 | ||
|
132 | #data_dc is a 2-dimensional numpy array (channels, heights) | |
|
133 | data_dc = None | |
|
134 | ||
|
135 | nFFTPoints = None | |
|
136 | ||
|
137 | nPairs = None | |
|
138 | ||
|
139 | pairsList = None | |
|
140 | ||
|
141 | nIncohInt = None | |
|
142 | ||
|
143 | wavelength = None #needed to compute the velocity range from the frequency range | |
|
144 | ||
|
145 | nCohInt = None #required to determine the value of timeInterval | |
|
146 | ||
|
147 | def __init__(self): | |
|
148 | ''' | |
|
149 | Constructor | |
|
150 | ''' | |
|
151 | ||
|
152 | self.radarControllerHeaderObj = RadarControllerHeader() | |
|
153 | ||
|
154 | self.systemHeaderObj = SystemHeader() | |
|
155 | ||
|
156 | self.type = "Spectra" | |
|
157 | ||
|
158 | # self.data = None | |
|
159 | ||
|
160 | self.dtype = None | |
|
161 | ||
|
162 | self.nChannels = 0 | |
|
163 | ||
|
164 | self.nHeights = 0 | |
|
165 | ||
|
166 | self.nProfiles = None | |
|
167 | ||
|
168 | self.heightList = None | |
|
169 | ||
|
170 | self.channelList = None | |
|
171 | ||
|
172 | self.channelIndexList = None | |
|
173 | ||
|
174 | self.flagNoData = True | |
|
175 | ||
|
176 | self.flagTimeBlock = False | |
|
177 | ||
|
178 | self.utctime = None | |
|
179 | ||
|
180 | self.nIncohInt = None | |
|
181 | ||
|
182 | self.blocksize = None | |
|
183 | ||
|
184 | self.nFFTPoints = None | |
|
185 | ||
|
186 | self.wavelength = None | |
|
187 | ||
|
188 | def getFrequencies(self): | |
|
189 | ||
|
190 | freqrange = numpy.arange(self.nFFTPoints) #renamed to avoid shadowing the xrange builtin | |
|
191 | #TODO: scale freqrange to physical units; this method is still a stub | |
|
192 | return None | |
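getFrequencies is still a stub. One plausible way to build a centered Doppler frequency axis, assuming ippSeconds is the effective time step between the profiles that feed the FFT (an assumption; the final scaling is not defined anywhere in this file):

    import numpy

    def getFrequencies(nFFTPoints, ippSeconds):
        #centered frequency axis in Hz for nFFTPoints samples spaced ippSeconds apart
        return numpy.fft.fftshift(numpy.fft.fftfreq(nFFTPoints, d=ippSeconds))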
|
193 | ||
|
194 | ||
|
195 | class SpectraHeis(JROData): | |
|
196 | ||
|
197 | data_spc = None | |
|
198 | ||
|
199 | data_cspc = None | |
|
200 | ||
|
201 | data_dc = None | |
|
202 | ||
|
203 | nFFTPoints = None | |
|
204 | ||
|
205 | nPairs = None | |
|
206 | ||
|
207 | pairsList = None | |
|
208 | ||
|
209 | nIncohInt = None | |
|
210 | ||
|
211 | def __init__(self): | |
|
212 | ||
|
213 | self.radarControllerHeaderObj = RadarControllerHeader() | |
|
214 | ||
|
215 | self.systemHeaderObj = SystemHeader() | |
|
216 | ||
|
217 | self.type = "SpectraHeis" | |
|
218 | ||
|
219 | self.dtype = None | |
|
220 | ||
|
221 | self.nChannels = 0 | |
|
222 | ||
|
223 | self.nHeights = 0 | |
|
224 | ||
|
225 | self.nProfiles = None | |
|
226 | ||
|
227 | self.heightList = None | |
|
228 | ||
|
229 | self.channelList = None | |
|
230 | ||
|
231 | self.channelIndexList = None | |
|
232 | ||
|
233 | self.flagNoData = True | |
|
234 | ||
|
235 | self.flagTimeBlock = False | |
|
236 | ||
|
237 | self.nPairs = 0 | |
|
238 | ||
|
239 | self.utctime = None | |
|
240 | ||
|
241 | self.blocksize = None |
@@ -0,0 +1,2461 | |||
|
1 | ''' | |
|
2 | ||
|
3 | $Author: murco $ | |
|
4 | $Id: JRODataIO.py 169 2012-11-19 21:57:03Z murco $ | |
|
5 | ''' | |
|
6 | ||
|
7 | import os, sys | |
|
8 | import glob | |
|
9 | import time | |
|
10 | import numpy | |
|
11 | import fnmatch | |
|
12 | import time, datetime | |
|
13 | ||
|
14 | from Data.JROData import * | |
|
15 | from JROHeaderIO import * | |
|
16 | ||
|
17 | def isNumber(str): | |
|
18 | """ | |
|
19 | Checks whether the characters of a string can be converted to a number. | |

20 | ||

21 | Exceptions: | |

22 | Raised internally when the string cannot be converted to a number (handled here) | |

23 | Input: | |

24 | str, the string that is analyzed to determine whether it is convertible to a number or not | |

25 | ||

26 | Return: | |

27 | True : the string is numeric | |

28 | False : the string is not numeric | |
|
29 | """ | |
|
30 | try: | |
|
31 | float( str ) | |
|
32 | return True | |
|
33 | except: | |
|
34 | return False | |
|
35 | ||
|
36 | def isThisFileinRange(filename, startUTSeconds, endUTSeconds): | |
|
37 | """ | |
|
38 | This function determines whether a data file falls inside the specified date range. | |

39 | ||

40 | Inputs: | |

41 | filename : full name of the data file in Jicamarca format (.r) | |

42 | ||

43 | startUTSeconds : start date of the selected range, given in | |

44 | seconds counted from 01/01/1970. | |

45 | endUTSeconds : end date of the selected range, given in | |

46 | seconds counted from 01/01/1970. | |

47 | ||

48 | Return: | |

49 | Boolean : returns True if the data file contains data within the specified | |

50 | date range, otherwise returns False. | |

51 | ||

52 | Exceptions: | |

53 | If the file does not exist or cannot be opened | |

54 | If the header cannot be read. | |
|
55 | ||
|
56 | """ | |
|
57 | basicHeaderObj = BasicHeader() | |
|
58 | ||
|
59 | try: | |
|
60 | fp = open(filename,'rb') | |
|
61 | except: | |
|
62 | raise IOError, "The file %s can't be opened" %(filename) | |
|
63 | ||
|
64 | sts = basicHeaderObj.read(fp) | |
|
65 | fp.close() | |
|
66 | ||
|
67 | if not(sts): | |
|
68 | print "Skipping the file %s because it has not a valid header" %(filename) | |
|
69 | return 0 | |
|
70 | ||
|
71 | if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)): | |
|
72 | return 0 | |
|
73 | ||
|
74 | return 1 | |
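A usage sketch with a hypothetical file name; the UT-second bounds are built the same way __searchFilesOffLine does it further below, with time.mktime over local datetimes:

    import time, datetime

    startDT = datetime.datetime(2012, 10, 26, 6, 0, 0)
    endDT = datetime.datetime(2012, 10, 26, 18, 0, 0)

    startUTSeconds = time.mktime(startDT.timetuple())
    endUTSeconds = time.mktime(endDT.timetuple())

    if isThisFileinRange("D2012300001.r", startUTSeconds, endUTSeconds):
        print "the file has data inside the requested window"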
|
75 | ||
|
76 | def getlastFileFromPath(path, ext): | |
|
77 | """ | |
|
78 | Filters the file list, keeping only the names that match the "PYYYYDDDSSS.ext" format, | |

79 | and returns the last file of the list that remains after the filtering. | |

80 | ||

81 | Input: | |

82 | path : folder containing the data files | |

83 | ext : extension of the files contained in the folder | |

84 | ||

85 | Return: | |

86 | The last file of the folder, without the path. | |
|
87 | """ | |
|
88 | validFilelist = [] | |
|
89 | fileList = os.listdir(path) | |
|
90 | ||
|
91 | # 0 1234 567 89A BCDE | |
|
92 | # H YYYY DDD SSS .ext | |
|
93 | ||
|
94 | for file in fileList: | |
|
95 | try: | |
|
96 | year = int(file[1:5]) | |
|
97 | doy = int(file[5:8]) | |
|
98 | ||
|
99 | if (os.path.splitext(file)[-1].upper() != ext.upper()) : continue | |
|
100 | except: | |
|
101 | continue | |
|
102 | ||
|
103 | validFilelist.append(file) | |
|
104 | ||
|
105 | if validFilelist: | |
|
106 | validFilelist = sorted( validFilelist, key=str.lower ) | |
|
107 | return validFilelist[-1] | |
|
108 | ||
|
109 | return None | |
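For example, with a hypothetical day folder whose contents are D2012300001.r, D2012300002.r, D2012300010.r and readme.txt, only the names matching the xYYYYDDDSSS.ext pattern survive the filtering, and the last one after the case-insensitive sort is returned:

    lastFile = getlastFileFromPath("/data/D2012300", ".r")
    print lastFile    # -> D2012300010.r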
|
110 | ||
|
111 | def checkForRealPath(path, year, doy, set, ext): | |
|
112 | """ | |
|
113 | Por ser Linux Case Sensitive entonces checkForRealPath encuentra el nombre correcto de un path, | |
|
114 | Prueba por varias combinaciones de nombres entre mayusculas y minusculas para determinar | |
|
115 | el path exacto de un determinado file. | |
|
116 | ||
|
117 | Example : | |
|
118 | nombre correcto del file es .../.../D2009307/P2009307367.ext | |
|
119 | ||
|
120 | Entonces la funcion prueba con las siguientes combinaciones | |
|
121 | .../.../x2009307/y2009307367.ext | |
|
122 | .../.../x2009307/Y2009307367.ext | |
|
123 | .../.../X2009307/y2009307367.ext | |
|
124 | .../.../X2009307/Y2009307367.ext | |
|
125 | siendo para este caso, la ultima combinacion de letras, identica al file buscado | |
|
126 | ||
|
127 | Return: | |
|
128 | Si encuentra la cobinacion adecuada devuelve el path completo y el nombre del file | |
|
129 | caso contrario devuelve None como path y el la ultima combinacion de nombre en mayusculas | |
|
130 | para el filename | |
|
131 | """ | |
|
132 | filepath = None | |
|
133 | find_flag = False | |
|
134 | filename = None | |
|
135 | ||
|
136 | if ext.lower() == ".r": #voltage | |
|
137 | header1 = "dD" | |
|
138 | header2 = "dD" | |
|
139 | elif ext.lower() == ".pdata": #spectra | |
|
140 | header1 = "dD" | |
|
141 | header2 = "pP" | |
|
142 | else: | |
|
143 | return None, filename | |
|
144 | ||
|
145 | for dir in header1: #loop over the two possible cases of the directory prefix | |

146 | for fil in header2: #loop over the two possible cases of the file prefix | |

147 | doypath = "%s%04d%03d" % ( dir, year, doy ) #build the directory name xYYYYDDD (x=d or x=D) | |

148 | filename = "%s%04d%03d%03d%s" % ( fil, year, doy, set, ext ) #build the file name xYYYYDDDSSS.ext | |

149 | filepath = os.path.join( path, doypath, filename ) #build the full path | |

150 | if os.path.exists( filepath ): #check that it exists | |
|
151 | find_flag = True | |
|
152 | break | |
|
153 | if find_flag: | |
|
154 | break | |
|
155 | ||
|
156 | if not(find_flag): | |
|
157 | return None, filename | |
|
158 | ||
|
159 | return filepath, filename | |
|
160 | ||
|
161 | class JRODataIO: | |
|
162 | ||
|
163 | c = 3E8 | |
|
164 | ||
|
165 | basicHeaderObj = BasicHeader() | |
|
166 | ||
|
167 | systemHeaderObj = SystemHeader() | |
|
168 | ||
|
169 | radarControllerHeaderObj = RadarControllerHeader() | |
|
170 | ||
|
171 | processingHeaderObj = ProcessingHeader() | |
|
172 | ||
|
173 | online = 0 | |
|
174 | ||
|
175 | dtype = None | |
|
176 | ||
|
177 | pathList = [] | |
|
178 | ||
|
179 | filenameList = [] | |
|
180 | ||
|
181 | filename = None | |
|
182 | ||
|
183 | ext = None | |
|
184 | ||
|
185 | flagNoMoreFiles = 0 | |
|
186 | ||
|
187 | flagIsNewFile = 1 | |
|
188 | ||
|
189 | flagTimeBlock = 0 | |
|
190 | ||
|
191 | flagIsNewBlock = 0 | |
|
192 | ||
|
193 | fp = None | |
|
194 | ||
|
195 | firstHeaderSize = 0 | |
|
196 | ||
|
197 | basicHeaderSize = 24 | |
|
198 | ||
|
199 | versionFile = 1103 | |
|
200 | ||
|
201 | fileSize = None | |
|
202 | ||
|
203 | ippSeconds = None | |
|
204 | ||
|
205 | fileSizeByHeader = None | |
|
206 | ||
|
207 | fileIndex = None | |
|
208 | ||
|
209 | profileIndex = None | |
|
210 | ||
|
211 | blockIndex = None | |
|
212 | ||
|
213 | nTotalBlocks = None | |
|
214 | ||
|
215 | maxTimeStep = 30 | |
|
216 | ||
|
217 | lastUTTime = None | |
|
218 | ||
|
219 | datablock = None | |
|
220 | ||
|
221 | dataOutObj = None | |
|
222 | ||
|
223 | blocksize = None | |
|
224 | ||
|
225 | def __init__(self): | |
|
226 | pass | |
|
227 | ||
|
228 | class JRODataReader(JRODataIO): | |
|
229 | ||
|
230 | nReadBlocks = 0 | |
|
231 | ||
|
232 | delay = 60 #number of seconds to wait for a new file | |

233 | ||

234 | nTries = 3 #number of retries | |

235 | ||

236 | nFiles = 3 #number of files to search ahead | |
|
237 | ||
|
238 | ||
|
239 | def __init__(self): | |
|
240 | ||
|
241 | """ | |
|
242 | ||
|
243 | """ | |
|
244 | ||
|
245 | raise ValueError, "This method has not been implemented" | |
|
246 | ||
|
247 | ||
|
248 | def createObjByDefault(self): | |
|
249 | """ | |
|
250 | ||
|
251 | """ | |
|
252 | raise ValueError, "This method has not been implemented" | |
|
253 | ||
|
254 | def getBlockDimension(self): | |
|
255 | ||
|
256 | raise ValueError, "No implemented" | |
|
257 | ||
|
258 | def __searchFilesOffLine(self, | |
|
259 | path, | |
|
260 | startDate, | |
|
261 | endDate, | |
|
262 | startTime=datetime.time(0,0,0), | |
|
263 | endTime=datetime.time(23,59,59), | |
|
264 | set=None, | |
|
265 | expLabel="", | |
|
266 | ext=".r"): | |
|
267 | dirList = [] | |
|
268 | for thisPath in os.listdir(path): | |
|
269 | if os.path.isdir(os.path.join(path,thisPath)): | |
|
270 | dirList.append(thisPath) | |
|
271 | ||
|
272 | if not(dirList): | |
|
273 | return None, None | |
|
274 | ||
|
275 | pathList = [] | |
|
276 | dateList = [] | |
|
277 | ||
|
278 | thisDate = startDate | |
|
279 | ||
|
280 | while(thisDate <= endDate): | |
|
281 | year = thisDate.timetuple().tm_year | |
|
282 | doy = thisDate.timetuple().tm_yday | |
|
283 | ||
|
284 | match = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy)) | |
|
285 | if len(match) == 0: | |
|
286 | thisDate += datetime.timedelta(1) | |
|
287 | continue | |
|
288 | ||
|
289 | pathList.append(os.path.join(path,match[0],expLabel)) | |
|
290 | dateList.append(thisDate) | |
|
291 | thisDate += datetime.timedelta(1) | |
|
292 | ||
|
293 | filenameList = [] | |
|
294 | for index in range(len(pathList)): | |
|
295 | ||
|
296 | thisPath = pathList[index] | |
|
297 | fileList = glob.glob1(thisPath, "*%s" %ext) | |
|
298 | fileList.sort() | |
|
299 | ||
|
300 | #Search for data within the specified time range | |
|
301 | thisDate = dateList[index] | |
|
302 | startDT = datetime.datetime.combine(thisDate, startTime) | |
|
303 | endDT = datetime.datetime.combine(thisDate, endTime) | |
|
304 | ||
|
305 | startUtSeconds = time.mktime(startDT.timetuple()) | |
|
306 | endUtSeconds = time.mktime(endDT.timetuple()) | |
|
307 | ||
|
308 | for file in fileList: | |
|
309 | ||
|
310 | filename = os.path.join(thisPath,file) | |
|
311 | ||
|
312 | if isThisFileinRange(filename, startUtSeconds, endUtSeconds): | |
|
313 | filenameList.append(filename) | |
|
314 | ||
|
315 | if not(filenameList): | |
|
316 | return None, None | |
|
317 | ||
|
318 | self.filenameList = filenameList | |
|
319 | ||
|
320 | return pathList, filenameList | |
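The offline search therefore assumes one folder per day named xYYYYDDD (the fnmatch pattern '?YYYYDDD' accepts any single prefix character), each holding that day's data files, optionally under an expLabel subfolder. A hypothetical layout that would be picked up for startDate 2012-10-26, endDate 2012-10-27 and ext '.r':

    /data/
        D2012300/
            D2012300000.r
            D2012300001.r
        D2012301/
            D2012301000.r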
|
321 | ||
|
322 | def __searchFilesOnLine(self, path, startDate=None, endDate=None, startTime=None, endTime=None, expLabel = "", ext = None): | |
|
323 | ||
|
324 | """ | |
|
325 | Searches for the last file of the last folder (whether or not it is constrained by startDateTime) and | |

326 | returns the file found, together with some additional information. | |

327 | ||

328 | Input: | |

329 | path : folder that contains the data files | |

330 | ||

331 | startDate : start date. Rejects every directory where | |

332 | file end time < startDate (datetime.date object) | |

333 | ||

334 | endDate : end date. Rejects every directory where | |

335 | file start time > endDate (datetime.date object) | |

336 | ||

337 | startTime : start time. Rejects every file where | |

338 | file end time < startTime (datetime.time object) | |

339 | ||

340 | endTime : end time. Rejects every file where | |

341 | file start time > endTime (datetime.time object) | |

342 | ||

343 | expLabel : name of the sub-experiment (subfolder) | |

344 | ||

345 | ext : extension of the files | |

346 | ||

347 | Return: | |

348 | directory : the directory where the found file is located | |

349 | filename : the last file of the folder | |

350 | year : the year | |

351 | doy : the day of the year | |

352 | set : the set number of the file | |
|
353 | ||
|
354 | ||
|
355 | """ | |
|
356 | dirList = [] | |
|
357 | pathList = [] | |
|
358 | directory = None | |
|
359 | ||
|
360 | #Keep only the directories | |
|
361 | for thisPath in os.listdir(path): | |
|
362 | if os.path.isdir(os.path.join(path, thisPath)): | |
|
363 | dirList.append(thisPath) | |
|
364 | ||
|
365 | if not(dirList): | |
|
366 | return None, None, None, None, None | |
|
367 | ||
|
368 | dirList = sorted( dirList, key=str.lower ) | |
|
369 | ||
|
370 | if startDate: | |
|
371 | startDateTime = datetime.datetime.combine(startDate, startTime) | |
|
372 | thisDateTime = startDateTime | |
|
373 | if endDate == None: endDateTime = startDateTime | |
|
374 | else: endDateTime = datetime.datetime.combine(endDate, endTime) | |
|
375 | ||
|
376 | while(thisDateTime <= endDateTime): | |
|
377 | year = thisDateTime.timetuple().tm_year | |
|
378 | doy = thisDateTime.timetuple().tm_yday | |
|
379 | ||
|
380 | match = fnmatch.filter(dirList, '?' + '%4.4d%3.3d' % (year,doy)) | |
|
381 | if len(match) == 0: | |
|
382 | thisDateTime += datetime.timedelta(1) | |
|
383 | continue | |
|
384 | ||
|
385 | pathList.append(os.path.join(path,match[0], expLabel)) | |
|
386 | thisDateTime += datetime.timedelta(1) | |
|
387 | ||
|
388 | if not(pathList): | |
|
389 | print "\tNo files in range: %s - %s" %(startDateTime.ctime(), endDateTime.ctime()) | |
|
390 | return None, None, None, None, None | |
|
391 | ||
|
392 | directory = pathList[0] | |
|
393 | ||
|
394 | else: | |
|
395 | directory = dirList[-1] | |
|
396 | directory = os.path.join(path,directory) | |
|
397 | ||
|
398 | filename = getlastFileFromPath(directory, ext) | |
|
399 | ||
|
400 | if not(filename): | |
|
401 | return None, None, None, None, None | |
|
402 | ||
|
403 | if not(self.__verifyFile(os.path.join(directory, filename))): | |
|
404 | return None, None, None, None, None | |
|
405 | ||
|
406 | year = int( filename[1:5] ) | |
|
407 | doy = int( filename[5:8] ) | |
|
408 | set = int( filename[8:11] ) | |
|
409 | ||
|
410 | return directory, filename, year, doy, set | |
|
411 | ||
|
412 | def setup(self,dataOutObj=None, | |
|
413 | path=None, | |
|
414 | startDate=None, | |
|
415 | endDate=None, | |
|
416 | startTime=datetime.time(0,0,0), | |
|
417 | endTime=datetime.time(23,59,59), | |
|
418 | set=0, | |
|
419 | expLabel = "", | |
|
420 | ext = None, | |
|
421 | online = False, | |
|
422 | delay = 60): | |
|
423 | ||
|
424 | if path == None: | |
|
425 | raise ValueError, "The path is not valid" | |
|
426 | ||
|
427 | if ext == None: | |
|
428 | ext = self.ext | |
|
429 | ||
|
430 | if dataOutObj == None: | |
|
431 | dataOutObj = self.createObjByDefault() | |
|
432 | ||
|
433 | self.dataOutObj = dataOutObj | |
|
434 | ||
|
435 | if online: | |
|
436 | print "Searching files in online mode..." | |
|
437 | doypath, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext) | |
|
438 | ||
|
439 | if not(doypath): | |
|
440 | for nTries in range( self.nTries ): | |
|
441 | print '\tWaiting %0.2f sec for a valid file in %s: try %02d ...' % (self.delay, path, nTries+1) | |
|
442 | time.sleep( self.delay ) | |
|
443 | doypath, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext) | |
|
444 | if doypath: | |
|
445 | break | |
|
446 | ||
|
447 | if not(doypath): | |
|
448 | print "There 'isn't valied files in %s" % path | |
|
449 | return None | |
|
450 | ||
|
451 | self.year = year | |
|
452 | self.doy = doy | |
|
453 | self.set = set - 1 | |
|
454 | self.path = path | |
|
455 | ||
|
456 | else: | |
|
457 | print "Searching files in offline mode ..." | |
|
458 | pathList, filenameList = self.__searchFilesOffLine(path, startDate, endDate, startTime, endTime, set, expLabel, ext) | |
|
459 | ||
|
460 | if not(pathList): | |
|
461 | print "No *%s files into the folder %s \nfor the range: %s - %s"%(ext, path, | |
|
462 | datetime.datetime.combine(startDate,startTime).ctime(), | |
|
463 | datetime.datetime.combine(endDate,endTime).ctime()) | |
|
464 | ||
|
465 | sys.exit(-1) | |
|
466 | ||
|
467 | ||
|
468 | self.fileIndex = -1 | |
|
469 | self.pathList = pathList | |
|
470 | self.filenameList = filenameList | |
|
471 | ||
|
472 | self.online = online | |
|
473 | self.delay = delay | |
|
474 | ext = ext.lower() | |
|
475 | self.ext = ext | |
|
476 | ||
|
477 | if not(self.setNextFile()): | |
|
478 | if (startDate!=None) and (endDate!=None): | |
|
479 | print "No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime()) | |
|
480 | elif startDate != None: | |
|
481 | print "No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime()) | |
|
482 | else: | |
|
483 | print "No files" | |
|
484 | ||
|
485 | sys.exit(-1) | |
|
486 | ||
|
487 | # self.updateDataHeader() | |
|
488 | ||
|
489 | return self.dataOutObj | |
|
490 | ||
|
491 | def __setNextFileOffline(self): | |
|
492 | ||
|
493 | idFile = self.fileIndex | |
|
494 | ||
|
495 | while (True): | |
|
496 | idFile += 1 | |
|
497 | if not(idFile < len(self.filenameList)): | |
|
498 | self.flagNoMoreFiles = 1 | |
|
499 | print "No more Files" | |
|
500 | return 0 | |
|
501 | ||
|
502 | filename = self.filenameList[idFile] | |
|
503 | ||
|
504 | if not(self.__verifyFile(filename)): | |
|
505 | continue | |
|
506 | ||
|
507 | fileSize = os.path.getsize(filename) | |
|
508 | fp = open(filename,'rb') | |
|
509 | break | |
|
510 | ||
|
511 | self.flagIsNewFile = 1 | |
|
512 | self.fileIndex = idFile | |
|
513 | self.filename = filename | |
|
514 | self.fileSize = fileSize | |
|
515 | self.fp = fp | |
|
516 | ||
|
517 | print "Setting the file: %s"%self.filename | |
|
518 | ||
|
519 | return 1 | |
|
520 | ||
|
521 | def __setNextFileOnline(self): | |
|
522 | """ | |
|
523 | Searches for the next file that has enough data to be read, inside a specific folder; if | |

524 | no valid file is found it waits for a given time and then looks at the next n possible | |

525 | files. | |
|
526 | ||
|
527 | Affected: | |
|
528 | self.flagIsNewFile | |
|
529 | self.filename | |
|
530 | self.fileSize | |
|
531 | self.fp | |
|
532 | self.set | |
|
533 | self.flagNoMoreFiles | |
|
534 | ||
|
535 | Return: | |
|
536 | 0 : if, after searching, the next valid file could not be found | |

537 | 1 : if the file was opened successfully and is ready to be read | |

538 | ||

539 | Exceptions: | |

540 | If a given file cannot be opened | |
|
541 | """ | |
|
542 | nFiles = 0 | |
|
543 | fileOk_flag = False | |
|
544 | firstTime_flag = True | |
|
545 | ||
|
546 | self.set += 1 | |
|
547 | ||
|
548 | #look for the first available file | |
|
549 | file, filename = checkForRealPath( self.path, self.year, self.doy, self.set, self.ext ) | |
|
550 | if file: | |
|
551 | if self.__verifyFile(file, False): | |
|
552 | fileOk_flag = True | |
|
553 | ||
|
554 | #if no file was found then wait and search again | |
|
555 | if not(fileOk_flag): | |
|
556 | for nFiles in range(self.nFiles+1): #search within the next self.nFiles+1 possible files | |
|
557 | ||
|
558 | if firstTime_flag: #on the first pass, repeat the wait self.nTries times | |
|
559 | tries = self.nTries | |
|
560 | else: | |
|
561 | tries = 1 #after the first pass, try only once | |
|
562 | ||
|
563 | for nTries in range( tries ): | |
|
564 | if firstTime_flag: | |
|
565 | print "\tWaiting %0.2f sec for the file \"%s\" , try %03d ..." % ( self.delay, filename, nTries+1 ) | |
|
566 | time.sleep( self.delay ) | |
|
567 | else: | |
|
568 | print "\tSearching next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext) | |
|
569 | ||
|
570 | file, filename = checkForRealPath( self.path, self.year, self.doy, self.set, self.ext ) | |
|
571 | if file: | |
|
572 | if self.__verifyFile(file): | |
|
573 | fileOk_flag = True | |
|
574 | break | |
|
575 | ||
|
576 | if fileOk_flag: | |
|
577 | break | |
|
578 | ||
|
579 | firstTime_flag = False | |
|
580 | ||
|
581 | print "\tSkipping the file \"%s\" due to this file doesn't exist" % filename | |
|
582 | self.set += 1 | |
|
583 | ||
|
584 | if nFiles == (self.nFiles-1): #if the wanted file was not found, change folder and search in the next one | |
|
585 | self.set = 0 | |
|
586 | self.doy += 1 | |
|
587 | ||
|
588 | if fileOk_flag: | |
|
589 | self.fileSize = os.path.getsize( file ) | |
|
590 | self.filename = file | |
|
591 | self.flagIsNewFile = 1 | |
|
592 | if self.fp != None: self.fp.close() | |
|
593 | self.fp = open(file, 'rb') #data files are binary; open in 'rb' mode as elsewhere in this module | |
|
594 | self.flagNoMoreFiles = 0 | |
|
595 | print 'Setting the file: %s' % file | |
|
596 | else: | |
|
597 | self.fileSize = 0 | |
|
598 | self.filename = None | |
|
599 | self.flagIsNewFile = 0 | |
|
600 | self.fp = None | |
|
601 | self.flagNoMoreFiles = 1 | |
|
602 | print 'No more Files' | |
|
603 | ||
|
604 | return fileOk_flag | |
|
605 | ||
|
606 | ||
|
607 | def setNextFile(self): | |
|
608 | if self.fp != None: | |
|
609 | self.fp.close() | |
|
610 | ||
|
611 | if self.online: | |
|
612 | newFile = self.__setNextFileOnline() | |
|
613 | else: | |
|
614 | newFile = self.__setNextFileOffline() | |
|
615 | ||
|
616 | if not(newFile): | |
|
617 | return 0 | |
|
618 | ||
|
619 | self.__readFirstHeader() | |
|
620 | self.nReadBlocks = 0 | |
|
621 | return 1 | |
|
622 | ||
|
623 | def __setNewBlock(self): | |
|
624 | if self.fp == None: | |
|
625 | return 0 | |
|
626 | ||
|
627 | if self.flagIsNewFile: | |
|
628 | return 1 | |
|
629 | ||
|
630 | self.lastUTTime = self.basicHeaderObj.utc | |
|
631 | currentSize = self.fileSize - self.fp.tell() | |
|
632 | neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize | |
|
633 | ||
|
634 | if (currentSize >= neededSize): | |
|
635 | self.__rdBasicHeader() | |
|
636 | return 1 | |
|
637 | ||
|
638 | if not(self.setNextFile()): | |
|
639 | return 0 | |
|
640 | ||
|
641 | deltaTime = self.basicHeaderObj.utc - self.lastUTTime # | |
|
642 | ||
|
643 | self.flagTimeBlock = 0 | |
|
644 | ||
|
645 | if deltaTime > self.maxTimeStep: | |
|
646 | self.flagTimeBlock = 1 | |
|
647 | ||
|
648 | return 1 | |
|
649 | ||
|
650 | ||
|
651 | def readNextBlock(self): | |
|
652 | if not(self.__setNewBlock()): | |
|
653 | return 0 | |
|
654 | ||
|
655 | if not(self.readBlock()): | |
|
656 | return 0 | |
|
657 | ||
|
658 | return 1 | |
|
659 | ||
|
660 | def __rdProcessingHeader(self, fp=None): | |
|
661 | if fp == None: | |
|
662 | fp = self.fp | |
|
663 | ||
|
664 | self.processingHeaderObj.read(fp) | |
|
665 | ||
|
666 | def __rdRadarControllerHeader(self, fp=None): | |
|
667 | if fp == None: | |
|
668 | fp = self.fp | |
|
669 | ||
|
670 | self.radarControllerHeaderObj.read(fp) | |
|
671 | ||
|
672 | def __rdSystemHeader(self, fp=None): | |
|
673 | if fp == None: | |
|
674 | fp = self.fp | |
|
675 | ||
|
676 | self.systemHeaderObj.read(fp) | |
|
677 | ||
|
678 | def __rdBasicHeader(self, fp=None): | |
|
679 | if fp == None: | |
|
680 | fp = self.fp | |
|
681 | ||
|
682 | self.basicHeaderObj.read(fp) | |
|
683 | ||
|
684 | ||
|
685 | def __readFirstHeader(self): | |
|
686 | self.__rdBasicHeader() | |
|
687 | self.__rdSystemHeader() | |
|
688 | self.__rdRadarControllerHeader() | |
|
689 | self.__rdProcessingHeader() | |
|
690 | ||
|
691 | self.firstHeaderSize = self.basicHeaderObj.size | |
|
692 | ||
|
693 | datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR)) | |
|
694 | if datatype == 0: | |
|
695 | datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')]) | |
|
696 | elif datatype == 1: | |
|
697 | datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')]) | |
|
698 | elif datatype == 2: | |
|
699 | datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')]) | |
|
700 | elif datatype == 3: | |
|
701 | datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')]) | |
|
702 | elif datatype == 4: | |
|
703 | datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')]) | |
|
704 | elif datatype == 5: | |
|
705 | datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')]) | |
|
706 | else: | |
|
707 | raise ValueError, 'Data type was not defined' | |
|
708 | ||
|
709 | self.dtype = datatype_str | |
|
710 | self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c | |
|
711 | self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1) | |
|
712 | # self.dataOutObj.channelList = numpy.arange(self.systemHeaderObj.numChannels) | |
|
713 | # self.dataOutObj.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels) | |
|
714 | self.getBlockDimension() | |
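The if/elif chain above maps a data-type index (the log2 distance between the PROCFLAG data-type bit and DATATYPE_CHAR) onto a complex-sample dtype. The same mapping can be written as a lookup table; a sketch equivalent to the branch above:

    import numpy

    #index 0..5 -> (real, imag) pairs of 1/2/4/8-byte integers and 4/8-byte floats
    DTYPE_BY_INDEX = {
        0: numpy.dtype([('real', '<i1'), ('imag', '<i1')]),
        1: numpy.dtype([('real', '<i2'), ('imag', '<i2')]),
        2: numpy.dtype([('real', '<i4'), ('imag', '<i4')]),
        3: numpy.dtype([('real', '<i8'), ('imag', '<i8')]),
        4: numpy.dtype([('real', '<f4'), ('imag', '<f4')]),
        5: numpy.dtype([('real', '<f8'), ('imag', '<f8')]),
    }

    def dtypeFromIndex(datatype):
        if datatype not in DTYPE_BY_INDEX:
            raise ValueError('Data type was not defined')
        return DTYPE_BY_INDEX[datatype]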
|
715 | ||
|
716 | ||
|
717 | def __verifyFile(self, filename, msgFlag=True): | |
|
718 | msg = None | |
|
719 | try: | |
|
720 | fp = open(filename, 'rb') | |
|
721 | currentPosition = fp.tell() | |
|
722 | except: | |
|
723 | if msgFlag: | |
|
724 | print "The file %s can't be opened" % (filename) | |
|
725 | return False | |
|
726 | ||
|
727 | neededSize = self.processingHeaderObj.blockSize + self.firstHeaderSize | |
|
728 | ||
|
729 | if neededSize == 0: | |
|
730 | basicHeaderObj = BasicHeader() | |
|
731 | systemHeaderObj = SystemHeader() | |
|
732 | radarControllerHeaderObj = RadarControllerHeader() | |
|
733 | processingHeaderObj = ProcessingHeader() | |
|
734 | ||
|
735 | try: | |
|
736 | if not( basicHeaderObj.read(fp) ): raise ValueError | |
|
737 | if not( systemHeaderObj.read(fp) ): raise ValueError | |
|
738 | if not( radarControllerHeaderObj.read(fp) ): raise ValueError | |
|
739 | if not( processingHeaderObj.read(fp) ): raise ValueError | |
|
740 | data_type = int(numpy.log2((processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR)) | |
|
741 | ||
|
742 | neededSize = processingHeaderObj.blockSize + basicHeaderObj.size | |
|
743 | ||
|
744 | except: | |
|
745 | if msgFlag: | |
|
746 | print "\tThe file %s is empty or it hasn't enough data" % filename | |
|
747 | ||
|
748 | fp.close() | |
|
749 | return False | |
|
750 | else: | |
|
751 | msg = "\tSkipping the file %s due to it hasn't enough data" %filename | |
|
752 | ||
|
753 | fp.close() | |
|
754 | fileSize = os.path.getsize(filename) | |
|
755 | currentSize = fileSize - currentPosition | |
|
756 | if currentSize < neededSize: | |
|
757 | if msgFlag and (msg != None): | |
|
758 | print msg | |
|
759 | return False | |
|
760 | ||
|
761 | return True | |
|
762 | ||
|
763 | def getData(self): | |
|
764 | pass | |
|
765 | ||
|
766 | def hasNotDataInBuffer(self): | |
|
767 | pass | |
|
768 | ||
|
769 | def readBlock(self): | |
|
770 | pass | |
|
771 | ||
|
772 | class JRODataWriter(JRODataIO): | |
|
773 | ||
|
774 | """ | |
|
775 | This class writes processed data to files (.r or .pdata). The data is always | 

776 | written in blocks. | 
|
777 | """ | |
|
778 | ||
|
779 | blockIndex = 0 | |
|
780 | ||
|
781 | path = None | |
|
782 | ||
|
783 | setFile = None | |
|
784 | ||
|
785 | profilesPerBlock = None | |
|
786 | ||
|
787 | blocksPerFile = None | |
|
788 | ||
|
789 | nWriteBlocks = 0 | |
|
790 | ||
|
791 | isConfig = False | |
|
792 | ||
|
793 | def __init__(self, dataOutObj=None): | |
|
794 | raise ValueError, "Not implemented" | |
|
795 | ||
|
796 | ||
|
797 | def hasAllDataInBuffer(self): | |
|
798 | raise ValueError, "Not implemented" | |
|
799 | ||
|
800 | ||
|
801 | def setBlockDimension(self): | |
|
802 | raise ValueError, "Not implemented" | |
|
803 | ||
|
804 | ||
|
805 | def writeBlock(self): | |
|
806 | raise ValueError, "Not implemented" | 
|
807 | ||
|
808 | ||
|
809 | def putData(self): | |
|
810 | raise ValueError, "Not implemented" | 
|
811 | ||
|
812 | def getDataHeader(self): | |
|
813 | """ | |
|
814 | Gets a copy of the First Header | 

815 | | 

816 | Affected: | 

817 | | 

818 | self.basicHeaderObj | 

819 | self.systemHeaderObj | 

820 | self.radarControllerHeaderObj | 

821 | self.processingHeaderObj | 
|
822 | ||
|
823 | Return: | |
|
824 | None | |
|
825 | """ | |
|
826 | ||
|
827 | raise ValueError, "Not implemented" | 
|
828 | ||
|
829 | def getBasicHeader(self): | |
|
830 | ||
|
831 | self.basicHeaderObj.size = self.basicHeaderSize #bytes | |
|
832 | self.basicHeaderObj.version = self.versionFile | |
|
833 | self.basicHeaderObj.dataBlock = self.nTotalBlocks | |
|
834 | ||
|
835 | utc = numpy.floor(self.dataOutObj.utctime) | |
|
836 | milisecond = (self.dataOutObj.utctime - utc)* 1000.0 | |
|
837 | ||
|
838 | self.basicHeaderObj.utc = utc | |
|
839 | self.basicHeaderObj.miliSecond = milisecond | |
|
840 | self.basicHeaderObj.timeZone = 0 | |
|
841 | self.basicHeaderObj.dstFlag = 0 | |
|
842 | self.basicHeaderObj.errorCount = 0 | |
|
843 | ||
|
844 | def __writeFirstHeader(self): | |
|
845 | """ | |
|
846 | Writes the first header of the file, i.e. the Basic Header plus the Long Header (SystemHeader, RadarControllerHeader, ProcessingHeader) | 
|
847 | ||
|
848 | Affected: | |
|
849 | __dataType | |
|
850 | ||
|
851 | Return: | |
|
852 | None | |
|
853 | """ | |
|
854 | ||
|
855 | # COMPUTE PARAMETERS | 
|
856 | ||
|
857 | sizeLongHeader = self.systemHeaderObj.size + self.radarControllerHeaderObj.size + self.processingHeaderObj.size | |
|
858 | self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader | |
|
859 | ||
|
860 | self.basicHeaderObj.write(self.fp) | |
|
861 | self.systemHeaderObj.write(self.fp) | |
|
862 | self.radarControllerHeaderObj.write(self.fp) | |
|
863 | self.processingHeaderObj.write(self.fp) | |
|
864 | ||
|
865 | self.dtype = self.dataOutObj.dtype | |
|
866 | ||
|
867 | def __setNewBlock(self): | |
|
868 | """ | |
|
869 | If this is a new file the First Header is written, otherwise only the Basic Header is written | 

870 | | 

871 | Return: | 

872 | 0 : if nothing could be written | 

873 | 1 : if the Basic Header or the First Header was written | 
|
874 | """ | |
|
875 | if self.fp == None: | |
|
876 | self.setNextFile() | |
|
877 | ||
|
878 | if self.flagIsNewFile: | |
|
879 | return 1 | |
|
880 | ||
|
881 | if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile: | |
|
882 | self.basicHeaderObj.write(self.fp) | |
|
883 | return 1 | |
|
884 | ||
|
885 | if not( self.setNextFile() ): | |
|
886 | return 0 | |
|
887 | ||
|
888 | return 1 | |
|
889 | ||
|
890 | ||
|
891 | def writeNextBlock(self): | |
|
892 | """ | |
|
893 | Selects the next block of data and writes it to a file | 

894 | | 

895 | Return: | 

896 | 0 : if the data block could not be written | 

897 | 1 : if the data block was written | 
|
898 | """ | |
|
899 | if not( self.__setNewBlock() ): | |
|
900 | return 0 | |
|
901 | ||
|
902 | self.writeBlock() | |
|
903 | ||
|
904 | return 1 | |
|
905 | ||
|
906 | def setNextFile(self): | |
|
907 | """ | |
|
908 | Determines the next file to be written | 
|
909 | ||
|
910 | Affected: | |
|
911 | self.filename | |
|
912 | self.subfolder | |
|
913 | self.fp | |
|
914 | self.setFile | |
|
915 | self.flagIsNewFile | |
|
916 | ||
|
917 | Return: | |
|
918 | 0 : if the file cannot be written | 

919 | 1 : if the file is ready to be written | 
|
920 | """ | |
|
921 | ext = self.ext | |
|
922 | path = self.path | |
|
923 | ||
|
924 | if self.fp != None: | |
|
925 | self.fp.close() | |
|
926 | ||
|
927 | timeTuple = time.localtime( self.dataOutObj.dataUtcTime) | |
|
928 | subfolder = 'D%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday) | |
|
929 | ||
|
930 | doypath = os.path.join( path, subfolder ) | |
|
931 | if not( os.path.exists(doypath) ): | |
|
932 | os.mkdir(doypath) | |
|
933 | self.setFile = -1 #initialize the set counter | 
|
934 | else: | |
|
935 | filesList = os.listdir( doypath ) | |
|
936 | if len( filesList ) > 0: | |
|
937 | filesList = sorted( filesList, key=str.lower ) | |
|
938 | filen = filesList[-1] | |
|
939 | # the filename should have the following format | 

940 | # 0 1234 567 89A BCDE (hex) | 

941 | # x YYYY DDD SSS .ext | 

942 | if isNumber( filen[8:11] ): | 

943 | self.setFile = int( filen[8:11] ) #initialize the set counter to the set of the last file | 
|
944 | else: | |
|
945 | self.setFile = -1 | |
|
946 | else: | |
|
947 | self.setFile = -1 #initialize the set counter | 
|
948 | ||
|
949 | setFile = self.setFile | |
|
950 | setFile += 1 | |
|
951 | ||
|
952 | file = '%s%4.4d%3.3d%3.3d%s' % (self.optchar, | |
|
953 | timeTuple.tm_year, | |
|
954 | timeTuple.tm_yday, | |
|
955 | setFile, | |
|
956 | ext ) | |
|
957 | ||
|
958 | filename = os.path.join( path, subfolder, file ) | |
|
959 | ||
|
960 | fp = open( filename,'wb' ) | |
|
961 | ||
|
962 | self.blockIndex = 0 | |
|
963 | ||
|
964 | #save attributes | 
|
965 | self.filename = filename | |
|
966 | self.subfolder = subfolder | |
|
967 | self.fp = fp | |
|
968 | self.setFile = setFile | |
|
969 | self.flagIsNewFile = 1 | |
|
970 | ||
|
971 | self.getDataHeader() | |
|
972 | ||
|
973 | print 'Writing the file: %s'%self.filename | |
|
974 | ||
|
975 | self.__writeFirstHeader() | |
|
976 | ||
|
977 | return 1 | |
|
978 | ||
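| # Example of the file naming convention used by setNextFile(), with hypothetical | 

| # values: optchar 'D', ext '.r', year 2010, day-of-year 020 and set 3 produce | 

| # 'D2010020003.r'; on a later run filen[8:11] == '003', so isNumber() accepts it | 

| # and the set counter resumes from 3. | 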
|
979 | def setup(self, path, blocksPerFile, profilesPerBlock=None, set=0, ext=None): | |
|
980 | """ | |
|
981 | Sets the format in which the data will be saved and writes the First Header | 

982 | | 

983 | Inputs: | 

984 | path : destination path where the created files will be written | 

985 | format : format in which a file will be saved | 

986 | set : the set number of the file | 

987 | | 

988 | Return: | 

989 | 0 : if the setup failed | 

990 | 1 : if the setup succeeded | 
|
991 | """ | |
|
992 | ||
|
993 | if ext == None: | |
|
994 | ext = self.ext | |
|
995 | ||
|
996 | ext = ext.lower() | |
|
997 | ||
|
998 | self.ext = ext | |
|
999 | ||
|
1000 | self.path = path | |
|
1001 | ||
|
1002 | self.setFile = set - 1 | |
|
1003 | ||
|
1004 | self.blocksPerFile = blocksPerFile | |
|
1005 | ||
|
1006 | self.profilesPerBlock = profilesPerBlock | |
|
1007 | ||
|
1008 | if not(self.setNextFile()): | |
|
1009 | print "There isn't a next file" | |
|
1010 | return 0 | |
|
1011 | ||
|
1012 | self.setBlockDimension() | |
|
1013 | ||
|
1014 | return 1 | |
|
1015 | ||
|
1016 | def run(self, dataOut, **kwargs): | |
|
1017 | ||
|
1018 | if not(self.isConfig): | |
|
1019 | ||
|
1020 | self.dataOutObj = dataOut | |
|
1021 | self.setup(**kwargs) | |
|
1022 | ||
|
1023 | self.putData() | |
|
1024 | ||
|
1025 | class VoltageReader(JRODataReader): | |
|
1026 | """ | |
|
1027 | This class reads voltage data from files in rawdata format (.r). The data is always | 

1028 | read in blocks. The data read (a 3-dimensional array: | 

1029 | profiles*heights*channels) is stored in the "buffer" variable. | 

1030 | | 

1031 | profiles * heights * channels | 

1032 | | 

1033 | This class holds instances (objects) of the BasicHeader, SystemHeader, | 

1034 | RadarControllerHeader and Voltage classes. The first three store the data header | 

1035 | information (metadata), and the fourth (Voltage) obtains and stores one profile of | 

1036 | data from the "buffer" each time the "getData" method is called. | 
|
1037 | ||
|
1038 | Example: | |
|
1039 | ||
|
1040 | dpath = "/home/myuser/data" | |
|
1041 | ||
|
1042 | startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0) | |
|
1043 | ||
|
1044 | endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0) | |
|
1045 | ||
|
1046 | readerObj = VoltageReader() | |
|
1047 | ||
|
1048 | readerObj.setup(dpath, startTime, endTime) | |
|
1049 | ||
|
1050 | while(True): | |
|
1051 | ||
|
1052 | #to get one profile | |
|
1053 | profile = readerObj.getData() | |
|
1054 | ||
|
1055 | #print the profile | |
|
1056 | print profile | |
|
1057 | ||
|
1058 | #If you want to see all datablock | |
|
1059 | print readerObj.datablock | |
|
1060 | ||
|
1061 | if readerObj.flagNoMoreFiles: | |
|
1062 | break | |
|
1063 | ||
|
1064 | """ | |
|
1065 | ||
|
1066 | ext = ".r" | |
|
1067 | ||
|
1068 | optchar = "D" | |
|
1069 | dataOutObj = None | |
|
1070 | ||
|
1071 | ||
|
1072 | def __init__(self, dataOutObj=None): | |
|
1073 | """ | |
|
1074 | Constructor of the VoltageReader class for reading voltage data. | 

1075 | | 

1076 | Input: | 

1077 | dataOutObj : object of the Voltage class. This object is used to | 

1078 | store one profile of data every time a request | 

1079 | (getData) is made. The profile is taken from the data buffer; | 

1080 | if the buffer is empty, a new read of a | 

1081 | data block is performed. | 

1082 | If this parameter is not given, one is created internally. | 

1083 | | 

1084 | Affected: | 
|
1085 | self.dataOutObj | |
|
1086 | ||
|
1087 | Return: | |
|
1088 | None | |
|
1089 | """ | |
|
1090 | ||
|
1091 | self.datablock = None | |
|
1092 | ||
|
1093 | self.utc = 0 | |
|
1094 | ||
|
1095 | self.ext = ".r" | |
|
1096 | ||
|
1097 | self.optchar = "D" | |
|
1098 | ||
|
1099 | self.basicHeaderObj = BasicHeader() | |
|
1100 | ||
|
1101 | self.systemHeaderObj = SystemHeader() | |
|
1102 | ||
|
1103 | self.radarControllerHeaderObj = RadarControllerHeader() | |
|
1104 | ||
|
1105 | self.processingHeaderObj = ProcessingHeader() | |
|
1106 | ||
|
1107 | self.online = 0 | |
|
1108 | ||
|
1109 | self.fp = None | |
|
1110 | ||
|
1111 | self.idFile = None | |
|
1112 | ||
|
1113 | self.dtype = None | |
|
1114 | ||
|
1115 | self.fileSizeByHeader = None | |
|
1116 | ||
|
1117 | self.filenameList = [] | |
|
1118 | ||
|
1119 | self.filename = None | |
|
1120 | ||
|
1121 | self.fileSize = None | |
|
1122 | ||
|
1123 | self.firstHeaderSize = 0 | |
|
1124 | ||
|
1125 | self.basicHeaderSize = 24 | |
|
1126 | ||
|
1127 | self.pathList = [] | |
|
1128 | ||
|
1129 | self.filenameList = [] | |
|
1130 | ||
|
1131 | self.lastUTTime = 0 | |
|
1132 | ||
|
1133 | self.maxTimeStep = 30 | |
|
1134 | ||
|
1135 | self.flagNoMoreFiles = 0 | |
|
1136 | ||
|
1137 | self.set = 0 | |
|
1138 | ||
|
1139 | self.path = None | |
|
1140 | ||
|
1141 | self.profileIndex = 9999 | |
|
1142 | ||
|
1143 | self.delay = 3 #seconds | |
|
1144 | ||
|
1145 | self.nTries = 3 #number of retries | 
|
1146 | ||
|
1147 | self.nFiles = 3 #number of files for searching | |
|
1148 | ||
|
1149 | self.nReadBlocks = 0 | |
|
1150 | ||
|
1151 | self.flagIsNewFile = 1 | |
|
1152 | ||
|
1153 | self.ippSeconds = 0 | |
|
1154 | ||
|
1155 | self.flagTimeBlock = 0 | |
|
1156 | ||
|
1157 | self.flagIsNewBlock = 0 | |
|
1158 | ||
|
1159 | self.nTotalBlocks = 0 | |
|
1160 | ||
|
1161 | self.blocksize = 0 | |
|
1162 | ||
|
1163 | def createObjByDefault(self): | |
|
1164 | ||
|
1165 | dataObj = Voltage() | |
|
1166 | ||
|
1167 | return dataObj | |
|
1168 | ||
|
1169 | def __hasNotDataInBuffer(self): | |
|
1170 | if self.profileIndex >= self.processingHeaderObj.profilesPerBlock: | |
|
1171 | return 1 | |
|
1172 | return 0 | |
|
1173 | ||
|
1174 | ||
|
1175 | def getBlockDimension(self): | |
|
1176 | """ | |
|
1177 | Gets the number of points to read per data block | 
|
1178 | ||
|
1179 | Affected: | |
|
1180 | self.blocksize | |
|
1181 | ||
|
1182 | Return: | |
|
1183 | None | |
|
1184 | """ | |
|
1185 | pts2read = self.processingHeaderObj.profilesPerBlock * self.processingHeaderObj.nHeights * self.systemHeaderObj.nChannels | |
|
1186 | self.blocksize = pts2read | |
|
1187 | ||
|
1188 | ||
|
1189 | def readBlock(self): | |
|
1190 | """ | |
|
1191 | readBlock reads a data block from the current position of the file pointer | 

1192 | (self.fp) and updates all the parameters related to that data block | 

1193 | (metadata + data). The data read is stored in the buffer and the buffer counter | 

1194 | is reset to 0 | 
|
1195 | ||
|
1196 | Inputs: | |
|
1197 | None | |
|
1198 | ||
|
1199 | Return: | |
|
1200 | None | |
|
1201 | ||
|
1202 | Affected: | |
|
1203 | self.profileIndex | |
|
1204 | self.datablock | |
|
1205 | self.flagIsNewFile | |
|
1206 | self.flagIsNewBlock | |
|
1207 | self.nTotalBlocks | |
|
1208 | ||
|
1209 | Exceptions: | |
|
1210 | If a block that was read is not a valid block | 
|
1211 | """ | |
|
1212 | ||
|
1213 | junk = numpy.fromfile( self.fp, self.dtype, self.blocksize ) | |
|
1214 | ||
|
1215 | try: | |
|
1216 | junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) ) | |
|
1217 | except: | |
|
1218 | print "The read block (%3d) does not have enough data" %self.nReadBlocks | 
|
1219 | return 0 | |
|
1220 | ||
|
1221 | junk = numpy.transpose(junk, (2,0,1)) | |
|
1222 | self.datablock = junk['real'] + junk['imag']*1j | |
|
1223 | ||
|
1224 | self.profileIndex = 0 | |
|
1225 | ||
|
1226 | self.flagIsNewFile = 0 | |
|
1227 | self.flagIsNewBlock = 1 | |
|
1228 | ||
|
1229 | self.nTotalBlocks += 1 | |
|
1230 | self.nReadBlocks += 1 | |
|
1231 | ||
|
1232 | return 1 | |
|
1233 | ||
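| # Sketch of what readBlock() produces, assuming for illustration a ('<i2','<i2') | 

| # dtype, profilesPerBlock=128, nHeights=200 and nChannels=2: numpy.fromfile() | 

| # returns 128*200*2 structured samples, the reshape/transpose steps give shape | 

| # (2, 128, 200), and junk['real'] + junk['imag']*1j yields the complex | 

| # self.datablock indexed as (channel, profile, height). | 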
|
1234 | ||
|
1235 | def getData(self): | |
|
1236 | """ | |
|
1237 | getData takes one unit of data from the read buffer and copies it to the "Voltage" class | 

1238 | together with all its associated parameters (metadata). When there is no data in the read | 

1239 | buffer, a new read of the data blocks is done using "readNextBlock" | 

1240 | | 

1241 | It also increments the buffer counter by 1. | 
|
1242 | ||
|
1243 | Return: | |
|
1244 | data : returns one profile of voltages (heights * channels) copied from the | 

1245 | buffer. If there are no more files to read, None is returned. | 
|
1246 | ||
|
1247 | Affected: | 
|
1248 | self.dataOutObj | |
|
1249 | self.profileIndex | |
|
1250 | ||
|
1251 | Affected: | |
|
1252 | self.dataOutObj | |
|
1253 | self.profileIndex | |
|
1254 | self.flagTimeBlock | |
|
1255 | self.flagIsNewBlock | |
|
1256 | """ | |
|
1257 | if self.flagNoMoreFiles: return 0 | |
|
1258 | ||
|
1259 | self.flagTimeBlock = 0 | |
|
1260 | self.flagIsNewBlock = 0 | |
|
1261 | ||
|
1262 | if self.__hasNotDataInBuffer(): | |
|
1263 | ||
|
1264 | if not( self.readNextBlock() ): | |
|
1265 | return 0 | |
|
1266 | ||
|
1267 | # self.updateDataHeader() | |
|
1268 | ||
|
1269 | if self.flagNoMoreFiles == 1: | |
|
1270 | print 'Process finished' | |
|
1271 | return 0 | |
|
1272 | ||
|
1273 | #data is a 3-dimensional numpy array (profiles, heights and channels) | 
|
1274 | ||
|
1275 | if self.datablock is None: | 
|
1276 | self.dataOutObj.flagNoData = True | |
|
1277 | return 0 | |
|
1278 | ||
|
1279 | self.dataOutObj.data = self.datablock[:,self.profileIndex,:] | |
|
1280 | ||
|
1281 | self.dataOutObj.dtype = self.dtype | |
|
1282 | ||
|
1283 | self.dataOutObj.nChannels = self.systemHeaderObj.nChannels | |
|
1284 | ||
|
1285 | self.dataOutObj.nHeights = self.processingHeaderObj.nHeights | |
|
1286 | ||
|
1287 | self.dataOutObj.nProfiles = self.processingHeaderObj.profilesPerBlock | |
|
1288 | ||
|
1289 | xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight | |
|
1290 | ||
|
1291 | self.dataOutObj.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight) | |
|
1292 | ||
|
1293 | self.dataOutObj.channelList = range(self.systemHeaderObj.nChannels) | |
|
1294 | ||
|
1295 | self.dataOutObj.channelIndexList = range(self.systemHeaderObj.nChannels) | |
|
1296 | ||
|
1297 | self.dataOutObj.flagTimeBlock = self.flagTimeBlock | |
|
1298 | ||
|
1299 | self.dataOutObj.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000. + self.profileIndex * self.ippSeconds | |
|
1300 | ||
|
1301 | self.dataOutObj.ippSeconds = self.ippSeconds | |
|
1302 | ||
|
1303 | self.dataOutObj.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt | |
|
1304 | ||
|
1305 | self.dataOutObj.nCohInt = self.processingHeaderObj.nCohInt | |
|
1306 | ||
|
1307 | self.dataOutObj.flagShiftFFT = False | |
|
1308 | ||
|
1309 | if self.processingHeaderObj.code is not None: | 
|
1310 | self.dataOutObj.nCode = self.processingHeaderObj.nCode | |
|
1311 | ||
|
1312 | self.dataOutObj.nBaud = self.processingHeaderObj.nBaud | |
|
1313 | ||
|
1314 | self.dataOutObj.code = self.processingHeaderObj.code | |
|
1315 | ||
|
1316 | self.profileIndex += 1 | |
|
1317 | ||
|
1318 | self.dataOutObj.systemHeaderObj = self.systemHeaderObj.copy() | |
|
1319 | ||
|
1320 | self.dataOutObj.radarControllerHeaderObj = self.radarControllerHeaderObj.copy() | |
|
1321 | ||
|
1322 | self.dataOutObj.flagNoData = False | |
|
1323 | ||
|
1324 | # print self.profileIndex, self.dataOutObj.utctime | |
|
1325 | # if self.profileIndex == 800: | |
|
1326 | # a=1 | |
|
1327 | ||
|
1328 | return self.dataOutObj.data | |
|
1329 | ||
|
1330 | ||
|
1331 | class VoltageWriter(JRODataWriter): | |
|
1332 | """ | |
|
1333 | This class writes voltage data to processed files (.r). The data is always | 

1334 | written in blocks. | 
|
1335 | """ | |
|
1336 | ||
|
1337 | ext = ".r" | |
|
1338 | ||
|
1339 | optchar = "D" | |
|
1340 | ||
|
1341 | shapeBuffer = None | |
|
1342 | ||
|
1343 | ||
|
1344 | def __init__(self, dataOutObj=None): | |
|
1345 | """ | |
|
1346 | Constructor of the VoltageWriter class for writing voltage data. | 
|
1347 | ||
|
1348 | Affected: | |
|
1349 | self.dataOutObj | |
|
1350 | ||
|
1351 | Return: None | |
|
1352 | """ | |
|
1353 | if dataOutObj == None: | |
|
1354 | dataOutObj = Voltage() | |
|
1355 | ||
|
1356 | if not( isinstance(dataOutObj, Voltage) ): | |
|
1357 | raise ValueError, "in VoltageWriter, dataOutObj must be a Voltage class object" | 
|
1358 | ||
|
1359 | self.dataOutObj = dataOutObj | |
|
1360 | ||
|
1361 | self.nTotalBlocks = 0 | |
|
1362 | ||
|
1363 | self.profileIndex = 0 | |
|
1364 | ||
|
1365 | self.isConfig = False | |
|
1366 | ||
|
1367 | self.fp = None | |
|
1368 | ||
|
1369 | self.flagIsNewFile = 1 | |
|
1370 | ||
|
1371 | self.nTotalBlocks = 0 | |
|
1372 | ||
|
1373 | self.flagIsNewBlock = 0 | |
|
1374 | ||
|
1375 | self.flagNoMoreFiles = 0 | |
|
1376 | ||
|
1377 | self.setFile = None | |
|
1378 | ||
|
1379 | self.dtype = None | |
|
1380 | ||
|
1381 | self.path = None | |
|
1382 | ||
|
1383 | self.noMoreFiles = 0 | |
|
1384 | ||
|
1385 | self.filename = None | |
|
1386 | ||
|
1387 | self.basicHeaderObj = BasicHeader() | |
|
1388 | ||
|
1389 | self.systemHeaderObj = SystemHeader() | |
|
1390 | ||
|
1391 | self.radarControllerHeaderObj = RadarControllerHeader() | |
|
1392 | ||
|
1393 | self.processingHeaderObj = ProcessingHeader() | |
|
1394 | ||
|
1395 | def hasAllDataInBuffer(self): | |
|
1396 | if self.profileIndex >= self.processingHeaderObj.profilesPerBlock: | |
|
1397 | return 1 | |
|
1398 | return 0 | |
|
1399 | ||
|
1400 | ||
|
1401 | def setBlockDimension(self): | |
|
1402 | """ | |
|
1403 | Gets the dimensional shapes of the data sub-blocks that make up a block | 
|
1404 | ||
|
1405 | Affected: | |
|
1406 | self.shapeBuffer | 

1407 | self.datablock | 

1408 | | 
|
1409 | ||
|
1410 | Return: None | |
|
1411 | """ | |
|
1412 | self.shapeBuffer = (self.processingHeaderObj.profilesPerBlock, | |
|
1413 | self.processingHeaderObj.nHeights, | |
|
1414 | self.systemHeaderObj.nChannels) | |
|
1415 | ||
|
1416 | self.datablock = numpy.zeros((self.systemHeaderObj.nChannels, | |
|
1417 | self.processingHeaderObj.profilesPerBlock, | |
|
1418 | self.processingHeaderObj.nHeights), | |
|
1419 | dtype=numpy.dtype('complex')) | |
|
1420 | ||
|
1421 | ||
|
1422 | def writeBlock(self): | |
|
1423 | """ | |
|
1424 | Writes the buffer to the designated file | 
|
1425 | ||
|
1426 | Affected: | |
|
1427 | self.profileIndex | |
|
1428 | self.flagIsNewFile | |
|
1429 | self.flagIsNewBlock | |
|
1430 | self.nTotalBlocks | |
|
1431 | self.blockIndex | |
|
1432 | ||
|
1433 | Return: None | |
|
1434 | """ | |
|
1435 | data = numpy.zeros( self.shapeBuffer, self.dtype ) | |
|
1436 | ||
|
1437 | junk = numpy.transpose(self.datablock, (1,2,0)) | |
|
1438 | ||
|
1439 | data['real'] = junk.real | |
|
1440 | data['imag'] = junk.imag | |
|
1441 | ||
|
1442 | data = data.reshape( (-1) ) | |
|
1443 | ||
|
1444 | data.tofile( self.fp ) | |
|
1445 | ||
|
1446 | self.datablock.fill(0) | |
|
1447 | ||
|
1448 | self.profileIndex = 0 | |
|
1449 | self.flagIsNewFile = 0 | |
|
1450 | self.flagIsNewBlock = 1 | |
|
1451 | ||
|
1452 | self.blockIndex += 1 | |
|
1453 | self.nTotalBlocks += 1 | |
|
1454 | ||
|
1455 | def putData(self): | |
|
1456 | """ | |
|
1457 | Assembles a block of data and then writes it to a file | 
|
1458 | ||
|
1459 | Affected: | |
|
1460 | self.flagIsNewBlock | |
|
1461 | self.profileIndex | |
|
1462 | ||
|
1463 | Return: | |
|
1464 | 0 : if there is no data or no more files can be written | 

1465 | 1 : if the data of one block was written to a file | 
|
1466 | """ | |
|
1467 | if self.dataOutObj.flagNoData: | |
|
1468 | return 0 | |
|
1469 | ||
|
1470 | self.flagIsNewBlock = 0 | |
|
1471 | ||
|
1472 | if self.dataOutObj.flagTimeBlock: | |
|
1473 | ||
|
1474 | self.datablock.fill(0) | |
|
1475 | self.profileIndex = 0 | |
|
1476 | self.setNextFile() | |
|
1477 | ||
|
1478 | if self.profileIndex == 0: | |
|
1479 | self.getBasicHeader() | |
|
1480 | ||
|
1481 | self.datablock[:,self.profileIndex,:] = self.dataOutObj.data | |
|
1482 | ||
|
1483 | self.profileIndex += 1 | |
|
1484 | ||
|
1485 | if self.hasAllDataInBuffer(): | |
|
1486 | #if self.flagIsNewFile: | |
|
1487 | self.writeNextBlock() | |
|
1488 | # self.getDataHeader() | |
|
1489 | ||
|
1490 | if self.flagNoMoreFiles: | |
|
1491 | #print 'Process finished' | |
|
1492 | return 0 | |
|
1493 | ||
|
1494 | return 1 | |
|
1495 | ||
|
1496 | def __getProcessFlags(self): | |
|
1497 | ||
|
1498 | processFlags = 0 | |
|
1499 | ||
|
1500 | dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')]) | |
|
1501 | dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')]) | |
|
1502 | dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')]) | |
|
1503 | dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')]) | |
|
1504 | dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')]) | |
|
1505 | dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')]) | |
|
1506 | ||
|
1507 | dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5] | |
|
1508 | ||
|
1509 | ||
|
1510 | ||
|
1511 | datatypeValueList = [PROCFLAG.DATATYPE_CHAR, | |
|
1512 | PROCFLAG.DATATYPE_SHORT, | |
|
1513 | PROCFLAG.DATATYPE_LONG, | |
|
1514 | PROCFLAG.DATATYPE_INT64, | |
|
1515 | PROCFLAG.DATATYPE_FLOAT, | |
|
1516 | PROCFLAG.DATATYPE_DOUBLE] | |
|
1517 | ||
|
1518 | ||
|
1519 | for index in range(len(dtypeList)): | |
|
1520 | if self.dataOutObj.dtype == dtypeList[index]: | |
|
1521 | dtypeValue = datatypeValueList[index] | |
|
1522 | break | |
|
1523 | ||
|
1524 | processFlags += dtypeValue | |
|
1525 | ||
|
1526 | if self.dataOutObj.flagDecodeData: | |
|
1527 | processFlags += PROCFLAG.DECODE_DATA | |
|
1528 | ||
|
1529 | if self.dataOutObj.flagDeflipData: | |
|
1530 | processFlags += PROCFLAG.DEFLIP_DATA | |
|
1531 | ||
|
1532 | if self.dataOutObj.code is not None: | 
|
1533 | processFlags += PROCFLAG.DEFINE_PROCESS_CODE | |
|
1534 | ||
|
1535 | if self.dataOutObj.nCohInt > 1: | |
|
1536 | processFlags += PROCFLAG.COHERENT_INTEGRATION | |
|
1537 | ||
|
1538 | return processFlags | |
|
1539 | ||
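| # Example of how the flags above combine, for a hypothetical int16 voltage output | 

| # with flagDecodeData set, no code defined and nCohInt > 1: processFlags ends up as | 

| # PROCFLAG.DATATYPE_SHORT + PROCFLAG.DECODE_DATA + PROCFLAG.COHERENT_INTEGRATION. | 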
|
1540 | ||
|
1541 | def __getBlockSize(self): | |
|
1542 | ''' | |
|
1543 | This method determines the number of bytes for a data block of type Voltage | 
|
1544 | ''' | |
|
1545 | ||
|
1546 | dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')]) | |
|
1547 | dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')]) | |
|
1548 | dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')]) | |
|
1549 | dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')]) | |
|
1550 | dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')]) | |
|
1551 | dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')]) | |
|
1552 | ||
|
1553 | dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5] | |
|
1554 | datatypeValueList = [1,2,4,8,4,8] | |
|
1555 | for index in range(len(dtypeList)): | |
|
1556 | if self.dataOutObj.dtype == dtypeList[index]: | |
|
1557 | datatypeValue = datatypeValueList[index] | |
|
1558 | break | |
|
1559 | ||
|
1560 | blocksize = int(self.dataOutObj.nHeights * self.dataOutObj.nChannels * self.dataOutObj.nProfiles * datatypeValue * 2) | |
|
1561 | ||
|
1562 | return blocksize | |
|
1563 | ||
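| # Worked example for the block size above, with hypothetical values: dtype | 

| # ('<i2','<i2'), nHeights=200, nChannels=2 and nProfiles=128 give | 

| # 200 * 2 * 128 * 2 bytes * 2 (real + imag) = 204800 bytes per block. | 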
|
1564 | def getDataHeader(self): | |
|
1565 | ||
|
1566 | """ | |
|
1567 | Gets a copy of the First Header | 
|
1568 | ||
|
1569 | Affected: | |
|
1570 | self.systemHeaderObj | |
|
1571 | self.radarControllerHeaderObj | |
|
1572 | self.dtype | |
|
1573 | ||
|
1574 | Return: | |
|
1575 | None | |
|
1576 | """ | |
|
1577 | ||
|
1578 | self.systemHeaderObj = self.dataOutObj.systemHeaderObj.copy() | |
|
1579 | self.systemHeaderObj.nChannels = self.dataOutObj.nChannels | |
|
1580 | self.radarControllerHeaderObj = self.dataOutObj.radarControllerHeaderObj.copy() | |
|
1581 | ||
|
1582 | self.getBasicHeader() | |
|
1583 | ||
|
1584 | processingHeaderSize = 40 # bytes | |
|
1585 | self.processingHeaderObj.dtype = 0 # Voltage | |
|
1586 | self.processingHeaderObj.blockSize = self.__getBlockSize() | |
|
1587 | self.processingHeaderObj.profilesPerBlock = self.profilesPerBlock | |
|
1588 | self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile | |
|
1589 | self.processingHeaderObj.nWindows = 1 #could be 1 or self.dataOutObj.processingHeaderObj.nWindows | 
|
1590 | self.processingHeaderObj.processFlags = self.__getProcessFlags() | |
|
1591 | self.processingHeaderObj.nCohInt = self.dataOutObj.nCohInt | |
|
1592 | self.processingHeaderObj.nIncohInt = 1 # when the source data is of type Voltage | 

1593 | self.processingHeaderObj.totalSpectra = 0 # when the source data is of type Voltage | 
|
1594 | ||
|
1595 | if self.dataOutObj.code is not None: | 
|
1596 | self.processingHeaderObj.code = self.dataOutObj.code | |
|
1597 | self.processingHeaderObj.nCode = self.dataOutObj.nCode | |
|
1598 | self.processingHeaderObj.nBaud = self.dataOutObj.nBaud | |
|
1599 | codesize = int(8 + 4 * self.dataOutObj.nCode * self.dataOutObj.nBaud) | |
|
1600 | processingHeaderSize += codesize | |
|
1601 | ||
|
1602 | if self.processingHeaderObj.nWindows != 0: | |
|
1603 | self.processingHeaderObj.firstHeight = self.dataOutObj.heightList[0] | |
|
1604 | self.processingHeaderObj.deltaHeight = self.dataOutObj.heightList[1] - self.dataOutObj.heightList[0] | |
|
1605 | self.processingHeaderObj.nHeights = self.dataOutObj.nHeights | |
|
1606 | self.processingHeaderObj.samplesWin = self.dataOutObj.nHeights | |
|
1607 | processingHeaderSize += 12 | |
|
1608 | ||
|
1609 | self.processingHeaderObj.size = processingHeaderSize | |
|
1610 | ||
|
1611 | class SpectraReader(JRODataReader): | |
|
1612 | """ | |
|
1613 | This class reads spectra data from processed files (.pdata). The data is always | 

1614 | read in blocks. The data read (3-dimensional arrays) | 

1615 | are stored in three buffers, for the Self Spectra, the Cross Spectra and the DC Channels. | 

1616 | | 

1617 | equalChannelPairs * heights * profiles (Self Spectra) | 

1618 | differentChannelPairs * heights * profiles (Cross Spectra) | 

1619 | channels * heights (DC Channels) | 

1620 | | 

1621 | This class holds instances (objects) of the BasicHeader, SystemHeader, | 

1622 | RadarControllerHeader and Spectra classes. The first three store the data header | 

1623 | information (metadata), and the fourth (Spectra) obtains and stores one block of | 

1624 | data from the "buffer" each time the "getData" method is called. | 
|
1625 | ||
|
1626 | Example: | |
|
1627 | dpath = "/home/myuser/data" | |
|
1628 | ||
|
1629 | startTime = datetime.datetime(2010,1,20,0,0,0,0,0,0) | |
|
1630 | ||
|
1631 | endTime = datetime.datetime(2010,1,21,23,59,59,0,0,0) | |
|
1632 | ||
|
1633 | readerObj = SpectraReader() | |
|
1634 | ||
|
1635 | readerObj.setup(dpath, startTime, endTime) | |
|
1636 | ||
|
1637 | while(True): | |
|
1638 | ||
|
1639 | readerObj.getData() | |
|
1640 | ||
|
1641 | print readerObj.data_spc | |
|
1642 | ||
|
1643 | print readerObj.data_cspc | |
|
1644 | ||
|
1645 | print readerObj.data_dc | |
|
1646 | ||
|
1647 | if readerObj.flagNoMoreFiles: | |
|
1648 | break | |
|
1649 | ||
|
1650 | """ | |
|
1651 | ||
|
1652 | pts2read_SelfSpectra = 0 | |
|
1653 | ||
|
1654 | pts2read_CrossSpectra = 0 | |
|
1655 | ||
|
1656 | pts2read_DCchannels = 0 | |
|
1657 | ||
|
1658 | ext = ".pdata" | |
|
1659 | ||
|
1660 | optchar = "P" | |
|
1661 | ||
|
1662 | dataOutObj = None | |
|
1663 | ||
|
1664 | nRdChannels = None | |
|
1665 | ||
|
1666 | nRdPairs = None | |
|
1667 | ||
|
1668 | rdPairList = [] | |
|
1669 | ||
|
1670 | ||
|
1671 | def __init__(self, dataOutObj=None): | |
|
1672 | """ | |
|
1673 | Constructor of the SpectraReader class for reading spectra data. | 

1674 | | 

1675 | Inputs: | 

1676 | dataOutObj : object of the Spectra class. This object is used to | 

1677 | store one profile of data every time a request | 

1678 | (getData) is made. The profile is taken from the data buffer; | 

1679 | if the buffer is empty, a new read of a | 

1680 | data block is performed. | 

1681 | If this parameter is not given, one is created internally. | 
|
1682 | ||
|
1683 | Affected: | |
|
1684 | self.dataOutObj | |
|
1685 | ||
|
1686 | Return : None | |
|
1687 | """ | |
|
1688 | ||
|
1689 | self.pts2read_SelfSpectra = 0 | |
|
1690 | ||
|
1691 | self.pts2read_CrossSpectra = 0 | |
|
1692 | ||
|
1693 | self.pts2read_DCchannels = 0 | |
|
1694 | ||
|
1695 | self.datablock = None | |
|
1696 | ||
|
1697 | self.utc = None | |
|
1698 | ||
|
1699 | self.ext = ".pdata" | |
|
1700 | ||
|
1701 | self.optchar = "P" | |
|
1702 | ||
|
1703 | self.basicHeaderObj = BasicHeader() | |
|
1704 | ||
|
1705 | self.systemHeaderObj = SystemHeader() | |
|
1706 | ||
|
1707 | self.radarControllerHeaderObj = RadarControllerHeader() | |
|
1708 | ||
|
1709 | self.processingHeaderObj = ProcessingHeader() | |
|
1710 | ||
|
1711 | self.online = 0 | |
|
1712 | ||
|
1713 | self.fp = None | |
|
1714 | ||
|
1715 | self.idFile = None | |
|
1716 | ||
|
1717 | self.dtype = None | |
|
1718 | ||
|
1719 | self.fileSizeByHeader = None | |
|
1720 | ||
|
1721 | self.filenameList = [] | |
|
1722 | ||
|
1723 | self.filename = None | |
|
1724 | ||
|
1725 | self.fileSize = None | |
|
1726 | ||
|
1727 | self.firstHeaderSize = 0 | |
|
1728 | ||
|
1729 | self.basicHeaderSize = 24 | |
|
1730 | ||
|
1731 | self.pathList = [] | |
|
1732 | ||
|
1733 | self.lastUTTime = 0 | |
|
1734 | ||
|
1735 | self.maxTimeStep = 30 | |
|
1736 | ||
|
1737 | self.flagNoMoreFiles = 0 | |
|
1738 | ||
|
1739 | self.set = 0 | |
|
1740 | ||
|
1741 | self.path = None | |
|
1742 | ||
|
1743 | self.delay = 3 #seconds | |
|
1744 | ||
|
1745 | self.nTries = 3 #number of retries | 
|
1746 | ||
|
1747 | self.nFiles = 3 #number of files for searching | |
|
1748 | ||
|
1749 | self.nReadBlocks = 0 | |
|
1750 | ||
|
1751 | self.flagIsNewFile = 1 | |
|
1752 | ||
|
1753 | self.ippSeconds = 0 | |
|
1754 | ||
|
1755 | self.flagTimeBlock = 0 | |
|
1756 | ||
|
1757 | self.flagIsNewBlock = 0 | |
|
1758 | ||
|
1759 | self.nTotalBlocks = 0 | |
|
1760 | ||
|
1761 | self.blocksize = 0 | |
|
1762 | ||
|
1763 | ||
|
1764 | def createObjByDefault(self): | |
|
1765 | ||
|
1766 | dataObj = Spectra() | |
|
1767 | ||
|
1768 | return dataObj | |
|
1769 | ||
|
1770 | def __hasNotDataInBuffer(self): | |
|
1771 | return 1 | |
|
1772 | ||
|
1773 | ||
|
1774 | def getBlockDimension(self): | |
|
1775 | """ | |
|
1776 | Gets the number of points to read per data block | 
|
1777 | ||
|
1778 | Affected: | |
|
1779 | self.nRdChannels | |
|
1780 | self.nRdPairs | |
|
1781 | self.pts2read_SelfSpectra | |
|
1782 | self.pts2read_CrossSpectra | |
|
1783 | self.pts2read_DCchannels | |
|
1784 | self.blocksize | |
|
1785 | self.dataOutObj.nChannels | |
|
1786 | self.dataOutObj.nPairs | |
|
1787 | ||
|
1788 | Return: | |
|
1789 | None | |
|
1790 | """ | |
|
1791 | self.nRdChannels = 0 | |
|
1792 | self.nRdPairs = 0 | |
|
1793 | self.rdPairList = [] | |
|
1794 | ||
|
1795 | for i in range(0, self.processingHeaderObj.totalSpectra*2, 2): | |
|
1796 | if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i+1]: | |
|
1797 | self.nRdChannels = self.nRdChannels + 1 #pair of equal channels | 

1798 | else: | 

1799 | self.nRdPairs = self.nRdPairs + 1 #pair of different channels | 
|
1800 | self.rdPairList.append((self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1])) | |
|
1801 | ||
|
1802 | pts2read = self.processingHeaderObj.nHeights * self.processingHeaderObj.profilesPerBlock | |
|
1803 | ||
|
1804 | self.pts2read_SelfSpectra = int(self.nRdChannels * pts2read) | |
|
1805 | self.blocksize = self.pts2read_SelfSpectra | |
|
1806 | ||
|
1807 | if self.processingHeaderObj.flag_cspc: | |
|
1808 | self.pts2read_CrossSpectra = int(self.nRdPairs * pts2read) | |
|
1809 | self.blocksize += self.pts2read_CrossSpectra | |
|
1810 | ||
|
1811 | if self.processingHeaderObj.flag_dc: | |
|
1812 | self.pts2read_DCchannels = int(self.systemHeaderObj.nChannels * self.processingHeaderObj.nHeights) | |
|
1813 | self.blocksize += self.pts2read_DCchannels | |
|
1814 | ||
|
1815 | # self.blocksize = self.pts2read_SelfSpectra + self.pts2read_CrossSpectra + self.pts2read_DCchannels | |
|
1816 | ||
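| # Example of the spectraComb pairing logic above: a hypothetical header with | 

| # spectraComb = [0,0, 1,1, 0,1] describes two self spectra (channels 0 and 1) | 

| # and one cross pair, so nRdChannels == 2, nRdPairs == 1 and | 

| # rdPairList == [(0, 1)]. | 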
|
1817 | ||
|
1818 | def readBlock(self): | |
|
1819 | """ | |
|
1820 | Reads a data block from the current position of the file pointer | 

1821 | (self.fp) and updates all the parameters related to that data block | 

1822 | (metadata + data). The data read is stored in the buffer and the buffer counter | 

1823 | is reset to 0 | 
|
1824 | ||
|
1825 | Return: None | |
|
1826 | ||
|
1827 | Affected: | 
|
1828 | ||
|
1829 | self.flagIsNewFile | |
|
1830 | self.flagIsNewBlock | |
|
1831 | self.nTotalBlocks | |
|
1832 | self.data_spc | |
|
1833 | self.data_cspc | |
|
1834 | self.data_dc | |
|
1835 | ||
|
1836 | Exceptions: | |
|
1837 | If a block that was read is not a valid block | 
|
1838 | """ | |
|
1839 | blockOk_flag = False | |
|
1840 | fpointer = self.fp.tell() | |
|
1841 | ||
|
1842 | spc = numpy.fromfile( self.fp, self.dtype[0], self.pts2read_SelfSpectra ) | |
|
1843 | spc = spc.reshape( (self.nRdChannels, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #reshape into a 3D array | 
|
1844 | ||
|
1845 | if self.processingHeaderObj.flag_cspc: | |
|
1846 | cspc = numpy.fromfile( self.fp, self.dtype, self.pts2read_CrossSpectra ) | |
|
1847 | cspc = cspc.reshape( (self.nRdPairs, self.processingHeaderObj.nHeights, self.processingHeaderObj.profilesPerBlock) ) #reshape into a 3D array | 
|
1848 | ||
|
1849 | if self.processingHeaderObj.flag_dc: | |
|
1850 | dc = numpy.fromfile( self.fp, self.dtype, self.pts2read_DCchannels ) #int(self.processingHeaderObj.nHeights*self.systemHeaderObj.nChannels) ) | |
|
1851 | dc = dc.reshape( (self.systemHeaderObj.nChannels, self.processingHeaderObj.nHeights) ) #reshape into a 2D array | 
|
1852 | ||
|
1853 | ||
|
1854 | if not(self.processingHeaderObj.shif_fft): | |
|
1855 | spc = numpy.roll( spc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #shift to the right along axis 2 by a fixed number of positions | 
|
1856 | ||
|
1857 | if self.processingHeaderObj.flag_cspc: | |
|
1858 | cspc = numpy.roll( cspc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #shift to the right along axis 2 by a fixed number of positions | 
|
1859 | ||
|
1860 | ||
|
1861 | spc = numpy.transpose( spc, (0,2,1) ) | |
|
1862 | self.data_spc = spc | |
|
1863 | ||
|
1864 | if self.processingHeaderObj.flag_cspc: | |
|
1865 | cspc = numpy.transpose( cspc, (0,2,1) ) | |
|
1866 | self.data_cspc = cspc['real'] + cspc['imag']*1j | |
|
1867 | else: | |
|
1868 | self.data_cspc = None | |
|
1869 | ||
|
1870 | if self.processingHeaderObj.flag_dc: | |
|
1871 | self.data_dc = dc['real'] + dc['imag']*1j | |
|
1872 | else: | |
|
1873 | self.data_dc = None | |
|
1874 | ||
|
1875 | self.flagIsNewFile = 0 | |
|
1876 | self.flagIsNewBlock = 1 | |
|
1877 | ||
|
1878 | self.nTotalBlocks += 1 | |
|
1879 | self.nReadBlocks += 1 | |
|
1880 | ||
|
1881 | return 1 | |
|
1882 | ||
|
1883 | ||
|
1884 | def getData(self): | |
|
1885 | """ | |
|
1886 | Copies the read buffer to the "Spectra" class, | 

1887 | together with all its associated parameters (metadata). When there is no data in the read | 

1888 | buffer, a new read of the data blocks is done using "readNextBlock" | 
|
1889 | ||
|
1890 | Return: | |
|
1891 | 0 : if there are no more files available | 

1892 | 1 : if the buffer was copied successfully | 
|
1893 | ||
|
1894 | Affected: | |
|
1895 | self.dataOutObj | |
|
1896 | ||
|
1897 | self.flagTimeBlock | |
|
1898 | self.flagIsNewBlock | |
|
1899 | """ | |
|
1900 | ||
|
1901 | if self.flagNoMoreFiles: return 0 | |
|
1902 | ||
|
1903 | self.flagTimeBlock = 0 | |
|
1904 | self.flagIsNewBlock = 0 | |
|
1905 | ||
|
1906 | if self.__hasNotDataInBuffer(): | |
|
1907 | ||
|
1908 | if not( self.readNextBlock() ): | |
|
1909 | return 0 | |
|
1910 | ||
|
1911 | # self.updateDataHeader() | |
|
1912 | ||
|
1913 | if self.flagNoMoreFiles == 1: | |
|
1914 | print 'Process finished' | |
|
1915 | return 0 | |
|
1916 | ||
|
1917 | #data is a 3-dimensional numpy array (profiles, heights and channels) | 
|
1918 | ||
|
1919 | if self.data_dc is None: | 
|
1920 | self.dataOutObj.flagNoData = True | |
|
1921 | return 0 | |
|
1922 | ||
|
1923 | ||
|
1924 | self.dataOutObj.data_spc = self.data_spc | |
|
1925 | ||
|
1926 | self.dataOutObj.data_cspc = self.data_cspc | |
|
1927 | ||
|
1928 | self.dataOutObj.data_dc = self.data_dc | |
|
1929 | ||
|
1930 | self.dataOutObj.flagTimeBlock = self.flagTimeBlock | |
|
1931 | ||
|
1932 | self.dataOutObj.flagNoData = False | |
|
1933 | ||
|
1934 | self.dataOutObj.dtype = self.dtype | |
|
1935 | ||
|
1936 | self.dataOutObj.nChannels = self.nRdChannels | |
|
1937 | ||
|
1938 | self.dataOutObj.nPairs = self.nRdPairs | |
|
1939 | ||
|
1940 | self.dataOutObj.pairsList = self.rdPairList | |
|
1941 | ||
|
1942 | self.dataOutObj.nHeights = self.processingHeaderObj.nHeights | |
|
1943 | ||
|
1944 | self.dataOutObj.nProfiles = self.processingHeaderObj.profilesPerBlock | |
|
1945 | ||
|
1946 | self.dataOutObj.nFFTPoints = self.processingHeaderObj.profilesPerBlock | |
|
1947 | ||
|
1948 | self.dataOutObj.nIncohInt = self.processingHeaderObj.nIncohInt | |
|
1949 | ||
|
1950 | ||
|
1951 | xf = self.processingHeaderObj.firstHeight + self.processingHeaderObj.nHeights*self.processingHeaderObj.deltaHeight | |
|
1952 | ||
|
1953 | self.dataOutObj.heightList = numpy.arange(self.processingHeaderObj.firstHeight, xf, self.processingHeaderObj.deltaHeight) | |
|
1954 | ||
|
1955 | self.dataOutObj.channelList = range(self.systemHeaderObj.nChannels) | |
|
1956 | ||
|
1957 | self.dataOutObj.channelIndexList = range(self.systemHeaderObj.nChannels) | |
|
1958 | ||
|
1959 | self.dataOutObj.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000.#+ self.profileIndex * self.ippSeconds | |
|
1960 | ||
|
1961 | self.dataOutObj.ippSeconds = self.ippSeconds | |
|
1962 | ||
|
1963 | self.dataOutObj.timeInterval = self.ippSeconds * self.processingHeaderObj.nCohInt * self.processingHeaderObj.nIncohInt * self.dataOutObj.nFFTPoints | |
|
1964 | ||
|
1965 | self.dataOutObj.flagShiftFFT = self.processingHeaderObj.shif_fft | |
|
1966 | ||
|
1967 | # self.profileIndex += 1 | |
|
1968 | ||
|
1969 | self.dataOutObj.systemHeaderObj = self.systemHeaderObj.copy() | |
|
1970 | ||
|
1971 | self.dataOutObj.radarControllerHeaderObj = self.radarControllerHeaderObj.copy() | |
|
1972 | ||
|
1973 | return self.dataOutObj.data_spc | |
|
1974 | ||
|
1975 | ||
|
1976 | class SpectraWriter(JRODataWriter): | |
|
1977 | ||
|
1978 | """ | |
|
1979 | This class writes spectra data to processed files (.pdata). The data is always | 

1980 | written in blocks. | 
|
1981 | """ | |
|
1982 | ||
|
1983 | ext = ".pdata" | |
|
1984 | ||
|
1985 | optchar = "P" | |
|
1986 | ||
|
1987 | shape_spc_Buffer = None | |
|
1988 | ||
|
1989 | shape_cspc_Buffer = None | |
|
1990 | ||
|
1991 | shape_dc_Buffer = None | |
|
1992 | ||
|
1993 | data_spc = None | |
|
1994 | ||
|
1995 | data_cspc = None | |
|
1996 | ||
|
1997 | data_dc = None | |
|
1998 | ||
|
1999 | # dataOutObj = None | |
|
2000 | ||
|
2001 | def __init__(self, dataOutObj=None): | |
|
2002 | """ | |
|
2003 | Constructor of the SpectraWriter class for writing spectra data. | 
|
2004 | ||
|
2005 | Affected: | |
|
2006 | self.dataOutObj | |
|
2007 | self.basicHeaderObj | |
|
2008 | self.systemHeaderObj | |
|
2009 | self.radarControllerHeaderObj | |
|
2010 | self.processingHeaderObj | |
|
2011 | ||
|
2012 | Return: None | |
|
2013 | """ | |
|
2014 | if dataOutObj == None: | |
|
2015 | dataOutObj = Spectra() | |
|
2016 | ||
|
2017 | if not( isinstance(dataOutObj, Spectra) ): | |
|
2018 | raise ValueError, "in SpectraWriter, dataOutObj must be a Spectra class object" | 
|
2019 | ||
|
2020 | self.dataOutObj = dataOutObj | |
|
2021 | ||
|
2022 | self.nTotalBlocks = 0 | |
|
2023 | ||
|
2024 | self.data_spc = None | |
|
2025 | ||
|
2026 | self.data_cspc = None | |
|
2027 | ||
|
2028 | self.data_dc = None | |
|
2029 | ||
|
2030 | self.fp = None | |
|
2031 | ||
|
2032 | self.flagIsNewFile = 1 | |
|
2033 | ||
|
2034 | self.nTotalBlocks = 0 | |
|
2035 | ||
|
2036 | self.flagIsNewBlock = 0 | |
|
2037 | ||
|
2038 | self.flagNoMoreFiles = 0 | |
|
2039 | ||
|
2040 | self.setFile = None | |
|
2041 | ||
|
2042 | self.dtype = None | |
|
2043 | ||
|
2044 | self.path = None | |
|
2045 | ||
|
2046 | self.noMoreFiles = 0 | |
|
2047 | ||
|
2048 | self.filename = None | |
|
2049 | ||
|
2050 | self.basicHeaderObj = BasicHeader() | |
|
2051 | ||
|
2052 | self.systemHeaderObj = SystemHeader() | |
|
2053 | ||
|
2054 | self.radarControllerHeaderObj = RadarControllerHeader() | |
|
2055 | ||
|
2056 | self.processingHeaderObj = ProcessingHeader() | |
|
2057 | ||
|
2058 | ||
|
2059 | def hasAllDataInBuffer(self): | |
|
2060 | return 1 | |
|
2061 | ||
|
2062 | ||
|
2063 | def setBlockDimension(self): | |
|
2064 | """ | |
|
2065 | Gets the dimensional shapes of the data sub-blocks that make up a block | 
|
2066 | ||
|
2067 | Affected: | |
|
2068 | self.shape_spc_Buffer | |
|
2069 | self.shape_cspc_Buffer | |
|
2070 | self.shape_dc_Buffer | |
|
2071 | ||
|
2072 | Return: None | |
|
2073 | """ | |
|
2074 | self.shape_spc_Buffer = (self.dataOutObj.nChannels, | |
|
2075 | self.processingHeaderObj.nHeights, | |
|
2076 | self.processingHeaderObj.profilesPerBlock) | |
|
2077 | ||
|
2078 | self.shape_cspc_Buffer = (self.dataOutObj.nPairs, | |
|
2079 | self.processingHeaderObj.nHeights, | |
|
2080 | self.processingHeaderObj.profilesPerBlock) | |
|
2081 | ||
|
2082 | self.shape_dc_Buffer = (self.dataOutObj.nChannels, | |
|
2083 | self.processingHeaderObj.nHeights) | |
|
2084 | ||
|
2085 | ||
|
2086 | def writeBlock(self): | |
|
2087 | """ | |
|
2088 | Writes the buffer to the designated file | 
|
2089 | ||
|
2090 | Affected: | |
|
2091 | self.data_spc | |
|
2092 | self.data_cspc | |
|
2093 | self.data_dc | |
|
2094 | self.flagIsNewFile | |
|
2095 | self.flagIsNewBlock | |
|
2096 | self.nTotalBlocks | |
|
2097 | self.nWriteBlocks | |
|
2098 | ||
|
2099 | Return: None | |
|
2100 | """ | |
|
2101 | ||
|
2102 | spc = numpy.transpose( self.data_spc, (0,2,1) ) | |
|
2103 | if not( self.processingHeaderObj.shif_fft ): | |
|
2104 | spc = numpy.roll( spc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #shift to the right along axis 2 by a fixed number of positions | 
|
2105 | data = spc.reshape((-1)) | |
|
2106 | data.tofile(self.fp) | |
|
2107 | ||
|
2108 | if self.data_cspc is not None: | 
|
2109 | data = numpy.zeros( self.shape_cspc_Buffer, self.dtype ) | |
|
2110 | cspc = numpy.transpose( self.data_cspc, (0,2,1) ) | |
|
2111 | if not( self.processingHeaderObj.shif_fft ): | |
|
2112 | cspc = numpy.roll( cspc, self.processingHeaderObj.profilesPerBlock/2, axis=2 ) #shift to the right along axis 2 by a fixed number of positions | 
|
2113 | data['real'] = cspc.real | |
|
2114 | data['imag'] = cspc.imag | |
|
2115 | data = data.reshape((-1)) | |
|
2116 | data.tofile(self.fp) | |
|
2117 | ||
|
2118 | if self.data_dc is not None: | 
|
2119 | data = numpy.zeros( self.shape_dc_Buffer, self.dtype ) | |
|
2120 | dc = self.data_dc | |
|
2121 | data['real'] = dc.real | |
|
2122 | data['imag'] = dc.imag | |
|
2123 | data = data.reshape((-1)) | |
|
2124 | data.tofile(self.fp) | |
|
2125 | ||
|
2126 | self.data_spc.fill(0) | |
|
2127 | self.data_dc.fill(0) | |
|
2128 | if self.data_cspc is not None: | 
|
2129 | self.data_cspc.fill(0) | |
|
2130 | ||
|
2131 | self.flagIsNewFile = 0 | |
|
2132 | self.flagIsNewBlock = 1 | |
|
2133 | self.nTotalBlocks += 1 | |
|
2134 | self.nWriteBlocks += 1 | |
|
2135 | self.blockIndex += 1 | |
|
2136 | ||
|
2137 | ||
|
2138 | def putData(self): | |
|
2139 | """ | |
|
2140 | Assembles a block of data and then writes it to a file | 
|
2141 | ||
|
2142 | Affected: | |
|
2143 | self.data_spc | |
|
2144 | self.data_cspc | |
|
2145 | self.data_dc | |
|
2146 | ||
|
2147 | Return: | |
|
2148 | 0 : if there is no data or no more files can be written | 

2149 | 1 : if the data of one block was written to a file | 
|
2150 | """ | |
|
2151 | ||
|
2152 | if self.dataOutObj.flagNoData: | |
|
2153 | return 0 | |
|
2154 | ||
|
2155 | self.flagIsNewBlock = 0 | |
|
2156 | ||
|
2157 | if self.dataOutObj.flagTimeBlock: | |
|
2158 | self.data_spc.fill(0) | |
|
2159 | self.data_cspc.fill(0) | |
|
2160 | self.data_dc.fill(0) | |
|
2161 | self.setNextFile() | |
|
2162 | ||
|
2163 | if self.flagIsNewFile == 0: | |
|
2164 | self.getBasicHeader() | |
|
2165 | ||
|
2166 | self.data_spc = self.dataOutObj.data_spc | |
|
2167 | self.data_cspc = self.dataOutObj.data_cspc | |
|
2168 | self.data_dc = self.dataOutObj.data_dc | |
|
2169 | ||
|
2170 | # #self.processingHeaderObj.dataBlocksPerFile) | |
|
2171 | if self.hasAllDataInBuffer(): | |
|
2172 | # self.getDataHeader() | |
|
2173 | self.writeNextBlock() | |
|
2174 | ||
|
2175 | if self.flagNoMoreFiles: | |
|
2176 | #print 'Process finished' | |
|
2177 | return 0 | |
|
2178 | ||
|
2179 | return 1 | |
|
2180 | ||
|
2181 | ||
|
2182 | def __getProcessFlags(self): | |
|
2183 | ||
|
2184 | processFlags = 0 | |
|
2185 | ||
|
2186 | dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')]) | |
|
2187 | dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')]) | |
|
2188 | dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')]) | |
|
2189 | dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')]) | |
|
2190 | dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')]) | |
|
2191 | dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')]) | |
|
2192 | ||
|
2193 | dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5] | |
|
2194 | ||
|
2195 | ||
|
2196 | ||
|
2197 | datatypeValueList = [PROCFLAG.DATATYPE_CHAR, | |
|
2198 | PROCFLAG.DATATYPE_SHORT, | |
|
2199 | PROCFLAG.DATATYPE_LONG, | |
|
2200 | PROCFLAG.DATATYPE_INT64, | |
|
2201 | PROCFLAG.DATATYPE_FLOAT, | |
|
2202 | PROCFLAG.DATATYPE_DOUBLE] | |
|
2203 | ||
|
2204 | ||
|
2205 | for index in range(len(dtypeList)): | |
|
2206 | if self.dataOutObj.dtype == dtypeList[index]: | |
|
2207 | dtypeValue = datatypeValueList[index] | |
|
2208 | break | |
|
2209 | ||
|
2210 | processFlags += dtypeValue | |
|
2211 | ||
|
2212 | if self.dataOutObj.flagDecodeData: | |
|
2213 | processFlags += PROCFLAG.DECODE_DATA | |
|
2214 | ||
|
2215 | if self.dataOutObj.flagDeflipData: | |
|
2216 | processFlags += PROCFLAG.DEFLIP_DATA | |
|
2217 | ||
|
2218 | if self.dataOutObj.code is not None: | 
|
2219 | processFlags += PROCFLAG.DEFINE_PROCESS_CODE | |
|
2220 | ||
|
2221 | if self.dataOutObj.nIncohInt > 1: | |
|
2222 | processFlags += PROCFLAG.INCOHERENT_INTEGRATION | |
|
2223 | ||
|
2224 | if self.dataOutObj.data_dc is not None: | 
|
2225 | processFlags += PROCFLAG.SAVE_CHANNELS_DC | |
|
2226 | ||
|
2227 | return processFlags | |
|
2228 | ||
|
2229 | ||
|
2230 | def __getBlockSize(self): | |
|
2231 | ''' | |
|
2232 | This method determines the number of bytes for a data block of type Spectra | 
|
2233 | ''' | |
|
2234 | ||
|
2235 | dtype0 = numpy.dtype([('real','<i1'),('imag','<i1')]) | |
|
2236 | dtype1 = numpy.dtype([('real','<i2'),('imag','<i2')]) | |
|
2237 | dtype2 = numpy.dtype([('real','<i4'),('imag','<i4')]) | |
|
2238 | dtype3 = numpy.dtype([('real','<i8'),('imag','<i8')]) | |
|
2239 | dtype4 = numpy.dtype([('real','<f4'),('imag','<f4')]) | |
|
2240 | dtype5 = numpy.dtype([('real','<f8'),('imag','<f8')]) | |
|
2241 | ||
|
2242 | dtypeList = [dtype0, dtype1, dtype2, dtype3, dtype4, dtype5] | |
|
2243 | datatypeValueList = [1,2,4,8,4,8] | |
|
2244 | for index in range(len(dtypeList)): | |
|
2245 | if self.dataOutObj.dtype == dtypeList[index]: | |
|
2246 | datatypeValue = datatypeValueList[index] | |
|
2247 | break | |
|
2248 | ||
|
2249 | ||
|
2250 | pts2write = self.dataOutObj.nHeights * self.dataOutObj.nFFTPoints | |
|
2251 | ||
|
2252 | pts2write_SelfSpectra = int(self.dataOutObj.nChannels * pts2write) | |
|
2253 | blocksize = (pts2write_SelfSpectra*datatypeValue) | |
|
2254 | ||
|
2255 | if self.dataOutObj.data_cspc is not None: | 
|
2256 | pts2write_CrossSpectra = int(self.dataOutObj.nPairs * pts2write) | |
|
2257 | blocksize += (pts2write_CrossSpectra*datatypeValue*2) | |
|
2258 | ||
|
2259 | if self.dataOutObj.data_dc is not None: | 
|
2260 | pts2write_DCchannels = int(self.dataOutObj.nChannels * self.dataOutObj.nHeights) | |
|
2261 | blocksize += (pts2write_DCchannels*datatypeValue*2) | |
|
2262 | ||
|
2263 | blocksize = blocksize #* datatypeValue * 2 #FIX THIS | 
|
2264 | ||
|
2265 | return blocksize | |
|
2266 | ||
|
2267 | def getDataHeader(self): | |
|
2268 | ||
|
2269 | """ | |
|
2270 | Gets a copy of the First Header | 
|
2271 | ||
|
2272 | Affected: | |
|
2273 | self.systemHeaderObj | |
|
2274 | self.radarControllerHeaderObj | |
|
2275 | self.dtype | |
|
2276 | ||
|
2277 | Return: | |
|
2278 | None | |
|
2279 | """ | |
|
2280 | ||
|
2281 | self.systemHeaderObj = self.dataOutObj.systemHeaderObj.copy() | |
|
2282 | self.systemHeaderObj.nChannels = self.dataOutObj.nChannels | |
|
2283 | self.radarControllerHeaderObj = self.dataOutObj.radarControllerHeaderObj.copy() | |
|
2284 | ||
|
2285 | self.getBasicHeader() | |
|
2286 | ||
|
2287 | processingHeaderSize = 40 # bytes | |
|
2288 | self.processingHeaderObj.dtype = 0 # Voltage | |
|
2289 | self.processingHeaderObj.blockSize = self.__getBlockSize() | |
|
2290 | self.processingHeaderObj.profilesPerBlock = self.dataOutObj.nFFTPoints | |
|
2291 | self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile | |
|
2292 | self.processingHeaderObj.nWindows = 1 #could be 1 or self.dataOutObj.processingHeaderObj.nWindows | 
|
2293 | self.processingHeaderObj.processFlags = self.__getProcessFlags() | |
|
2294 | self.processingHeaderObj.nCohInt = self.dataOutObj.nCohInt # required to determine the value of timeInterval | 
|
2295 | self.processingHeaderObj.nIncohInt = self.dataOutObj.nIncohInt | |
|
2296 | self.processingHeaderObj.totalSpectra = self.dataOutObj.nPairs + self.dataOutObj.nChannels | |
|
2297 | ||
|
2298 | if self.processingHeaderObj.totalSpectra > 0: | |
|
2299 | channelList = [] | |
|
2300 | for channel in range(self.dataOutObj.nChannels): | |
|
2301 | channelList.append(channel) | |
|
2302 | channelList.append(channel) | |
|
2303 | ||
|
2304 | pairsList = [] | |
|
2305 | for pair in self.dataOutObj.pairsList: | |
|
2306 | pairsList.append(pair[0]) | |
|
2307 | pairsList.append(pair[1]) | |
|
2308 | spectraComb = channelList + pairsList | |
|
2309 | spectraComb = numpy.array(spectraComb,dtype="u1") | |
|
2310 | self.processingHeaderObj.spectraComb = spectraComb | |
|
2311 | sizeOfSpcComb = len(spectraComb) | |
|
2312 | processingHeaderSize += sizeOfSpcComb | |
|
2313 | ||
|
2314 | if self.dataOutObj.code is not None: | 
|
2315 | self.processingHeaderObj.code = self.dataOutObj.code | |
|
2316 | self.processingHeaderObj.nCode = self.dataOutObj.nCode | |
|
2317 | self.processingHeaderObj.nBaud = self.dataOutObj.nBaud | |
|
2318 | nCodeSize = 4 # bytes | |
|
2319 | nBaudSize = 4 # bytes | |
|
2320 | codeSize = 4 # bytes | |
|
2321 | sizeOfCode = int(nCodeSize + nBaudSize + codeSize * self.dataOutObj.nCode * self.dataOutObj.nBaud) | |
|
2322 | processingHeaderSize += sizeOfCode | |
|
2323 | ||
|
2324 | if self.processingHeaderObj.nWindows != 0: | |
|
2325 | self.processingHeaderObj.firstHeight = self.dataOutObj.heightList[0] | |
|
2326 | self.processingHeaderObj.deltaHeight = self.dataOutObj.heightList[1] - self.dataOutObj.heightList[0] | |
|
2327 | self.processingHeaderObj.nHeights = self.dataOutObj.nHeights | |
|
2328 | self.processingHeaderObj.samplesWin = self.dataOutObj.nHeights | |
|
2329 | sizeOfFirstHeight = 4 | |
|
2330 | sizeOfdeltaHeight = 4 | |
|
2331 | sizeOfnHeights = 4 | |
|
2332 | sizeOfWindows = (sizeOfFirstHeight + sizeOfdeltaHeight + sizeOfnHeights)*self.processingHeaderObj.nWindows | |
|
2333 | processingHeaderSize += sizeOfWindows | |
|
2334 | ||
|
2335 | self.processingHeaderObj.size = processingHeaderSize | |
|
2336 | ||
|
2337 | class SpectraHeisWriter(): | |
|
2338 | ||
|
2339 | i=0 | |
|
2340 | ||
|
2341 | def __init__(self, dataOutObj): | |
|
2342 | ||
|
2343 | self.wrObj = FITS() | |
|
2344 | self.dataOutObj = dataOutObj | |
|
2345 | ||
|
2346 | def isNumber(str): | |
|
2347 | """ | |
|
2348 | Checks whether the characters of a string can be converted to a number. | 

2349 | | 

2350 | Exceptions: | 

2351 | If a given string cannot be converted to a number | 

2352 | Input: | 

2353 | str, the string that is analyzed to determine whether it is convertible to a number | 

2354 | | 

2355 | Return: | 

2356 | True : if the string is numeric | 

2357 | False : if it is not a numeric string | 
|
2358 | """ | |
|
2359 | try: | |
|
2360 | float( str ) | |
|
2361 | return True | |
|
2362 | except: | |
|
2363 | return False | |
|
2364 | ||
|
2365 | def setup(self, wrpath,): | |
|
2366 | ||
|
2367 | if not(os.path.exists(wrpath)): | |
|
2368 | os.mkdir(wrpath) | |
|
2369 | ||
|
2370 | self.wrpath = wrpath | |
|
2371 | self.setFile = 0 | |
|
2372 | ||
|
2373 | def putData(self): | |
|
2374 | # self.wrObj.writeHeader(nChannels=self.dataOutObj.nChannels, nFFTPoints=self.dataOutObj.nFFTPoints) | |
|
2375 | #name = self.dataOutObj.utctime | |
|
2376 | name= time.localtime( self.dataOutObj.utctime) | |
|
2377 | ext=".fits" | |
|
2378 | #folder='D%4.4d%3.3d'%(name.tm_year,name.tm_yday) | |
|
2379 | subfolder = 'D%4.4d%3.3d' % (name.tm_year,name.tm_yday) | |
|
2380 | ||
|
2381 | doypath = os.path.join( self.wrpath, subfolder ) | |
|
2382 | if not( os.path.exists(doypath) ): | |
|
2383 | os.mkdir(doypath) | |
|
2384 | self.setFile += 1 | |
|
2385 | file = 'D%4.4d%3.3d%3.3d%s' % (name.tm_year,name.tm_yday,self.setFile,ext) | |
|
2386 | ||
|
2387 | filename = os.path.join(self.wrpath,subfolder, file) | |
|
2388 | ||
|
2389 | # print self.dataOutObj.ippSeconds | |
|
2390 | freq=numpy.arange(-1*self.dataOutObj.nHeights/2.,self.dataOutObj.nHeights/2.)/(2*self.dataOutObj.ippSeconds) | |
|
2391 | ||
|
2392 | col1=self.wrObj.setColF(name="freq", format=str(self.dataOutObj.nFFTPoints)+'E', array=freq) | |
|
2393 | col2=self.wrObj.writeData(name="P_Ch1",format=str(self.dataOutObj.nFFTPoints)+'E',data=10*numpy.log10(self.dataOutObj.data_spc[0,:])) | |
|
2394 | col3=self.wrObj.writeData(name="P_Ch2",format=str(self.dataOutObj.nFFTPoints)+'E',data=10*numpy.log10(self.dataOutObj.data_spc[1,:])) | |
|
2395 | col4=self.wrObj.writeData(name="P_Ch3",format=str(self.dataOutObj.nFFTPoints)+'E',data=10*numpy.log10(self.dataOutObj.data_spc[2,:])) | |
|
2396 | col5=self.wrObj.writeData(name="P_Ch4",format=str(self.dataOutObj.nFFTPoints)+'E',data=10*numpy.log10(self.dataOutObj.data_spc[3,:])) | |
|
2397 | col6=self.wrObj.writeData(name="P_Ch5",format=str(self.dataOutObj.nFFTPoints)+'E',data=10*numpy.log10(self.dataOutObj.data_spc[4,:])) | |
|
2398 | col7=self.wrObj.writeData(name="P_Ch6",format=str(self.dataOutObj.nFFTPoints)+'E',data=10*numpy.log10(self.dataOutObj.data_spc[5,:])) | |
|
2399 | col8=self.wrObj.writeData(name="P_Ch7",format=str(self.dataOutObj.nFFTPoints)+'E',data=10*numpy.log10(self.dataOutObj.data_spc[6,:])) | |
|
2400 | col9=self.wrObj.writeData(name="P_Ch8",format=str(self.dataOutObj.nFFTPoints)+'E',data=10*numpy.log10(self.dataOutObj.data_spc[7,:])) | |
|
2401 | #n=numpy.arange((100)) | |
|
2402 | n=self.dataOutObj.data_spc[6,:] | |
|
2403 | a=self.wrObj.cFImage(n) | |
|
2404 | b=self.wrObj.Ctable(col1,col2,col3,col4,col5,col6,col7,col8,col9) | |
|
2405 | self.wrObj.CFile(a,b) | |
|
2406 | self.wrObj.wFile(filename) | |
|
2407 | return 1 | |
|
2408 | ||
|
2409 | class FITS: | |
|
2410 | ||
|
2411 | name=None | |
|
2412 | format=None | |
|
2413 | array =None | |
|
2414 | data =None | |
|
2415 | thdulist=None | |
|
2416 | ||
|
2417 | def __init__(self): | |
|
2418 | ||
|
2419 | pass | |
|
2420 | ||
|
2421 | def setColF(self,name,format,array): | |
|
2422 | self.name=name | |
|
2423 | self.format=format | |
|
2424 | self.array=array | |
|
2425 | a1=numpy.array([self.array],dtype=numpy.float32) | |
|
2426 | self.col1 = pyfits.Column(name=self.name, format=self.format, array=a1) | |
|
2427 | return self.col1 | |
|
2428 | ||
|
2429 | # def setColP(self,name,format,data): | |
|
2430 | # self.name=name | |
|
2431 | # self.format=format | |
|
2432 | # self.data=data | |
|
2433 | # a2=numpy.array([self.data],dtype=numpy.float32) | |
|
2434 | # self.col2 = pyfits.Column(name=self.name, format=self.format, array=a2) | |
|
2435 | # return self.col2 | |
|
2436 | ||
|
2437 | def writeHeader(self,): | |
|
2438 | pass | |
|
2439 | ||
|
2440 | def writeData(self,name,format,data): | |
|
2441 | self.name=name | |
|
2442 | self.format=format | |
|
2443 | self.data=data | |
|
2444 | a2=numpy.array([self.data],dtype=numpy.float32) | |
|
2445 | self.col2 = pyfits.Column(name=self.name, format=self.format, array=a2) | |
|
2446 | return self.col2 | |
|
2447 | ||
|
2448 | def cFImage(self,n): | |
|
2449 | self.hdu= pyfits.PrimaryHDU(n) | |
|
2450 | return self.hdu | |
|
2451 | ||
|
2452 | def Ctable(self,col1,col2,col3,col4,col5,col6,col7,col8,col9): | |
|
2453 | self.cols=pyfits.ColDefs( [col1,col2,col3,col4,col5,col6,col7,col8,col9]) | |
|
2454 | self.tbhdu = pyfits.new_table(self.cols) | |
|
2455 | return self.tbhdu | |
|
2456 | ||
|
2457 | def CFile(self,hdu,tbhdu): | |
|
2458 | self.thdulist=pyfits.HDUList([hdu,tbhdu]) | |
|
2459 | ||
|
2460 | def wFile(self,filename): | |
|
2461 | self.thdulist.writeto(filename) No newline at end of file |
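Review note: putData() above assembles the FITS output by chaining the small FITS helper: one pyfits.Column per channel, a primary (image) HDU built from one spectrum, a binary table built from the columns, and an HDUList written to disk. A minimal standalone sketch of the same pyfits calls, with made-up array sizes and file name:

    import numpy
    import pyfits   # same module used by the FITS class above

    nFFTPoints = 128
    freq = numpy.arange(nFFTPoints, dtype=numpy.float32)
    power = 10 * numpy.log10(numpy.arange(1, nFFTPoints + 1, dtype=numpy.float32))

    fmt = str(nFFTPoints) + 'E'                              # row of nFFTPoints single-precision floats
    col1 = pyfits.Column(name="freq", format=fmt, array=numpy.array([freq]))
    col2 = pyfits.Column(name="P_Ch1", format=fmt, array=numpy.array([power]))

    hdu = pyfits.PrimaryHDU(power)                           # as in cFImage()
    tbhdu = pyfits.new_table(pyfits.ColDefs([col1, col2]))   # as in Ctable()
    pyfits.HDUList([hdu, tbhdu]).writeto("example.fits")     # as in CFile() + wFile()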
@@ -0,0 +1,505 | |||
|
1 | ''' | |
|
2 | ||
|
3 | $Author: murco $ | |
|
4 | $Id: JROHeaderIO.py 151 2012-10-31 19:00:51Z murco $ | |
|
5 | ''' | |
|
6 | ||
|
7 | import numpy | |
|
8 | import copy | |
|
9 | ||
|
10 | class Header: | |
|
11 | ||
|
12 | def __init__(self): | |
|
13 | raise NotImplementedError | |
|
14 | ||
|
15 | def copy(self): | |
|
16 | return copy.deepcopy(self) | |
|
17 | ||
|
18 | def read(self): | |
|
19 | pass | |
|
20 | ||
|
21 | def write(self): | |
|
22 | pass | |
|
23 | ||
|
24 | class BasicHeader(Header): | |
|
25 | ||
|
26 | size = None | |
|
27 | version = None | |
|
28 | dataBlock = None | |
|
29 | utc = None | |
|
30 | miliSecond = None | |
|
31 | timeZone = None | |
|
32 | dstFlag = None | |
|
33 | errorCount = None | |
|
34 | struct = None | |
|
35 | ||
|
36 | def __init__(self): | |
|
37 | ||
|
38 | self.size = 0 | |
|
39 | self.version = 0 | |
|
40 | self.dataBlock = 0 | |
|
41 | self.utc = 0 | |
|
42 | self.miliSecond = 0 | |
|
43 | self.timeZone = 0 | |
|
44 | self.dstFlag = 0 | |
|
45 | self.errorCount = 0 | |
|
46 | self.struct = numpy.dtype([ | |
|
47 | ('nSize','<u4'), | |
|
48 | ('nVersion','<u2'), | |
|
49 | ('nDataBlockId','<u4'), | |
|
50 | ('nUtime','<u4'), | |
|
51 | ('nMilsec','<u2'), | |
|
52 | ('nTimezone','<i2'), | |
|
53 | ('nDstflag','<i2'), | |
|
54 | ('nErrorCount','<u4') | |
|
55 | ]) | |
|
56 | ||
|
57 | ||
|
58 | def read(self, fp): | |
|
59 | try: | |
|
60 | header = numpy.fromfile(fp, self.struct,1) | |
|
61 | self.size = header['nSize'][0] | |
|
62 | self.version = header['nVersion'][0] | |
|
63 | self.dataBlock = header['nDataBlockId'][0] | |
|
64 | self.utc = header['nUtime'][0] | |
|
65 | self.miliSecond = header['nMilsec'][0] | |
|
66 | self.timeZone = header['nTimezone'][0] | |
|
67 | self.dstFlag = header['nDstflag'][0] | |
|
68 | self.errorCount = header['nErrorCount'][0] | |
|
69 | except: | |
|
70 | return 0 | |
|
71 | ||
|
72 | return 1 | |
|
73 | ||
|
74 | def write(self, fp): | |
|
75 | headerTuple = (self.size,self.version,self.dataBlock,self.utc,self.miliSecond,self.timeZone,self.dstFlag,self.errorCount) | |
|
76 | header = numpy.array(headerTuple,self.struct) | |
|
77 | header.tofile(fp) | |
|
78 | ||
|
79 | return 1 | |
|
80 | ||
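Review note: every header class in this file follows the same pattern: a numpy structured dtype (self.struct) mirrors the on-disk C layout, read() fills the attributes with numpy.fromfile() and write() packs them back with numpy.array(headerTuple, self.struct).tofile(). A minimal round-trip sketch for BasicHeader (the file name is illustrative):

    basicHeaderObj = BasicHeader()
    basicHeaderObj.size = 24            # 4+2+4+4+2+2+2+4 bytes, the size of the struct above
    basicHeaderObj.utc = 1351700000

    fp = open("basic_header.bin", "wb")
    basicHeaderObj.write(fp)
    fp.close()

    newHeaderObj = BasicHeader()
    fp = open("basic_header.bin", "rb")
    newHeaderObj.read(fp)               # returns 1 on success, 0 on failure
    fp.close()
    print newHeaderObj.utc              # same value that was written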
|
81 | class SystemHeader(Header): | |
|
82 | ||
|
83 | size = None | |
|
84 | nSamples = None | |
|
85 | nProfiles = None | |
|
86 | nChannels = None | |
|
87 | adcResolution = None | |
|
88 | pciDioBusWidth = None | |
|
89 | struct = None | |
|
90 | ||
|
91 | def __init__(self): | |
|
92 | self.size = 0 | |
|
93 | self.nSamples = 0 | |
|
94 | self.nProfiles = 0 | |
|
95 | self.nChannels = 0 | |
|
96 | self.adcResolution = 0 | |
|
97 | self.pciDioBusWidth = 0 | |
|
98 | self.struct = numpy.dtype([ | |
|
99 | ('nSize','<u4'), | |
|
100 | ('nNumSamples','<u4'), | |
|
101 | ('nNumProfiles','<u4'), | |
|
102 | ('nNumChannels','<u4'), | |
|
103 | ('nADCResolution','<u4'), | |
|
104 | ('nPCDIOBusWidth','<u4'), | |
|
105 | ]) | |
|
106 | ||
|
107 | ||
|
108 | def read(self, fp): | |
|
109 | try: | |
|
110 | header = numpy.fromfile(fp,self.struct,1) | |
|
111 | self.size = header['nSize'][0] | |
|
112 | self.nSamples = header['nNumSamples'][0] | |
|
113 | self.nProfiles = header['nNumProfiles'][0] | |
|
114 | self.nChannels = header['nNumChannels'][0] | |
|
115 | self.adcResolution = header['nADCResolution'][0] | |
|
116 | self.pciDioBusWidth = header['nPCDIOBusWidth'][0] | |
|
117 | except: | |
|
118 | return 0 | |
|
119 | ||
|
120 | return 1 | |
|
121 | ||
|
122 | def write(self, fp): | |
|
123 | headerTuple = (self.size,self.nSamples,self.nProfiles,self.nChannels,self.adcResolution,self.pciDioBusWidth) | |
|
124 | header = numpy.array(headerTuple,self.struct) | |
|
125 | header.tofile(fp) | |
|
126 | ||
|
127 | return 1 | |
|
128 | ||
|
129 | class RadarControllerHeader(Header): | |
|
130 | ||
|
131 | size = None | |
|
132 | expType = None | |
|
133 | nTx = None | |
|
134 | ipp = None | |
|
135 | txA = None | |
|
136 | txB = None | |
|
137 | nWindows = None | |
|
138 | numTaus = None | |
|
139 | codeType = None | |
|
140 | line6Function = None | |
|
141 | line5Function = None | |
|
142 | fClock = None | |
|
143 | prePulseBefore = None | |
|
144 | prePulserAfter = None | |
|
145 | rangeIpp = None | |
|
146 | rangeTxA = None | |
|
147 | rangeTxB = None | |
|
148 | struct = None | |
|
149 | ||
|
150 | def __init__(self): | |
|
151 | self.size = 0 | |
|
152 | self.expType = 0 | |
|
153 | self.nTx = 0 | |
|
154 | self.ipp = 0 | |
|
155 | self.txA = 0 | |
|
156 | self.txB = 0 | |
|
157 | self.nWindows = 0 | |
|
158 | self.numTaus = 0 | |
|
159 | self.codeType = 0 | |
|
160 | self.line6Function = 0 | |
|
161 | self.line5Function = 0 | |
|
162 | self.fClock = 0 | |
|
163 | self.prePulseBefore = 0 | |
|
164 | self.prePulserAfter = 0 | |
|
165 | self.rangeIpp = 0 | |
|
166 | self.rangeTxA = 0 | |
|
167 | self.rangeTxB = 0 | |
|
168 | self.struct = numpy.dtype([ | |
|
169 | ('nSize','<u4'), | |
|
170 | ('nExpType','<u4'), | |
|
171 | ('nNTx','<u4'), | |
|
172 | ('fIpp','<f4'), | |
|
173 | ('fTxA','<f4'), | |
|
174 | ('fTxB','<f4'), | |
|
175 | ('nNumWindows','<u4'), | |
|
176 | ('nNumTaus','<u4'), | |
|
177 | ('nCodeType','<u4'), | |
|
178 | ('nLine6Function','<u4'), | |
|
179 | ('nLine5Function','<u4'), | |
|
180 | ('fClock','<f4'), | |
|
181 | ('nPrePulseBefore','<u4'), | |
|
182 | ('nPrePulseAfter','<u4'), | |
|
183 | ('sRangeIPP','<a20'), | |
|
184 | ('sRangeTxA','<a20'), | |
|
185 | ('sRangeTxB','<a20'), | |
|
186 | ]) | |
|
187 | ||
|
188 | self.samplingWindowStruct = numpy.dtype([('h0','<f4'),('dh','<f4'),('nsa','<u4')]) | |
|
189 | ||
|
190 | self.samplingWindow = None | |
|
191 | self.nHeights = None | |
|
192 | self.firstHeight = None | |
|
193 | self.deltaHeight = None | |
|
194 | self.samplesWin = None | |
|
195 | ||
|
196 | self.nCode = None | |
|
197 | self.nBaud = None | |
|
198 | self.code = None | |
|
199 | self.flip1 = None | |
|
200 | self.flip2 = None | |
|
201 | ||
|
202 | self.dynamic = numpy.array([],numpy.dtype('byte')) | |
|
203 | ||
|
204 | ||
|
205 | def read(self, fp): | |
|
206 | try: | |
|
207 | startFp = fp.tell() | |
|
208 | header = numpy.fromfile(fp,self.struct,1) | |
|
209 | self.size = header['nSize'][0] | |
|
210 | self.expType = header['nExpType'][0] | |
|
211 | self.nTx = header['nNTx'][0] | |
|
212 | self.ipp = header['fIpp'][0] | |
|
213 | self.txA = header['fTxA'][0] | |
|
214 | self.txB = header['fTxB'][0] | |
|
215 | self.nWindows = header['nNumWindows'][0] | |
|
216 | self.numTaus = header['nNumTaus'][0] | |
|
217 | self.codeType = header['nCodeType'][0] | |
|
218 | self.line6Function = header['nLine6Function'][0] | |
|
219 | self.line5Function = header['nLine5Function'][0] | |
|
220 | self.fClock = header['fClock'][0] | |
|
221 | self.prePulseBefore = header['nPrePulseBefore'][0] | |
|
222 | self.prePulserAfter = header['nPrePulseAfter'][0] | |
|
223 | self.rangeIpp = header['sRangeIPP'][0] | |
|
224 | self.rangeTxA = header['sRangeTxA'][0] | |
|
225 | self.rangeTxB = header['sRangeTxB'][0] | |
|
226 | # jump Dynamic Radar Controller Header | |
|
227 | jumpFp = self.size - 116 | |
|
228 | self.dynamic = numpy.fromfile(fp,numpy.dtype('byte'),jumpFp) | |
|
229 | #pointer backward to dynamic header and read | |
|
230 | backFp = fp.tell() - jumpFp | |
|
231 | fp.seek(backFp) | |
|
232 | ||
|
233 | self.samplingWindow = numpy.fromfile(fp,self.samplingWindowStruct,self.nWindows) | |
|
234 | self.nHeights = numpy.sum(self.samplingWindow['nsa']) | |
|
235 | self.firstHeight = self.samplingWindow['h0'] | |
|
236 | self.deltaHeight = self.samplingWindow['dh'] | |
|
237 | self.samplesWin = self.samplingWindow['nsa'] | |
|
238 | ||
|
239 | self.Taus = numpy.fromfile(fp,'<f4',self.numTaus) | |
|
240 | ||
|
241 | if self.codeType != 0: | |
|
242 | self.nCode = numpy.fromfile(fp,'<u4',1) | |
|
243 | self.nBaud = numpy.fromfile(fp,'<u4',1) | |
|
244 | self.code = numpy.empty([self.nCode,self.nBaud],dtype='u1') | |
|
245 | tempList = [] | |
|
246 | for ic in range(self.nCode): | |
|
247 | temp = numpy.fromfile(fp,'u1',4*numpy.ceil(self.nBaud/32.)) | |
|
248 | tempList.append(temp) | |
|
249 | self.code[ic] = numpy.unpackbits(temp[::-1])[-1*self.nBaud:] | |
|
250 | self.code = 2.0*self.code - 1.0 | |
|
251 | ||
|
252 | if self.line5Function == RCfunction.FLIP: | |
|
253 | self.flip1 = numpy.fromfile(fp,'<u4',1) | |
|
254 | ||
|
255 | if self.line6Function == RCfunction.FLIP: | |
|
256 | self.flip2 = numpy.fromfile(fp,'<u4',1) | |
|
257 | ||
|
258 | endFp = self.size + startFp | |
|
259 | jumpFp = endFp - fp.tell() | |
|
260 | if jumpFp > 0: | |
|
261 | fp.seek(jumpFp, 1) #relative seek to skip the remaining header bytes | |
|
262 | ||
|
263 | except: | |
|
264 | return 0 | |
|
265 | ||
|
266 | return 1 | |
|
267 | ||
|
268 | def write(self, fp): | |
|
269 | headerTuple = (self.size, | |
|
270 | self.expType, | |
|
271 | self.nTx, | |
|
272 | self.ipp, | |
|
273 | self.txA, | |
|
274 | self.txB, | |
|
275 | self.nWindows, | |
|
276 | self.numTaus, | |
|
277 | self.codeType, | |
|
278 | self.line6Function, | |
|
279 | self.line5Function, | |
|
280 | self.fClock, | |
|
281 | self.prePulseBefore, | |
|
282 | self.prePulserAfter, | |
|
283 | self.rangeIpp, | |
|
284 | self.rangeTxA, | |
|
285 | self.rangeTxB) | |
|
286 | ||
|
287 | header = numpy.array(headerTuple,self.struct) | |
|
288 | header.tofile(fp) | |
|
289 | ||
|
290 | dynamic = self.dynamic | |
|
291 | dynamic.tofile(fp) | |
|
292 | ||
|
293 | return 1 | |
|
294 | ||
|
295 | ||
|
296 | ||
|
297 | class ProcessingHeader(Header): | |
|
298 | ||
|
299 | size = None | |
|
300 | dtype = None | |
|
301 | blockSize = None | |
|
302 | profilesPerBlock = None | |
|
303 | dataBlocksPerFile = None | |
|
304 | nWindows = None | |
|
305 | processFlags = None | |
|
306 | nCohInt = None | |
|
307 | nIncohInt = None | |
|
308 | totalSpectra = None | |
|
309 | struct = None | |
|
310 | flag_dc = None | |
|
311 | flag_cspc = None | |
|
312 | ||
|
313 | def __init__(self): | |
|
314 | self.size = 0 | |
|
315 | self.dtype = 0 | |
|
316 | self.blockSize = 0 | |
|
317 | self.profilesPerBlock = 0 | |
|
318 | self.dataBlocksPerFile = 0 | |
|
319 | self.nWindows = 0 | |
|
320 | self.processFlags = 0 | |
|
321 | self.nCohInt = 0 | |
|
322 | self.nIncohInt = 0 | |
|
323 | self.totalSpectra = 0 | |
|
324 | self.struct = numpy.dtype([ | |
|
325 | ('nSize','<u4'), | |
|
326 | ('nDataType','<u4'), | |
|
327 | ('nSizeOfDataBlock','<u4'), | |
|
328 | ('nProfilesperBlock','<u4'), | |
|
329 | ('nDataBlocksperFile','<u4'), | |
|
330 | ('nNumWindows','<u4'), | |
|
331 | ('nProcessFlags','<u4'), | |
|
332 | ('nCoherentIntegrations','<u4'), | |
|
333 | ('nIncoherentIntegrations','<u4'), | |
|
334 | ('nTotalSpectra','<u4') | |
|
335 | ]) | |
|
336 | self.samplingWindow = 0 | |
|
337 | self.structSamplingWindow = numpy.dtype([('h0','<f4'),('dh','<f4'),('nsa','<u4')]) | |
|
338 | self.nHeights = 0 | |
|
339 | self.firstHeight = 0 | |
|
340 | self.deltaHeight = 0 | |
|
341 | self.samplesWin = 0 | |
|
342 | self.spectraComb = 0 | |
|
343 | self.nCode = None | |
|
344 | self.code = None | |
|
345 | self.nBaud = None | |
|
346 | self.shif_fft = False | |
|
347 | self.flag_dc = False | |
|
348 | self.flag_cspc = False | |
|
349 | ||
|
350 | def read(self, fp): | |
|
351 | try: | |
|
352 | header = numpy.fromfile(fp,self.struct,1) | |
|
353 | self.size = header['nSize'][0] | |
|
354 | self.dtype = header['nDataType'][0] | |
|
355 | self.blockSize = header['nSizeOfDataBlock'][0] | |
|
356 | self.profilesPerBlock = header['nProfilesperBlock'][0] | |
|
357 | self.dataBlocksPerFile = header['nDataBlocksperFile'][0] | |
|
358 | self.nWindows = header['nNumWindows'][0] | |
|
359 | self.processFlags = header['nProcessFlags'][0] | |
|
360 | self.nCohInt = header['nCoherentIntegrations'][0] | |
|
361 | self.nIncohInt = header['nIncoherentIntegrations'][0] | |
|
362 | self.totalSpectra = header['nTotalSpectra'][0] | |
|
363 | self.samplingWindow = numpy.fromfile(fp,self.structSamplingWindow,self.nWindows) | |
|
364 | self.nHeights = numpy.sum(self.samplingWindow['nsa']) | |
|
365 | self.firstHeight = self.samplingWindow['h0'][0] | |
|
366 | self.deltaHeight = self.samplingWindow['dh'][0] | |
|
367 | self.samplesWin = self.samplingWindow['nsa'] | |
|
368 | self.spectraComb = numpy.fromfile(fp,'u1',2*self.totalSpectra) | |
|
369 | ||
|
370 | if ((self.processFlags & PROCFLAG.DEFINE_PROCESS_CODE) == PROCFLAG.DEFINE_PROCESS_CODE): | |
|
371 | self.nCode = numpy.fromfile(fp,'<u4',1) | |
|
372 | self.nBaud = numpy.fromfile(fp,'<u4',1) | |
|
373 | self.code = numpy.fromfile(fp,'<f4',self.nCode*self.nBaud).reshape(self.nBaud,self.nCode) | |
|
374 | ||
|
375 | if ((self.processFlags & PROCFLAG.SHIFT_FFT_DATA) == PROCFLAG.SHIFT_FFT_DATA): | |
|
376 | self.shif_fft = True | |
|
377 | else: | |
|
378 | self.shif_fft = False | |
|
379 | ||
|
380 | if ((self.processFlags & PROCFLAG.SAVE_CHANNELS_DC) == PROCFLAG.SAVE_CHANNELS_DC): | |
|
381 | self.flag_dc = True | |
|
382 | ||
|
383 | nChannels = 0 | |
|
384 | nPairs = 0 | |
|
385 | pairList = [] | |
|
386 | ||
|
387 | for i in range( 0, self.totalSpectra*2, 2 ): | |
|
388 | if self.spectraComb[i] == self.spectraComb[i+1]: | |
|
389 | nChannels = nChannels + 1 #pair of equal channels (self-spectra) | |
|
390 | else: | |
|
391 | nPairs = nPairs + 1 #pair of different channels (cross-spectra) | |
|
392 | pairList.append( (self.spectraComb[i], self.spectraComb[i+1]) ) | |
|
393 | ||
|
394 | self.flag_cspc = False | |
|
395 | if nPairs > 0: | |
|
396 | self.flag_cspc = True | |
|
397 | ||
|
398 | except: | |
|
399 | return 0 | |
|
400 | ||
|
401 | return 1 | |
|
402 | ||
|
403 | def write(self, fp): | |
|
404 | headerTuple = (self.size, | |
|
405 | self.dtype, | |
|
406 | self.blockSize, | |
|
407 | self.profilesPerBlock, | |
|
408 | self.dataBlocksPerFile, | |
|
409 | self.nWindows, | |
|
410 | self.processFlags, | |
|
411 | self.nCohInt, | |
|
412 | self.nIncohInt, | |
|
413 | self.totalSpectra) | |
|
414 | ||
|
415 | header = numpy.array(headerTuple,self.struct) | |
|
416 | header.tofile(fp) | |
|
417 | ||
|
418 | if self.nWindows != 0: | |
|
419 | sampleWindowTuple = (self.firstHeight,self.deltaHeight,self.samplesWin) | |
|
420 | samplingWindow = numpy.array(sampleWindowTuple,self.structSamplingWindow) | |
|
421 | samplingWindow.tofile(fp) | |
|
422 | ||
|
423 | ||
|
424 | if self.totalSpectra != 0: | |
|
425 | spectraComb = numpy.array([],numpy.dtype('u1')) | |
|
426 | spectraComb = self.spectraComb | |
|
427 | spectraComb.tofile(fp) | |
|
428 | ||
|
429 | ||
|
430 | if self.processFlags & PROCFLAG.DEFINE_PROCESS_CODE == PROCFLAG.DEFINE_PROCESS_CODE: | |
|
431 | nCode = self.nCode #To be tested with data that stores a code; this path has not been tested yet | |
|
432 | nCode.tofile(fp) | |
|
433 | ||
|
434 | nBaud = self.nBaud | |
|
435 | nBaud.tofile(fp) | |
|
436 | ||
|
437 | code = self.code.reshape(nCode*nBaud) | |
|
438 | code.tofile(fp) | |
|
439 | ||
|
440 | return 1 | |
|
441 | ||
|
442 | class RCfunction: | |
|
443 | NONE=0 | |
|
444 | FLIP=1 | |
|
445 | CODE=2 | |
|
446 | SAMPLING=3 | |
|
447 | LIN6DIV256=4 | |
|
448 | SYNCHRO=5 | |
|
449 | ||
|
450 | class nCodeType: | |
|
451 | NONE=0 | |
|
452 | USERDEFINE=1 | |
|
453 | BARKER2=2 | |
|
454 | BARKER3=3 | |
|
455 | BARKER4=4 | |
|
456 | BARKER5=5 | |
|
457 | BARKER7=6 | |
|
458 | BARKER11=7 | |
|
459 | BARKER13=8 | |
|
460 | AC128=9 | |
|
461 | COMPLEMENTARYCODE2=10 | |
|
462 | COMPLEMENTARYCODE4=11 | |
|
463 | COMPLEMENTARYCODE8=12 | |
|
464 | COMPLEMENTARYCODE16=13 | |
|
465 | COMPLEMENTARYCODE32=14 | |
|
466 | COMPLEMENTARYCODE64=15 | |
|
467 | COMPLEMENTARYCODE128=16 | |
|
468 | CODE_BINARY28=17 | |
|
469 | ||
|
470 | class PROCFLAG: | |
|
471 | COHERENT_INTEGRATION = numpy.uint32(0x00000001) | |
|
472 | DECODE_DATA = numpy.uint32(0x00000002) | |
|
473 | SPECTRA_CALC = numpy.uint32(0x00000004) | |
|
474 | INCOHERENT_INTEGRATION = numpy.uint32(0x00000008) | |
|
475 | POST_COHERENT_INTEGRATION = numpy.uint32(0x00000010) | |
|
476 | SHIFT_FFT_DATA = numpy.uint32(0x00000020) | |
|
477 | ||
|
478 | DATATYPE_CHAR = numpy.uint32(0x00000040) | |
|
479 | DATATYPE_SHORT = numpy.uint32(0x00000080) | |
|
480 | DATATYPE_LONG = numpy.uint32(0x00000100) | |
|
481 | DATATYPE_INT64 = numpy.uint32(0x00000200) | |
|
482 | DATATYPE_FLOAT = numpy.uint32(0x00000400) | |
|
483 | DATATYPE_DOUBLE = numpy.uint32(0x00000800) | |
|
484 | ||
|
485 | DATAARRANGE_CONTIGUOUS_CH = numpy.uint32(0x00001000) | |
|
486 | DATAARRANGE_CONTIGUOUS_H = numpy.uint32(0x00002000) | |
|
487 | DATAARRANGE_CONTIGUOUS_P = numpy.uint32(0x00004000) | |
|
488 | ||
|
489 | SAVE_CHANNELS_DC = numpy.uint32(0x00008000) | |
|
490 | DEFLIP_DATA = numpy.uint32(0x00010000) | |
|
491 | DEFINE_PROCESS_CODE = numpy.uint32(0x00020000) | |
|
492 | ||
|
493 | ACQ_SYS_NATALIA = numpy.uint32(0x00040000) | |
|
494 | ACQ_SYS_ECHOTEK = numpy.uint32(0x00080000) | |
|
495 | ACQ_SYS_ADRXD = numpy.uint32(0x000C0000) | |
|
496 | ACQ_SYS_JULIA = numpy.uint32(0x00100000) | |
|
497 | ACQ_SYS_XXXXXX = numpy.uint32(0x00140000) | |
|
498 | ||
|
499 | EXP_NAME_ESP = numpy.uint32(0x00200000) | |
|
500 | CHANNEL_NAMES_ESP = numpy.uint32(0x00400000) | |
|
501 | ||
|
502 | OPERATION_MASK = numpy.uint32(0x0000003F) | |
|
503 | DATATYPE_MASK = numpy.uint32(0x00000FC0) | |
|
504 | DATAARRANGE_MASK = numpy.uint32(0x00007000) | |
|
505 | ACQ_SYS_MASK = numpy.uint32(0x001C0000) No newline at end of file |
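Review note: the PROCFLAG constants are bit masks that are OR-ed together into the single processFlags word of the ProcessingHeader; individual operations are tested with a bitwise AND (exactly as ProcessingHeader.read() does above) and the *_MASK constants isolate a group of bits. A short sketch with an arbitrary flag combination:

    processFlags = PROCFLAG.SPECTRA_CALC | PROCFLAG.SHIFT_FFT_DATA | PROCFLAG.DATATYPE_FLOAT

    # test a single operation bit
    shif_fft = (processFlags & PROCFLAG.SHIFT_FFT_DATA) == PROCFLAG.SHIFT_FFT_DATA   # True

    # isolate the data-type bits with the corresponding mask
    datatypeBits = processFlags & PROCFLAG.DATATYPE_MASK                             # equals PROCFLAG.DATATYPE_FLOAT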
|
1 | NO CONTENT: new file 100644 |
@@ -0,0 +1,423 | |||
|
1 | ''' | |
|
2 | ||
|
3 | $Author: dsuarez $ | |
|
4 | $Id: Processor.py 1 2012-11-12 18:56:07Z dsuarez $ | |
|
5 | ''' | |
|
6 | import os | |
|
7 | import numpy | |
|
8 | import datetime | |
|
9 | import time | |
|
10 | ||
|
11 | from JROData import * | |
|
12 | from JRODataIO import * | |
|
13 | from JROPlot import * | |
|
14 | ||
|
15 | class ProcessingUnit: | |
|
16 | ||
|
17 | """ | |
|
18 | This is the base class for data processing. | |
|
19 | ||
|
20 | It provides the "call" method to invoke operations. Operations can be of two kinds: | |
|
21 | - Internal methods (callMethod) | |
|
22 | - Objects of type Operation (callObject). Before being called, these objects | |
|
23 | have to be added with the "addOperation" method. | |
|
24 | ||
|
25 | """ | |
|
26 | # input data object (Voltage, Spectra or Correlation) | |
|
27 | dataIn = None | |
|
28 | ||
|
29 | # output data object (Voltage, Spectra or Correlation) | |
|
30 | dataOut = None | |
|
31 | ||
|
32 | ||
|
33 | objectDict = None | |
|
34 | ||
|
35 | def __init__(self): | |
|
36 | ||
|
37 | self.objectDict = {} | |
|
38 | ||
|
39 | def addOperation(self, object, objId): | |
|
40 | ||
|
41 | """ | |
|
42 | Adds the object "object" to the object dictionary "self.objectDict" and returns the | |
|
43 | identifier associated with that object. | |
|
44 | ||
|
45 | Input: | |
|
46 | ||
|
47 | object : an object of the "Operation" class | |
|
48 | ||
|
49 | Return: | |
|
50 | ||
|
51 | objId : identifier of the object, needed to execute the operation | |
|
52 | """ | |
|
53 | ||
|
54 | self.objectDict[objId] = object | |
|
55 | ||
|
56 | return objId | |
|
57 | ||
|
58 | def operation(self, **kwargs): | |
|
59 | ||
|
60 | """ | |
|
61 | Direct operation on the data (dataOut.data). The attribute values of the dataOut | |
|
62 | object need to be updated. | |
|
63 | ||
|
64 | Input: | |
|
65 | ||
|
66 | **kwargs : dictionary with the arguments of the function to execute | |
|
67 | """ | |
|
68 | ||
|
69 | if self.dataIn.isEmpty(): | |
|
70 | return None | |
|
71 | ||
|
72 | raise NotImplementedError, "This method should be implemented by the derived class" | |
|
73 | ||
|
74 | def callMethod(self, name, **kwargs): | |
|
75 | ||
|
76 | """ | |
|
77 | Executes the method named "name" of this class, with the arguments **kwargs. | |
|
78 | ||
|
79 | Input: | |
|
80 | name : name of the method to execute | |
|
81 | ||
|
82 | **kwargs : dictionary with the argument names and values of the function to execute. | |
|
83 | ||
|
84 | """ | |
|
85 | ||
|
86 | if self.dataIn.isEmpty(): | |
|
87 | return None | |
|
88 | ||
|
89 | methodToCall = getattr(self, name) | |
|
90 | ||
|
91 | methodToCall(**kwargs) | |
|
92 | ||
|
93 | def callObject(self, objId, **kwargs): | |
|
94 | ||
|
95 | """ | |
|
96 | Ejecuta la operacion asociada al identificador del objeto "objId" | |
|
97 | ||
|
98 | Input: | |
|
99 | ||
|
100 | objId : identificador del objeto a ejecutar | |
|
101 | ||
|
102 | **kwargs : diccionario con los nombres y valores de la funcion a ejecutar. | |
|
103 | ||
|
104 | Return: | |
|
105 | ||
|
106 | None | |
|
107 | """ | |
|
108 | ||
|
109 | if self.dataIn.isEmpty(): | |
|
110 | return None | |
|
111 | ||
|
112 | object = self.objectDict[objId] | |
|
113 | ||
|
114 | object.run(self.dataOut, **kwargs) | |
|
115 | ||
|
116 | def call(self, operation, **kwargs): | |
|
117 | ||
|
118 | """ | |
|
119 | Executes the operation "operation" with the arguments "**kwargs". The operation can | |
|
120 | be of two kinds: | |
|
121 | ||
|
122 | 1. A method of this class itself: | |
|
123 | ||
|
124 | operation.type = "self" | |
|
125 | ||
|
126 | 2. The "run" method of an object of type Operation, or of a class derived from it: | |
|
127 | operation.type = "other". | |
|
128 | ||
|
129 | This Operation object must have been added beforehand with the "addOperation" | |
|
130 | method and is identified by its operation.id. | |
|
131 | ||
|
132 | ||
|
133 | ||
|
134 | """ | |
|
135 | if self.dataIn.isEmpty(): | |
|
136 | return None | |
|
137 | ||
|
138 | if operation.type == 'self': | |
|
139 | self.callMethod(operation.name, **kwargs) | |
|
140 | return | |
|
141 | ||
|
142 | if operation.type == 'other': | |
|
143 | self.callObject(operation.id, **kwargs) | |
|
144 | return | |
|
145 | ||
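Review note: together with the fixes above (objectDict instead of the undefined self.object / self.objectList), the dispatch described in the call() docstring works as sketched below; the operation object with type, name and id attributes is assumed to come from the controller layer and is not defined in this file:

    procUnitObj = ProcessingUnit()               # derived units should also run this __init__ so objectDict exists
    cohIntObj = CohInt()                         # an Operation subclass defined later in this file
    objId = procUnitObj.addOperation(cohIntObj, objId=1)

    # operation.type == "self"  -> procUnitObj.callMethod(operation.name, **kwargs)
    # operation.type == "other" -> procUnitObj.callObject(operation.id, **kwargs)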
|
146 | class Operation(): | |
|
147 | ||
|
148 | """ | |
|
149 | Base class used to define the additional operations that can be added to the ProcessingUnit class | |
|
150 | and that need to accumulate information from the previous data blocks. Preferably, use an | |
|
151 | accumulation buffer inside this class. | |
|
152 | ||
|
153 | Example: coherent integration, which needs the information of the previous n profiles (buffer) | |
|
154 | ||
|
155 | """ | |
|
156 | ||
|
157 | __buffer = None | |
|
158 | ||
|
159 | def __init__(self): | |
|
160 | ||
|
161 | pass | |
|
162 | ||
|
163 | def run(self, dataIn, **kwargs): | |
|
164 | ||
|
165 | """ | |
|
166 | Performs the required operations on dataIn.data and updates the attributes of the dataIn object. | |
|
167 | ||
|
168 | Input: | |
|
169 | ||
|
170 | dataIn : object of type JROData | |
|
171 | ||
|
172 | Return: | |
|
173 | ||
|
174 | None | |
|
175 | ||
|
176 | Affected: | |
|
177 | __buffer : data reception buffer. | |
|
178 | ||
|
179 | """ | |
|
180 | ||
|
181 | raise NotImplementedError, "The run method must be implemented by the derived Operation class" | |
|
182 | ||
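Review note: a minimal sketch of what an Operation subclass is expected to look like, following the docstring above (the class name and the averaging behaviour are illustrative only, not part of this changeset):

    class ProfileAverager(Operation):

        def __init__(self):
            self.__buffer = []                   # private accumulation buffer, as recommended above

        def run(self, dataIn, nProfiles=2):
            # accumulate incoming blocks and replace dataIn.data with their average
            # once nProfiles blocks have been received
            self.__buffer.append(dataIn.data.copy())
            if len(self.__buffer) == nProfiles:
                dataIn.data = sum(self.__buffer) / float(nProfiles)
                self.__buffer = []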
|
183 | class VoltageProc(ProcessingUnit): | |
|
184 | ||
|
185 | ||
|
186 | def __init__(self): | |
|
187 | ||
|
188 | pass | |
|
189 | ||
|
190 | def setup(self, dataInObj=None, dataOutObj=None): | |
|
191 | ||
|
192 | self.dataInObj = dataInObj | |
|
193 | ||
|
194 | if dataOutObj == None: | |
|
195 | dataOutObj = Voltage() | |
|
196 | ||
|
197 | self.dataOutObj = dataOutObj | |
|
198 | ||
|
199 | return self.dataOutObj | |
|
200 | ||
|
201 | def init(self): | |
|
202 | ||
|
203 | if self.dataInObj.isEmpty(): | |
|
204 | return 0 | |
|
205 | ||
|
206 | self.dataOutObj.copy(self.dataInObj) | |
|
207 | # It is not necessary to copy the dataInObj attributes on every init(); | |
|
208 | # the copy should be made for each new block of data | |
|
209 | ||
|
210 | def selectChannels(self, channelList): | |
|
211 | ||
|
212 | if self.dataInObj.isEmpty(): | |
|
213 | return 0 | |
|
214 | ||
|
215 | self.selectChannelsByIndex(channelList) | |
|
216 | ||
|
217 | def selectChannelsByIndex(self, channelIndexList): | |
|
218 | """ | |
|
219 | Selects a block of data by channel, according to channelIndexList | |
|
220 | ||
|
221 | Input: | |
|
222 | channelIndexList : plain list of the channel indexes to select, e.g. [2,3,7] | |
|
223 | ||
|
224 | Affected: | |
|
225 | self.dataOutObj.data | |
|
226 | self.dataOutObj.channelIndexList | |
|
227 | self.dataOutObj.nChannels | |
|
228 | self.dataOutObj.m_ProcessingHeader.totalSpectra | |
|
229 | self.dataOutObj.systemHeaderObj.numChannels | |
|
230 | self.dataOutObj.m_ProcessingHeader.blockSize | |
|
231 | ||
|
232 | Return: | |
|
233 | None | |
|
234 | """ | |
|
235 | ||
|
236 | for channel in channelIndexList: | |
|
237 | if channel not in self.dataOutObj.channelIndexList: | |
|
238 | raise ValueError, "The value %d in channelIndexList is not valid" %channel | |
|
239 | ||
|
240 | nChannels = len(channelIndexList) | |
|
241 | ||
|
242 | data = self.dataOutObj.data[channelIndexList,:] | |
|
243 | ||
|
244 | self.dataOutObj.data = data | |
|
245 | self.dataOutObj.channelIndexList = channelIndexList | |
|
246 | self.dataOutObj.channelList = [self.dataOutObj.channelList[i] for i in channelIndexList] | |
|
247 | self.dataOutObj.nChannels = nChannels | |
|
248 | ||
|
249 | return 1 | |
|
250 | ||
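Review note: a short usage sketch for the channel selection above; every index must already be present in dataOutObj.channelIndexList, otherwise a ValueError is raised:

    voltProcObj = VoltageProc()
    # after setup() and init() have filled dataOutObj with a data block:
    #     voltProcObj.selectChannelsByIndex([2, 3, 7])   # keeps rows 2, 3 and 7 of dataOutObj.data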
|
251 | class CohInt(Operation): | |
|
252 | ||
|
253 | __profIndex = 0 | |
|
254 | __withOverapping = False | |
|
255 | ||
|
256 | __byTime = False | |
|
257 | __initime = None | |
|
258 | __integrationtime = None | |
|
259 | ||
|
260 | __buffer = None | |
|
261 | ||
|
262 | __dataReady = False | |
|
263 | ||
|
264 | nCohInt = None | |
|
265 | ||
|
266 | ||
|
267 | def __init__(self): | |
|
268 | ||
|
269 | pass | |
|
270 | ||
|
271 | def setup(self, nCohInt=None, timeInterval=None, overlapping=False): | |
|
272 | """ | |
|
273 | Set the parameters of the integration class. | |
|
274 | ||
|
275 | Inputs: | |
|
276 | ||
|
277 | nCohInt : Number of coherent integrations | |
|
278 | timeInterval : Time of integration. If the parameter "nCohInt" is selected this one does not work | |
|
279 | overlapping : | |
|
280 | ||
|
281 | """ | |
|
282 | ||
|
283 | self.__initime = None | |
|
284 | self.__buffer = None | |
|
285 | self.__dataReady = False | |
|
286 | ||
|
287 | ||
|
288 | if nCohInt == None and timeInterval == None: | |
|
289 | raise ValueError, "nCohInt or timeInterval should be specified ..." | |
|
290 | ||
|
291 | if nCohInt != None: | |
|
292 | self.nCohInt = nCohInt | |
|
293 | self.__byTime = False | |
|
294 | else: | |
|
295 | self.__integrationtime = timeInterval * 60. #if (type(timeInterval)!=integer) -> change this line | |
|
296 | self.nCohInt = 9999 | |
|
297 | self.__byTime = True | |
|
298 | ||
|
299 | if overlapping: | |
|
300 | self.__withOverapping = True | |
|
301 | self.__buffer = None | |
|
302 | else: | |
|
303 | self.__withOverapping = False | |
|
304 | self.__buffer = 0 | |
|
305 | ||
|
306 | self.__profIndex = 0 | |
|
307 | ||
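Review note: setup() above accepts either a fixed number of profiles (nCohInt) or a time window (timeInterval); nCohInt takes precedence when both are given, and timeInterval appears to be interpreted in minutes since it is multiplied by 60. A short sketch with arbitrary values:

    cohIntObj = CohInt()

    cohIntObj.setup(nCohInt=4)                        # integrate every 4 profiles
    # cohIntObj.setup(timeInterval=0.5)               # or integrate everything received in 0.5 minutes
    # cohIntObj.setup(nCohInt=4, overlapping=True)    # sliding (overlapped) integration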
|
308 | def putData(self, data, datatime=None): | |
|
309 | ||
|
310 | """ | |
|
311 | Adds a profile to the __buffer and increases __profIndex by one | |
|
312 | ||
|
313 | """ | |
|
314 | if self.__initime == None: | |
|
315 | self.__initime = datatime | |
|
316 | ||
|
317 | if not self.__withOverapping: | |
|
318 | self.__buffer += data | |
|
319 | self.__profIndex += 1 | |
|
320 | return | |
|
321 | ||
|
322 | #Overlapping data | |
|
323 | nChannels, nHeis = data.shape | |
|
324 | data = numpy.reshape(data, (1, nChannels, nHeis)) | |
|
325 | ||
|
326 | #If the buffer is empty then it takes the data value | |
|
327 | if self.__buffer == None: | |
|
328 | self.__buffer = data | |
|
329 | self.__profIndex += 1 | |
|
330 | return | |
|
331 | ||
|
332 | #If the buffer length is lower than nCohInt then stakcing the data value | |
|
333 | if self.__profIndex < self.nCohInt: | |
|
334 | self.__buffer = numpy.vstack((self.__buffer, data)) | |
|
335 | self.__profIndex += 1 | |
|
336 | return | |
|
337 | ||
|
338 | #If the buffer length is equal to nCohInt then replacing the last buffer value with the data value | |
|
339 | self.__buffer = numpy.roll(self.__buffer, -1, axis=0) | |
|
340 | self.__buffer[self.nCohInt-1] = data | |
|
341 | self.__profIndex = self.nCohInt | |
|
342 | return | |
|
343 | ||
|
344 | ||
|
345 | def pushData(self): | |
|
346 | """ | |
|
347 | Return the sum of the last profiles and the profiles used in the sum. | |
|
348 | ||
|
349 | Affected: | |
|
350 | ||
|
351 | self.__profileIndex | |
|
352 | ||
|
353 | """ | |
|
354 | ||
|
355 | self.__initime = None | |
|
356 | ||
|
357 | if not self.__withOverapping: | |
|
358 | data = self.__buffer | |
|
359 | nCohInt = self.__profIndex | |
|
360 | ||
|
361 | self.__buffer = 0 | |
|
362 | self.__profIndex = 0 | |
|
363 | ||
|
364 | return data, nCohInt | |
|
365 | ||
|
366 | #Integration with Overlapping | |
|
367 | data = numpy.sum(self.__buffer, axis=0) | |
|
368 | nCohInt = self.__profIndex | |
|
369 | ||
|
370 | return data, nCohInt | |
|
371 | ||
|
372 | def byProfiles(self, data): | |
|
373 | ||
|
374 | self.__dataReady = False | |
|
375 | avgdata = nCohInt = None | |
|
376 | ||
|
377 | self.putData(data) | |
|
378 | ||
|
379 | if self.__profIndex == self.nCohInt: | |
|
380 | ||
|
381 | avgdata, nCohInt = self.pushData() | |
|
382 | self.__dataReady = True | |
|
383 | ||
|
384 | return avgdata, nCohInt | |
|
385 | ||
|
386 | def byTime(self, data, datatime): | |
|
387 | ||
|
388 | self.__dataReady = False | |
|
389 | avgdata = nCohInt = None | |
|
390 | ||
|
391 | self.putData(data, datatime) | |
|
392 | ||
|
393 | if (datatime - self.__initime) >= self.__integrationtime: | |
|
394 | avgdata, nCohInt = self.pushData() | |
|
395 | self.nCohInt = nCohInt | |
|
396 | self.__dataReady = True | |
|
397 | ||
|
398 | return avgdata, nCohInt | |
|
399 | ||
|
400 | def integrate(self, data, datatime=None): | |
|
401 | ||
|
402 | if not self.__byTime: | |
|
403 | avgdata, nCohInt = self.byProfiles(data) | |
|
404 | else: | |
|
405 | avgdata, nCohInt = self.byTime(data, datatime) | |
|
406 | ||
|
407 | self.data = avgdata | |
|
408 | ||
|
409 | ||
|
410 | def run(self, dataOut, nCohInt=None, timeInterval=None, overlapping=False): | |
|
411 | ||
|
412 | ||
|
413 | # setup() is assumed to have been called with nCohInt or timeInterval before the first run() | |
|
414 | self.integrate(dataOut.data, dataOut.utctime) | |
|
415 | ||
|
416 | dataOut.flagNoData = True | |
|
417 | ||
|
418 | if self.__dataReady: | |
|
419 | dataOut.data = self.data | |
|
420 | dataOut.timeInterval *= self.nCohInt | |
|
421 | dataOut.nCohInt *= self.nCohInt | |
|
422 | dataOut.flagNoData = False | |
|
423 | return dataOut No newline at end of file |
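Review note: with the fixes above, run() now drives integrate() instead of referencing the undefined myCohIntObj. A rough sketch of how a reader, a VoltageProc unit and a CohInt operation are expected to be chained per block; VoltageReader comes from JRODataIO (imported with *) and the path and keyword defaults are assumptions here, not a tested recipe:

    readerObj = VoltageReader()
    dataObj = readerObj.setup(path="/data/raw", online=0)

    procObj = VoltageProc()
    voltObj = procObj.setup(dataInObj=dataObj)

    cohIntObj = CohInt()
    cohIntObj.setup(nCohInt=4)

    while not readerObj.flagNoMoreFiles:
        readerObj.getData()
        procObj.init()                 # copies the new block from dataInObj into dataOutObj
        cohIntObj.run(voltObj)         # voltObj.flagNoData is False whenever a sum is ready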
@@ -0,0 +1,104 | |||
|
1 | ''' | |
|
2 | Created on Jul 31, 2012 | |
|
3 | ||
|
4 | @author $Author$ | |
|
5 | @version $Id$ | |
|
6 | ''' | |
|
7 | ||
|
8 | import os, sys | |
|
9 | import time, datetime | |
|
10 | #import pylab as pl | |
|
11 | ||
|
12 | from Data.JROData import Voltage | |
|
13 | from IO.VoltageIO import * | |
|
14 | ||
|
15 | from Data.JROData import SpectraHeis | |
|
16 | from IO.SpectraIO import * | |
|
17 | ||
|
18 | from Processing.VoltageProcessor import * | |
|
19 | from Processing.SpectraProcessor import * | |
|
20 | ||
|
21 | #from Graphics.BaseGraph_mpl import LinearPlot | |
|
22 | ||
|
23 | class TestHeis(): | |
|
24 | i=None | |
|
25 | def __init__(self): | |
|
26 | self.setValues() | |
|
27 | self.createObjects() | |
|
28 | self.testSChain() | |
|
29 | self.i=0 | |
|
30 | ||
|
31 | def setValues( self ): | |
|
32 | ||
|
33 | self.path="/home/roj-idl71/data" | |
|
34 | self.path = "/Data/Data/RAWDATA/ASTRONOMIA" | |
|
35 | ||
|
36 | #self.path = "" | |
|
37 | self.startDate = datetime.date(2012,4,1) | |
|
38 | self.endDate = datetime.date(2012,12,30) | |
|
39 | ||
|
40 | self.startTime = datetime.time(0,0,0) | |
|
41 | self.endTime = datetime.time(23,0,0) | |
|
42 | ||
|
43 | ||
|
44 | def createObjects( self ): | |
|
45 | ||
|
46 | self.readerObj = VoltageReader() | |
|
47 | self.specProcObj = SpectraHeisProcessor() | |
|
48 | ||
|
49 | self.voltObj1 = self.readerObj.setup( | |
|
50 | path = self.path, | |
|
51 | startDate = self.startDate, | |
|
52 | endDate = self.endDate, | |
|
53 | startTime = self.startTime, | |
|
54 | endTime = self.endTime, | |
|
55 | expLabel = '', | |
|
56 | online = 1) | |
|
57 | ||
|
58 | if not(self.voltObj1): | |
|
59 | sys.exit(0) | |
|
60 | ||
|
61 | self.specObj1 = self.specProcObj.setup(dataInObj = self.voltObj1,nFFTPoints=self.voltObj1.nHeights) | |
|
62 | ||
|
63 | ||
|
64 | # | |
|
65 | ||
|
66 | # | |
|
67 | ||
|
68 | def testSChain( self ): | |
|
69 | ||
|
70 | ini = time.time() | |
|
71 | counter = 0 | |
|
72 | while(True): | |
|
73 | self.readerObj.getData() | |
|
74 | self.specProcObj.init() | |
|
75 | ||
|
76 | self.specProcObj.integrator(N=32) ## return self.dataOutObj | |
|
77 | ||
|
78 | ||
|
79 | ||
|
80 | ||
|
81 | self.specProcObj.plotScope(idfigure=1, | |
|
82 | wintitle='test plot library', | |
|
83 | driver='plplot', | |
|
84 | minvalue = 30000.0, | |
|
85 | maxvalue = 5000000.0, | |
|
86 | save=False, | |
|
87 | gpath="/home/roj-idl71/PlotImage") | |
|
88 | ||
|
89 | ||
|
90 | if self.readerObj.flagNoMoreFiles: | |
|
91 | break | |
|
92 | ||
|
93 | ||
|
94 | ||
|
95 | if self.readerObj.flagIsNewBlock: | |
|
96 | print 'Block No %04d, Time: %s' %(self.readerObj.nTotalBlocks, | |
|
97 | datetime.datetime.fromtimestamp(self.readerObj.basicHeaderObj.utc),) | |
|
98 | ||
|
99 | ||
|
100 | ||
|
101 | if __name__ == '__main__': | |
|
102 | TestHeis() | |
|
103 | ||
|
104 | No newline at end of file |
@@ -0,0 +1,120 | |||
|
1 | ''' | |
|
2 | ||
|
3 | $Author: murco $ | |
|
4 | $Id: testSchainExp.py 158 2012-11-08 21:31:03Z murco $ | |
|
5 | ''' | |
|
6 | import os, sys | |
|
7 | import time, datetime | |
|
8 | ||
|
9 | path = os.path.split(os.getcwd())[0] | |
|
10 | sys.path.append(path) | |
|
11 | ||
|
12 | from Data.JROData import Voltage | |
|
13 | from IO.VoltageIO import * | |
|
14 | ||
|
15 | from Processing.VoltageProcessor import * | |
|
16 | from Processing.SpectraProcessor import * | |
|
17 | ||
|
18 | class TestSChain(): | |
|
19 | ||
|
20 | def __init__(self): | |
|
21 | self.setValues() | |
|
22 | self.createObjects() | |
|
23 | self.testSChain() | |
|
24 | ||
|
25 | def setValues(self): | |
|
26 | self.path = "/home/roj-idl71/Data/RAWDATA/Meteors" | |
|
27 | self.path = "/remote/puma/2012_06/Meteors" | |
|
28 | ||
|
29 | self.startDate = datetime.date(2012,06,19) | |
|
30 | self.endDate = datetime.date(2012,12,30) | |
|
31 | ||
|
32 | self.startTime = datetime.time(11,0,0) | |
|
33 | self.endTime = datetime.time(23,59,59) | |
|
34 | ||
|
35 | self.nFFTPoints = 32 | |
|
36 | ||
|
37 | self.wrpath = "/home/roj-idl71/tmp/results" | |
|
38 | self.profilesPerBlock = 40 | |
|
39 | self.blocksPerFile = 50 | |
|
40 | ||
|
41 | def createObjects(self): | |
|
42 | ||
|
43 | self.readerObj = VoltageReader() | |
|
44 | self.voltProcObj = VoltageProcessor() | |
|
45 | self.specProcObj = SpectraProcessor() | |
|
46 | ||
|
47 | self.voltObj1 = self.readerObj.setup( | |
|
48 | path = self.path, | |
|
49 | startDate = self.startDate, | |
|
50 | endDate = self.endDate, | |
|
51 | startTime = self.startTime, | |
|
52 | endTime = self.endTime, | |
|
53 | expLabel = '', | |
|
54 | online = True) | |
|
55 | ||
|
56 | self.voltObj2 = self.voltProcObj.setup(dataInObj = self.voltObj1) | |
|
57 | self.specObj1 = self.specProcObj.setup(dataInObj = self.voltObj2, nFFTPoints = self.nFFTPoints) | |
|
58 | ||
|
59 | def testSChain(self): | |
|
60 | ||
|
61 | ini = time.time() | |
|
62 | ||
|
63 | while(True): | |
|
64 | self.readerObj.getData() | |
|
65 | ||
|
66 | self.voltProcObj.init() | |
|
67 | ||
|
68 | self.voltProcObj.integrator(25, overlapping=False) | |
|
69 | # | |
|
70 | # self.voltProcObj.writeData(self.wrpath,self.profilesPerBlock,self.blocksPerFile) | |
|
71 | self.voltProcObj.selectChannels([0,1,2]) | |
|
72 | ||
|
73 | # self.voltProcObj.plotScope(idfigure=0, | |
|
74 | # wintitle='test plot library', | |
|
75 | # driver='plplot', | |
|
76 | # save=False, | |
|
77 | # gpath=None, | |
|
78 | # type="power") | |
|
79 | ||
|
80 | # self.voltProcObj.plotRti(idfigure=1, | |
|
81 | # starttime=self.startTime, | |
|
82 | # endtime=self.endTime, | |
|
83 | # minvalue=0, | |
|
84 | # maxvalue=50, | |
|
85 | # wintitle='', | |
|
86 | # driver='plplot', | |
|
87 | # colormap='jet', | |
|
88 | # colorbar=True, | |
|
89 | # showprofile=False, | |
|
90 | # xrangestep=2, | |
|
91 | # save=False, | |
|
92 | # gpath=None) | |
|
93 | # | |
|
94 | # if self.voltProcObj.dataOutObj.flagNoData ==False: | |
|
95 | # print self.readerObj.dataOutObj.nProfiles | |
|
96 | ||
|
97 | self.specProcObj.init() | |
|
98 | ||
|
99 | self.specProcObj.plotSpc(idfigure=2, | |
|
100 | minvalue=30, | |
|
101 | maxvalue=70, | |
|
102 | wintitle='Spectra', | |
|
103 | driver='plplot', | |
|
104 | colormap='jet', | |
|
105 | colorbar=True, | |
|
106 | showprofile=True, | |
|
107 | save=False, | |
|
108 | gpath=None) | |
|
109 | ||
|
110 | if self.readerObj.flagNoMoreFiles: | |
|
111 | break | |
|
112 | ||
|
113 | if self.readerObj.flagIsNewBlock: | |
|
114 | # print 'Block No %04d, Time: %s' %(self.readerObj.nTotalBlocks, datetime.datetime.fromtimestamp(self.readerObj.basicHeaderObj.utc),) | |
|
115 | print 'Block No %04d, Time: %s' %(self.readerObj.nTotalBlocks, | |
|
116 | datetime.datetime.fromtimestamp(self.readerObj.basicHeaderObj.utc + self.readerObj.basicHeaderObj.miliSecond/1000.0),) | |
|
117 | ||
|
118 | ||
|
119 | if __name__ == '__main__': | |
|
120 | TestSChain() No newline at end of file |
@@ -0,0 +1,85 | |||
|
1 | ''' | |
|
2 | ||
|
3 | $Author: murco $ | |
|
4 | $Id: testSchainSpecExp.py 147 2012-10-30 22:50:56Z murco $ | |
|
5 | ''' | |
|
6 | ||
|
7 | import os, sys | |
|
8 | import time, datetime | |
|
9 | ||
|
10 | path = os.path.split(os.getcwd())[0] | |
|
11 | sys.path.append(path) | |
|
12 | ||
|
13 | ||
|
14 | from Data.JROData import Spectra | |
|
15 | from IO.SpectraIO import * | |
|
16 | from Processing.SpectraProcessor import * | |
|
17 | ||
|
18 | ||
|
19 | ||
|
20 | class TestSChain: | |
|
21 | ||
|
22 | def __init__(self): | |
|
23 | self.setValues() | |
|
24 | self.createObjects() | |
|
25 | self.testSChain() | |
|
26 | ||
|
27 | def setValues(self): | |
|
28 | # self.path = "/Users/jro/Documents/RadarData/MST_ISR/MST" | |
|
29 | ## self.path = "/home/roj-idl71/Data/RAWDATA/IMAGING" | |
|
30 | # self.path = "/Users/danielangelsuarezmunoz/Data/EW_Drifts" | |
|
31 | # self.path = "/Users/danielangelsuarezmunoz/Data/IMAGING" | |
|
32 | self.path = "/home/daniel/RadarData/IMAGING" | |
|
33 | ||
|
34 | self.startDate = datetime.date(2012,3,1) | |
|
35 | self.endDate = datetime.date(2012,3,30) | |
|
36 | ||
|
37 | self.startTime = datetime.time(0,0,0) | |
|
38 | self.endTime = datetime.time(14,1,1) | |
|
39 | ||
|
40 | # parameters for writing Pdata files | |
|
41 | self.wrpath = "/home/daniel/RadarData/test_wr2" | |
|
42 | self.blocksPerFile = 5 | |
|
43 | ||
|
44 | ||
|
45 | ||
|
46 | def createObjects(self): | |
|
47 | ||
|
48 | self.readerObj = SpectraReader() | |
|
49 | ||
|
50 | self.specObj1 = self.readerObj.setup( | |
|
51 | path = self.path, | |
|
52 | startDate = self.startDate, | |
|
53 | endDate = self.endDate, | |
|
54 | startTime = self.startTime, | |
|
55 | endTime = self.endTime, | |
|
56 | expLabel = '', | |
|
57 | online = 0) | |
|
58 | ||
|
59 | self.specObjProc = SpectraProcessor() | |
|
60 | ||
|
61 | self.specObj2 = self.specObjProc.setup(dataInObj = self.specObj1) | |
|
62 | ||
|
63 | ||
|
64 | ||
|
65 | def testSChain(self): | |
|
66 | ||
|
67 | ini = time.time() | |
|
68 | ||
|
69 | while(True): | |
|
70 | self.readerObj.getData() | |
|
71 | ||
|
72 | self.specObjProc.init() | |
|
73 | ||
|
74 | self.specObjProc.writeData(self.wrpath,self.blocksPerFile) | |
|
75 | # | |
|
76 | if self.readerObj.flagNoMoreFiles: | |
|
77 | break | |
|
78 | ||
|
79 | if self.readerObj.flagIsNewBlock: | |
|
80 | print 'Block No %04d, Time: %s' %(self.readerObj.nTotalBlocks, | |
|
81 | datetime.datetime.fromtimestamp(self.readerObj.basicHeaderObj.utc)) | |
|
82 | ||
|
83 | ||
|
84 | if __name__ == '__main__': | |
|
85 | TestSChain() No newline at end of file |