This diff has been collapsed as it changes many lines, (517 lines changed) Show them Hide them | |||
@@ -0,0 +1,517 | |||
|
1 | ''' | |
|
2 | Created on Jul 3, 2014 | |
|
3 | ||
|
4 | @author: roj-idl71 | |
|
5 | ''' | |
|
6 | import datetime | |
|
7 | import numpy | |
|
8 | ||
|
9 | try: | |
|
10 | from gevent import sleep | |
|
11 | except: | |
|
12 | from time import sleep | |
|
13 | ||
|
14 | from schainpy.model.data.jroheaderIO import RadarControllerHeader, SystemHeader | |
|
15 | from schainpy.model.data.jrodata import Voltage | |
|
16 | from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation | |
|
17 | ||
|
18 | try: | |
|
19 | import digital_rf_hdf5 | |
|
20 | except: | |
|
21 | print 'You should install "digital_rf_hdf5" module if you want to read USRP data' | |
|
22 | ||
|
class USRPReader(ProcessingUnit):
    '''
    Reads USRP voltage data recorded with digital_rf_hdf5 and publishes it
    through a Voltage dataOut object, one profile at a time.
    '''

    def __init__(self):
        '''
        Constructor: initialize reader state only; no I/O happens here
        (files are opened in setup()).
        '''

        ProcessingUnit.__init__(self)

        self.dataOut = Voltage()
        # Print reader info only once (see printInfo()).
        self.__printInfo = True
        self.__flagDiscontinuousBlock = False
        # Sentinel well past any buffer size: forces a block read on the
        # first getData() call (see __isBufferEmpty()).
        self.__bufferIndex = 9999999

        # Radar-controller parameters; filled in by setup() from metadata.
        self.__ippKm = None
        self.__codeType = 0
        self.__nCode = None
        self.__nBaud = None
        self.__code = None
|
    def __getCurrentSecond(self):
        # Unix time (seconds) corresponding to the current absolute
        # sample pointer.
        return self.__thisUnixSample/self.__sample_rate

    thisSecond = property(__getCurrentSecond, "I'm the 'thisSecond' property.")
|
51 | ||
|
    def __setFileHeader(self):
        '''
        Initialize every header/metadata parameter of the dataOut object
        (no sample data is touched here; see getData()).
        '''

        self.dataOut.radarControllerHeaderObj = RadarControllerHeader(ippKm=self.__ippKm,
                                                                      txA=0,
                                                                      txB=0,
                                                                      nWindows=1,
                                                                      nHeights=self.__nSamples,
                                                                      firstHeight=self.__firstHeigth,
                                                                      deltaHeight=self.__deltaHeigth,
                                                                      codeType=self.__codeType,
                                                                      nCode=self.__nCode, nBaud=self.__nBaud,
                                                                      code = self.__code)

        self.dataOut.systemHeaderObj = SystemHeader(nSamples=self.__nSamples,
                                                    nProfiles=1024,
                                                    nChannels=len(self.__channelList),
                                                    adcResolution=14)

        self.dataOut.type = "Voltage"

        self.dataOut.data = None

        self.dataOut.dtype = numpy.dtype([('real','<i8'),('imag','<i8')])

        self.dataOut.nProfiles = 1

        # Height grid in km derived from first height + sample spacing.
        # NOTE(review): numpy.float is deprecated in modern numpy -- fine
        # for the numpy this Python 2 code targets, revisit on upgrade.
        self.dataOut.heightList = self.__firstHeigth + numpy.arange(self.__nSamples, dtype = numpy.float)*self.__deltaHeigth

        self.dataOut.channelList = self.__channelList

        self.dataOut.blocksize = self.dataOut.getNChannels() * self.dataOut.getNHeights()

        self.dataOut.flagNoData = True

        #Set to TRUE if the data is discontinuous
        self.dataOut.flagDiscontinuousBlock = False

        self.dataOut.utctime = None

        self.dataOut.timeZone = self.__timezone/60 #timezone like jroheader, difference in minutes between UTC and localtime

        self.dataOut.dstFlag = 0

        self.dataOut.errorCount = 0

        self.dataOut.nCohInt = 1

        self.dataOut.flagDecodeData = False #assume the data is already decoded

        self.dataOut.flagDeflipData = False #assume the data has no flip applied

        self.dataOut.flagShiftFFT = False

        self.dataOut.ippSeconds = 1.0*self.__nSamples/self.__sample_rate

        self.dataOut.frequency = self.__frequency

        self.dataOut.realtime = self.__online
|
122 | ||
|
    def setup(self, path = None,
                    startDate = None,
                    endDate = None,
                    startTime = datetime.time(0,0,0),
                    endTime = datetime.time(23,59,59),
                    channelList = None,
                    nSamples = None,
                    ippKm = None,
                    online = False,
                    wait = 60,
                    nbuffer = 1024*4):
        '''
        Set all initial parameters and open the digital_rf dataset.

        Inputs:
            path        : directory containing the digital_rf channels
            startDate/startTime, endDate/endTime : local-time range to read
                          (whole on-disk range when omitted)
            channelList : indices into the dataset channel list (all by default)
            nSamples    : samples per profile (derived from ippKm if omitted)
            ippKm       : inter-pulse period in km (read from metadata if omitted)
            online      : True to follow newly written data in real time
            wait        : (unused here; kept for interface compatibility)
            nbuffer     : number of profiles buffered per disk read
        '''
        # load_all_metadata is only supported by newer digital_rf versions;
        # fall back to the plain constructor otherwise.
        try:
            self.digitalReadObj = digital_rf_hdf5.read_hdf5(path, load_all_metadata=True)
        except:
            self.digitalReadObj = digital_rf_hdf5.read_hdf5(path)

        channelNameList = self.digitalReadObj.get_channels()

        if not channelNameList:
            raise IOError, "[Reading] The path doesn,t have any files .. "

        if not channelList:
            channelList = range(len(channelNameList))

        ########## Reading metadata ######################

        metadata_dict = self.digitalReadObj.get_rf_file_metadata(channelNameList[channelList[0]])

        self.__sample_rate = metadata_dict['sample_rate'][0]
        self.__samples_per_file = metadata_dict['samples_per_file'][0]
        # 0.15 km per microsecond of delay (c/2); converts sample period to km.
        self.__deltaHeigth = 1e6*0.15/self.__sample_rate

        this_metadata_file = self.digitalReadObj.get_metadata(channelNameList[channelList[0]])

        self.__frequency = this_metadata_file['center_frequencies'].value
        try:
            self.__timezone = this_metadata_file['timezone'].value
        except:
            # Older datasets have no timezone entry: assume UTC.
            self.__timezone = 0

        self.__firstHeigth = 0

        try:
            codeType = this_metadata_file['codeType'].value
        except:
            codeType = 0

        nCode = 0
        nBaud = 0
        code = None

        if codeType:
            nCode = this_metadata_file['nCode'].value
            nBaud = this_metadata_file['nBaud'].value
            code = this_metadata_file['code'].value

        if not ippKm:
            try:
                #seconds to km
                ippKm = 1e6*0.15*this_metadata_file['ipp'].value
            except:
                ippKm = None

        ####################################################
        startUTCSecond = None
        endUTCSecond = None

        if startDate:
            startDatetime = datetime.datetime.combine(startDate, startTime)
            startUTCSecond = (startDatetime-datetime.datetime(1970,1,1)).total_seconds() + self.__timezone

        if endDate:
            endDatetime = datetime.datetime.combine(endDate, endTime)
            endUTCSecond = (endDatetime-datetime.datetime(1970,1,1)).total_seconds() + self.__timezone

        start_index, end_index = self.digitalReadObj.get_bounds(channelNameList[channelList[0]])

        # Clamp the requested time range to the samples actually on disk.
        if not startUTCSecond:
            startUTCSecond = start_index/self.__sample_rate

        if start_index > startUTCSecond*self.__sample_rate:
            startUTCSecond = start_index/self.__sample_rate

        if not endUTCSecond:
            endUTCSecond = end_index/self.__sample_rate

        if end_index < endUTCSecond*self.__sample_rate:
            endUTCSecond = end_index/self.__sample_rate

        if not nSamples:
            if not ippKm:
                raise ValueError, "[Reading] nSamples or ippKm should be defined"

            nSamples = ippKm / (1e6*0.15/self.__sample_rate)

        channelBoundList = []
        channelNameListFiltered = []

        for thisIndexChannel in channelList:
            thisChannelName = channelNameList[thisIndexChannel]
            start_index, end_index = self.digitalReadObj.get_bounds(thisChannelName)
            channelBoundList.append((start_index, end_index))
            channelNameListFiltered.append(thisChannelName)

        self.profileIndex = 0

        self.__ippKm = ippKm
        self.__codeType = codeType
        self.__nCode = nCode
        self.__nBaud = nBaud
        self.__code = code

        self.__datapath = path
        self.__online = online
        self.__channelList = channelList
        self.__channelNameList = channelNameListFiltered
        self.__channelBoundList = channelBoundList
        self.__nSamples = nSamples
        self.__samples_to_read = nbuffer*nSamples
        self.__nChannels = len(self.__channelList)

        self.__startUTCSecond = startUTCSecond
        self.__endUTCSecond = endUTCSecond

        #Time interval covered by one buffered read
        self.__timeInterval = 1.0 * self.__samples_to_read/self.__sample_rate

        if online:
            # Online mode: start near the end of the stream so that reading
            # begins with freshly written data.
            startUTCSecond = numpy.floor(endUTCSecond)

        # One block behind, so the first __readNextBlock() advance lands
        # exactly on startUTCSecond.
        self.__thisUnixSample = int(startUTCSecond*self.__sample_rate) - self.__samples_to_read

        self.__data_buffer = numpy.zeros((self.__nChannels, self.__samples_to_read), dtype = numpy.complex)

        self.__setFileHeader()
        self.isConfig = True

        print "[Reading] USRP Data was found from %s to %s " %(
            datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone),
            datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone)
            )

        print "[Reading] Starting process from ", datetime.datetime.utcfromtimestamp(startUTCSecond - self.__timezone), " to ", datetime.datetime.utcfromtimestamp(endUTCSecond - self.__timezone)
|
281 | ||
|
282 | def __reload(self): | |
|
283 | ||
|
284 | if not self.__online: | |
|
285 | return | |
|
286 | ||
|
287 | ||
|
288 | # print "%s not in range [%s, %s]" %( | |
|
289 | # datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone), | |
|
290 | # datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone), | |
|
291 | # datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone) | |
|
292 | # ) | |
|
293 | print "[Reading] reloading metadata ..." | |
|
294 | ||
|
295 | self.digitalReadObj.reload(complete_update=True) | |
|
296 | ||
|
297 | start_index, end_index = self.digitalReadObj.get_bounds(self.__channelNameList[self.__channelList[0]]) | |
|
298 | ||
|
299 | if start_index > self.__startUTCSecond*self.__sample_rate: | |
|
300 | self.__startUTCSecond = 1.0*start_index/self.__sample_rate | |
|
301 | ||
|
302 | if end_index > self.__endUTCSecond*self.__sample_rate: | |
|
303 | self.__endUTCSecond = 1.0*end_index/self.__sample_rate | |
|
304 | ||
|
305 | print "[Reading] New timerange found [%s, %s] " %( | |
|
306 | datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone), | |
|
307 | datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone) | |
|
308 | ) | |
|
309 | ||
|
310 | return True | |
|
311 | ||
|
312 | return False | |
|
313 | ||
|
    def __readNextBlock(self, seconds=30, volt_scale = 218776):
        '''
        Advance the sample pointer one block and read samples_to_read
        samples per channel into self.__data_buffer, scaled by volt_scale.

        Returns True when the buffer was refilled, False when no data could
        be read (pointer is rewound in the end-of-range case so a later
        retry re-reads the same block).
        '''

        #Set the next data
        self.__flagDiscontinuousBlock = False
        self.__thisUnixSample += self.__samples_to_read

        # Require two blocks of headroom before the end of the range;
        # in online mode try refreshing the bounds first.
        if self.__thisUnixSample + 2*self.__samples_to_read > self.__endUTCSecond*self.__sample_rate:
            print "[Reading] There are no more data into selected timerange"

            self.__reload()

            if self.__thisUnixSample + 2*self.__samples_to_read > self.__endUTCSecond*self.__sample_rate:
                self.__thisUnixSample -= self.__samples_to_read
                return False

        indexChannel = 0

        dataOk = False

        for thisChannelName in self.__channelNameList:

            try:
                result = self.digitalReadObj.read_vector_c81d(self.__thisUnixSample,
                                                              self.__samples_to_read,
                                                              thisChannelName)

            except IOError, e:
                #read next profile
                self.__flagDiscontinuousBlock = True
                print e
                break

            if result.shape[0] != self.__samples_to_read:
                self.__flagDiscontinuousBlock = True
                print "[Reading] %s: Too few samples were found, just %d samples" %(datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
                                                                                   result.shape[0])
                break

            self.__data_buffer[indexChannel,:] = result*volt_scale

            indexChannel += 1

            dataOk = True

        # NOTE(review): if a later channel fails after an earlier one
        # succeeded, dataOk stays True and the failed channels keep stale
        # buffer contents -- confirm this best-effort behaviour is intended.

        self.__utctime = self.__thisUnixSample/self.__sample_rate

        if not dataOk:
            return False

        print "[Reading] %s: %d samples <> %f sec" %(datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
                                                     self.__samples_to_read,
                                                     self.__timeInterval)

        self.__bufferIndex = 0

        return True
|
372 | ||
|
373 | def __isBufferEmpty(self): | |
|
374 | ||
|
375 | if self.__bufferIndex <= self.__samples_to_read - self.__nSamples: | |
|
376 | return False | |
|
377 | ||
|
378 | return True | |
|
379 | ||
|
    def getData(self, seconds=30, nTries=5):

        '''
        This method gets the data from files and puts it into the dataOut object.

        In addition, it increases the buffer counter by one.

        Return:
            True when one voltage profile (heights x channels) was copied
            from the buffer into self.dataOut; False when there is no more
            data to read (offline end-of-range, or nTries exhausted online,
            sleeping *seconds* between retries).

        Affected:
            self.dataOut
            self.profileIndex
            self.flagDiscontinuousBlock
            self.flagIsNewBlock
        '''

        err_counter = 0
        self.dataOut.flagNoData = True

        if self.__isBufferEmpty():

            self.__flagDiscontinuousBlock = False

            while True:
                if self.__readNextBlock():
                    break

                # Pointer past the end of the selected range: nothing left.
                if self.__thisUnixSample > self.__endUTCSecond*self.__sample_rate:
                    return False

                if self.__flagDiscontinuousBlock:
                    print '[Reading] discontinuous block found ... continue with the next block'
                    continue

                if not self.__online:
                    return False

                err_counter += 1
                if err_counter > nTries:
                    return False

                print '[Reading] waiting %d seconds to read a new block' %seconds
                sleep(seconds)

        # Slice the next profile out of the block buffer.
        self.dataOut.data = self.__data_buffer[:,self.__bufferIndex:self.__bufferIndex+self.__nSamples]
        self.dataOut.utctime = (self.__thisUnixSample + self.__bufferIndex)/self.__sample_rate
        self.dataOut.flagNoData = False
        self.dataOut.flagDiscontinuousBlock = self.__flagDiscontinuousBlock

        self.__bufferIndex += self.__nSamples
        self.profileIndex += 1

        return True
|
435 | ||
|
436 | def printInfo(self): | |
|
437 | ''' | |
|
438 | ''' | |
|
439 | if self.__printInfo == False: | |
|
440 | return | |
|
441 | ||
|
442 | # self.systemHeaderObj.printInfo() | |
|
443 | # self.radarControllerHeaderObj.printInfo() | |
|
444 | ||
|
445 | self.__printInfo = False | |
|
446 | ||
|
    def printNumberOfBlock(self):
        '''
        Print the number of profiles delivered so far.
        '''

        print self.profileIndex
|
452 | ||
|
    def run(self, **kwargs):
        '''
        This method will be called many times so here you should put all your code.

        Lazily configures the reader on the first call (kwargs are passed
        to setup()), then pulls the next profile into self.dataOut.
        '''

        if not self.isConfig:
            self.setup(**kwargs)

        self.getData()

        return
|
464 | ||
|
class USRPWriter(Operation):
    '''
    Writer counterpart of USRPReader. Currently a stub: setup() only
    stores the incoming data object.
    '''

    def __init__(self):
        '''
        Constructor.
        '''
        self.dataOut = None
        # Fix: run() tests self.isConfig before setup() has ever executed;
        # without this initialization the first run() call raised
        # AttributeError. (NOTE(review): the base Operation.__init__ is not
        # called here, mirroring the original -- confirm whether it should be.)
        self.isConfig = False

    def setup(self, dataIn, path, blocksPerFile, set=0, ext=None):
        '''
        Set all initial parameters.

        Input:
            dataIn : Input data will also be output data
        '''
        self.dataOut = dataIn

        self.isConfig = True

        return

    def run(self, dataIn, **kwargs):
        '''
        This method will be called many times so here you should put all your code.

        Inputs:

            dataIn : object with the data

        '''

        if not self.isConfig:
            self.setup(dataIn, **kwargs)
|
507 | ||
|
if __name__ == '__main__':

    readObj = USRPReader()

    # Smoke test: keep pulling profiles from a local dataset and report
    # the running profile counter.
    while True:
        readObj.run(path='/Volumes/DATA/haystack/passive_radar/')
#         readObj.printInfo()
        readObj.printNumberOfBlock()
|
516 | ||
|
517 | No newline at end of file |
@@ -0,0 +1,135 | |||
|
1 | ''' | |
|
2 | Created on Jul 15, 2014 | |
|
3 | ||
|
4 | @author: roj-idl71 | |
|
5 | ''' | |
|
6 | import time | |
|
7 | import threading | |
|
8 | import cPickle | |
|
9 | ||
|
10 | try: | |
|
11 | from gevent import sleep | |
|
12 | except: | |
|
13 | from time import sleep | |
|
14 | ||
|
15 | SERIALIZER = cPickle | |
|
16 | ||
|
17 | # from schainpy.serializer import DynamicSerializer | |
|
18 | from schainpy.model.io.jroIO_usrp import USRPReader | |
|
19 | from schainpy.serializer.DataTranslate import obj2Serial | |
|
20 | ||
|
21 | class USRPReaderAPI(USRPReader, threading.Thread): | |
|
22 | ||
|
23 | # __isBufferEmpty = True | |
|
24 | ||
|
25 | __DATAKEYLIST = ['data','utctime','flagNoData'] | |
|
26 | ||
|
27 | def __init__(self, serializer='msgpack'): | |
|
28 | ||
|
29 | threading.Thread.__init__(self) | |
|
30 | USRPReader.__init__(self) | |
|
31 | ||
|
32 | # self.__serializerObj = DynamicSerializer.DynamicSerializer('msgpack') | |
|
33 | self.__mySerial = None | |
|
34 | self.__isBufferEmpty = True | |
|
35 | ||
|
36 | self.setSerializer(serializer) | |
|
37 | ||
|
38 | def setSerializer(self, serializer): | |
|
39 | ||
|
40 | self.__serializer = serializer | |
|
41 | ||
|
42 | def getSerializer(self): | |
|
43 | ||
|
44 | return self.__serializer | |
|
45 | ||
|
46 | def getProfileIndex(self): | |
|
47 | ||
|
48 | return self.profileIndex | |
|
49 | ||
|
50 | def getSerialMetaData(self): | |
|
51 | ||
|
52 | if self.__isBufferEmpty: | |
|
53 | ini = time.time() | |
|
54 | ||
|
55 | while True: | |
|
56 | ||
|
57 | if not self.__isBufferEmpty: | |
|
58 | break | |
|
59 | ||
|
60 | if time.time() - ini > 20: | |
|
61 | break | |
|
62 | ||
|
63 | sleep(1e-12) | |
|
64 | ||
|
65 | ||
|
66 | # if not self.getData(): | |
|
67 | # self.__isBufferEmpty = False | |
|
68 | # return None | |
|
69 | ||
|
70 | if self.dataOut.flagNoData: | |
|
71 | return None | |
|
72 | ||
|
73 | myMetadataSerial = obj2Serial(self.dataOut, | |
|
74 | serializer = self.__serializer) | |
|
75 | ||
|
76 | return myMetadataSerial | |
|
77 | ||
|
78 | def getSerialData(self): | |
|
79 | ||
|
80 | if self.__isBufferEmpty: | |
|
81 | ini = time.time() | |
|
82 | ||
|
83 | while True: | |
|
84 | ||
|
85 | if not self.__isBufferEmpty: | |
|
86 | break | |
|
87 | ||
|
88 | if time.time() - ini > 20: | |
|
89 | break | |
|
90 | ||
|
91 | sleep(1e-12) | |
|
92 | ||
|
93 | ||
|
94 | # if not self.getData(): | |
|
95 | # self.__isBufferEmpty = False | |
|
96 | # return None | |
|
97 | ||
|
98 | if self.dataOut.flagNoData: | |
|
99 | return None | |
|
100 | ||
|
101 | self.__isBufferEmpty = True | |
|
102 | ||
|
103 | return self.__mySerial | |
|
104 | ||
|
105 | def run(self): | |
|
106 | ||
|
107 | ''' | |
|
108 | This method will be called many times so here you should put all your code | |
|
109 | ''' | |
|
110 | ||
|
111 | if not self.isConfig: | |
|
112 | raise IOError, 'setup() method has to be called before start()' | |
|
113 | ||
|
114 | while True: | |
|
115 | ||
|
116 | if not self.__isBufferEmpty: | |
|
117 | sleep(1e-12) | |
|
118 | continue | |
|
119 | ||
|
120 | if not self.getData(): | |
|
121 | break | |
|
122 | ||
|
123 | print ".", | |
|
124 | ||
|
125 | self.__mySerial = obj2Serial(self.dataOut, | |
|
126 | keyList = self.__DATAKEYLIST, | |
|
127 | serializer = self.__serializer) | |
|
128 | self.__isBufferEmpty = False | |
|
129 | ||
|
130 | # print self.profileIndex | |
|
131 | # print 'wait 1 second' | |
|
132 | ||
|
133 | # sleep(0.1) | |
|
134 | ||
|
135 | return No newline at end of file |
@@ -0,0 +1,115 | |||
|
1 | ''' | |
|
2 | Created on Jul 15, 2014 | |
|
3 | ||
|
4 | @author: Miguel Urco | |
|
5 | ''' | |
|
6 | from JROSerializer import DynamicSerializer | |
|
7 | ||
|
8 | PICKLE_SERIALIZER = DynamicSerializer('cPickle') | |
|
9 | MSGPACK_SERIALIZER = DynamicSerializer('msgpack') | |
|
10 | ||
|
11 | from schainpy.model.data.jrodata import * | |
|
12 | ||
|
13 | CLASSNAME_KEY = 'classname__' | |
|
14 | ||
|
def isNotClassVar(myObj):
    # Values without an instance __dict__ (ints, strings, lists, arrays,
    # ...) are treated as plain data rather than class instances.
    has_instance_dict = hasattr(myObj, '__dict__')
    return not has_instance_dict
|
18 | ||
|
def isDictFormat(thisValue):
    # A value is in "dict format" when it is a plain dict carrying the
    # class-name marker that obj2Dict() stores (subclasses of dict are
    # deliberately rejected, matching the original type() comparison).
    if type(thisValue) != type({}):
        return False

    return CLASSNAME_KEY in thisValue.keys()
|
28 | ||
|
def obj2Dict(myObj, keyList=None):
    '''
    Recursively convert *myObj* into a plain dict, tagging each level with
    CLASSNAME_KEY so dict2Obj() can rebuild the instances.

    keyList : optional list of attribute names to include at the top
              level; all attributes are included by default.
    '''
    # Fix: the original declared a mutable default argument (keyList=[]).
    if not keyList:
        keyList = myObj.__dict__.keys()

    myDict = {}

    myDict[CLASSNAME_KEY] = myObj.__class__.__name__

    for thisKey, thisValue in myObj.__dict__.items():

        if thisKey not in keyList:
            continue

        if isNotClassVar(thisValue):
            myDict[thisKey] = thisValue
            continue

        ## This value is another class instance: recurse.
        myDict[thisKey] = obj2Dict(thisValue)

    return myDict
|
52 | ||
|
def dict2Obj(myDict):
    '''
    Inverse of obj2Dict(): rebuild a class instance from its dict form.
    Returns None when the dict carries no class-name marker.
    '''

    if CLASSNAME_KEY not in myDict.keys():
        return None

    # NOTE(review): eval() on the stored class name executes arbitrary
    # code if the serialized payload is untrusted -- consider a class
    # whitelist instead.
    targetClass = eval(myDict[CLASSNAME_KEY])
    myObj = targetClass()

    for thisKey, thisValue in myDict.items():

        if thisKey == CLASSNAME_KEY:
            continue

        if isDictFormat(thisValue):
            # Nested serialized instance: rebuild it recursively.
            setattr(myObj, thisKey, dict2Obj(thisValue))
        else:
            setattr(myObj, thisKey, thisValue)

    return myObj
|
77 | ||
|
def obj2Serial(myObj, serializer='msgpack', **kwargs):
    '''
    Serialize *myObj* via its dict form. Extra kwargs (e.g. keyList) are
    forwarded to obj2Dict(). serializer is 'cPickle' or 'msgpack'
    (anything else falls back to msgpack).
    '''

    chosenSerializer = PICKLE_SERIALIZER if serializer == 'cPickle' else MSGPACK_SERIALIZER

    myDict = obj2Dict(myObj, **kwargs)

    return chosenSerializer.dumps(myDict)
|
89 | ||
|
def serial2Dict(mySerial, serializer='msgpack'):
    '''
    Deserialize *mySerial* back into its dict form (no object rebuild).
    serializer is 'cPickle' or 'msgpack' (default fallback: msgpack).
    '''

    chosenSerializer = PICKLE_SERIALIZER if serializer == 'cPickle' else MSGPACK_SERIALIZER

    return chosenSerializer.loads(mySerial)
|
98 | ||
|
def serial2Obj(mySerial, metadataDict=None, serializer='msgpack'):
    '''
    Deserialize *mySerial* and rebuild the original object.

    metadataDict : optional dict of extra attributes merged in before the
                   rebuild; the deserialized values win on key clashes.
                   NOTE: a caller-supplied dict is updated in place,
                   matching the original behaviour.
    '''
    # Fix: the original declared a mutable default argument (metadataDict={}).
    if serializer == 'cPickle':
        chosenSerializer = PICKLE_SERIALIZER
    else:
        chosenSerializer = MSGPACK_SERIALIZER

    myDataDict = chosenSerializer.loads(mySerial)

    if not metadataDict:
        return dict2Obj(myDataDict)

    metadataDict.update(myDataDict)

    return dict2Obj(metadataDict)
@@ -0,0 +1,93 | |||
|
1 | ''' | |
|
2 | Created on Jul 17, 2014 | |
|
3 | ||
|
4 | @author: roj-idl71 | |
|
5 | ''' | |
|
6 | ||
|
7 | import cPickle | |
|
8 | import msgpack_numpy | |
|
9 | import jsonpickle | |
|
10 | import yaml | |
|
11 | ||
|
12 | # import JROMsgpack | |
|
13 | # import JROJsonpickle | |
|
14 | ||
|
class Serializer(object):
    '''
    Abstract base for serializer wrappers: delegates dumps()/loads() to a
    backing module stored in self.serializer, which concrete subclasses
    must assign.
    '''

    def __init__(self):

        # Concrete subclasses replace this with an actual serializer module.
        self.serializer = None

    def dumps(self, obj, **kwargs):
        '''Serialize *obj* with the wrapped backend.'''
        return self.serializer.dumps(obj, **kwargs)

    def loads(self, obj, **kwargs):
        '''Deserialize *obj* with the wrapped backend.'''
        return self.serializer.loads(obj, **kwargs)
|
27 | ||
|
class cPickleSerializer(Serializer):
    '''cPickle-backed serializer.'''

    def __init__(self):
        self.serializer = cPickle

    def dumps(self, obj, **kwargs):
        # Protocol 2 is pinned; extra kwargs are ignored here --
        # presumably deliberate for a stable wire format (confirm).
        return self.serializer.dumps(obj, 2)

    def loads(self, obj, **kwargs):
        # kwargs likewise ignored; cPickle.loads takes no options.
        return self.serializer.loads(obj)
|
38 | ||
|
class msgpackSerializer(Serializer):
    '''msgpack-backed serializer (msgpack_numpy handles numpy arrays).'''

    def __init__(self):

        self.serializer = msgpack_numpy

    def dumps(self, obj, **kwargs):
        # msgpack names these packb/unpackb rather than dumps/loads;
        # kwargs are ignored to keep the Serializer interface uniform.
        return self.serializer.packb(obj)

    def loads(self, obj, **kwargs):
        return self.serializer.unpackb(obj)
|
50 | ||
|
class jsonpickleSerializer(Serializer):
    '''jsonpickle-backed serializer (JSON text output).'''

    def __init__(self):

        self.serializer = jsonpickle

    def dumps(self, obj, **kwargs):
        # jsonpickle names these encode/decode rather than dumps/loads.
        return self.serializer.encode(obj, **kwargs)

    def loads(self, obj, **kwargs):
        return self.serializer.decode(obj, **kwargs)
|
62 | ||
|
class yamlSerializer(Serializer):
    '''YAML-backed serializer.'''

    def __init__(self):

        self.serializer = yaml

    def dumps(self, obj, **kwargs):
        # yaml names these dump/load rather than dumps/loads.
        return self.serializer.dump(obj, **kwargs)

    def loads(self, obj, **kwargs):
        # NOTE(review): yaml.load without an explicit safe Loader can
        # execute arbitrary constructors on untrusted input -- consider
        # yaml.safe_load if the payload is not fully trusted.
        return self.serializer.load(obj, **kwargs)
|
74 | ||
|
class DynamicSerializer(Serializer):
    '''
    Facade that selects a concrete serializer by name.

    mode : one of 'cPickle', 'jsonpickle', 'yaml', 'msgpack'.

    Raises ValueError for an unknown mode.
    '''

    def __init__(self, mode = 'cPickle'):

        # Fix: an unknown mode previously fell through silently, leaving
        # self.serializer unset and failing later with AttributeError;
        # fail fast instead. Known modes behave exactly as before.
        factories = {'cPickle': cPickleSerializer,
                     'jsonpickle': jsonpickleSerializer,
                     'yaml': yamlSerializer,
                     'msgpack': msgpackSerializer}

        if mode not in factories:
            raise ValueError("Unknown serializer mode: %r" % (mode,))

        self.serializer = factories[mode]()
|
90 | ||
|
91 | ||
|
if __name__ == '__main__':
    # Module is import-only; no standalone behaviour.
    pass
|
1 | NO CONTENT: new file 100644 |
This diff has been collapsed as it changes many lines, (816 lines changed) Show them Hide them | |||
@@ -0,0 +1,816 | |||
|
1 | ''' | |
|
2 | The DynamicObject module supports dynamic loading of YAML | |
|
3 | defined objects into Python class objects. Object can | |
|
4 | be sub-classed to allow direct binding of methods having | |
|
5 | matching signatures. | |
|
6 | ||
|
7 | $Id$ | |
|
8 | ''' | |
|
9 | ||
|
10 | import urllib | |
|
11 | import os | |
|
12 | import re | |
|
13 | import yaml # YAML Ain't Markup Language | |
|
14 | import numpy as np | |
|
15 | import copy | |
|
16 | import inspect | |
|
17 | import PrecisionTime | |
|
18 | import time | |
|
19 | import sys | |
|
20 | import datetime | |
|
21 | import collections | |
|
22 | ||
|
23 | # Replacement Loader for PyYAML to keep dictionaries in-order: | |
|
24 | import OrderedYAML | |
|
25 | #OrderedYAML.collections | |
|
26 | ||
|
27 | class Object(object): | |
|
28 | """ Loads a YAML defined python class dynamically using the supplied URI, | |
|
29 | which may be a file, directory, web hyper-link, or hyper-linked directory. """ | |
|
30 | ||
|
31 | # Dictionary containing all known Object class names and corresponding class objects | |
|
32 | dynamicClasses = collections.OrderedDict() | |
|
33 | ||
|
34 | def __init__(self, object_uri=None, revision=None, recursive=False): | |
|
35 | if isinstance(object_uri, file): | |
|
36 | # URI is a yaml file - read it. | |
|
37 | self.yaml = file.read() | |
|
38 | elif object_uri == None: | |
|
39 | self.yaml = None | |
|
40 | elif isinstance(object_uri, str): | |
|
41 | if object_uri.endswith('.yml'): | |
|
42 | # URI is a web hyper-linked yaml file - read it. | |
|
43 | self.yaml = urllib.urlopen(object_uri).read() | |
|
44 | else: | |
|
45 | # URI is a (hyper-linked?) directory - try reading it. | |
|
46 | #print "URI is a directory." | |
|
47 | try: | |
|
48 | self.files = self.__parseLink(object_uri, recursive) | |
|
49 | except IOError: | |
|
50 | # URI is a local directory - get a list of YAML files in it | |
|
51 | self.files = self.__getYamlFiles(object_uri, recursive) | |
|
52 | ||
|
53 | # For each YAML file found, create a new DynamicObject of it: | |
|
54 | self.yaml = [] | |
|
55 | for fn in self.files: | |
|
56 | self.yaml.append(Object(fn)) | |
|
57 | else: | |
|
58 | print "Invalid URI supplied: %s"%(object_uri,) | |
|
59 | ||
|
60 | def __parseLink(self, object_uri, recursive): | |
|
61 | """ Returns a listing of all YAML files located in the | |
|
62 | hyper-link directory given by page. """ | |
|
63 | page = urllib.urlopen(object_uri).read() | |
|
64 | #print "URI is a URL directory: %s"%(object_uri,) | |
|
65 | pattern = re.compile(r'<a href="[^"]*">') | |
|
66 | ||
|
67 | # List of files contained in the directory at the given URL, ignoring | |
|
68 | # any "?" / GET query-string locations given: | |
|
69 | files = [x[9:-2] for x in pattern.findall(page) if not x[9:-2].startswith('?')] | |
|
70 | #print files | |
|
71 | ||
|
72 | yamlFiles = [] | |
|
73 | dirs = [] | |
|
74 | for fn in files: | |
|
75 | if not fn.startswith('/'): # Ignore absolute paths... | |
|
76 | path = os.path.join(object_uri, fn) | |
|
77 | #print path | |
|
78 | ||
|
79 | # Keep list of YAML files found... | |
|
80 | if fn.endswith('.yml'): | |
|
81 | yamlFiles.append(path) | |
|
82 | ||
|
83 | # Keep list of directories found... | |
|
84 | elif recursive and fn.endswith('/'): | |
|
85 | dirs.append(path) | |
|
86 | ||
|
87 | if recursive: | |
|
88 | #print dirs | |
|
89 | for path in dirs: | |
|
90 | yamlFiles += self.__parseLink(path,recursive) | |
|
91 | ||
|
92 | return yamlFiles | |
|
93 | ||
|
94 | def __getYamlFiles(self, local_dir, recursive): | |
|
95 | """ Returns a listing of all YAML files located in the given | |
|
96 | directory, recursing if requested. """ | |
|
97 | yamlFiles = [] | |
|
98 | dirs = [] | |
|
99 | for fn in os.listdir(local_dir): | |
|
100 | path = os.path.join(local_dir, fn) | |
|
101 | ||
|
102 | # List of YAML files found... | |
|
103 | if fn.endswith('.yml'): | |
|
104 | yamlFiles.append(path) | |
|
105 | ||
|
106 | # List of directories found... | |
|
107 | elif recursive and os.path.isdir(path): | |
|
108 | dirs.append(path) | |
|
109 | ||
|
110 | # Recurse if desired: | |
|
111 | if recursive: | |
|
112 | for path in dirs: | |
|
113 | yamlFiles += self.__getYamlFiles(path,recursive) | |
|
114 | ||
|
115 | return yamlFiles | |
|
116 | ||
|
117 | def equals(self, obj, compare_time_created=True): | |
|
118 | """ Returns True iff self has identical attributes | |
|
119 | (numerically) to obj (no extras) """ | |
|
120 | ||
|
121 | if not isinstance(obj, Object): return False | |
|
122 | ||
|
123 | self_keys = self.__dict__.keys() | |
|
124 | obj_keys = obj.__dict__.keys() | |
|
125 | if not self_keys == obj_keys: | |
|
126 | return False | |
|
127 | for key in self_keys: | |
|
128 | obj_keys.remove(key) | |
|
129 | ||
|
130 | self_value, obj_value = self.__dict__[key], obj.__dict__[key] | |
|
131 | if isinstance(self_value, Object): | |
|
132 | if not self_value.equals(obj_value, compare_time_created): | |
|
133 | return False | |
|
134 | elif isinstance(self_value, np.ndarray): | |
|
135 | m1 = map(repr,self_value.flat) | |
|
136 | m2 = map(repr,obj_value.flat) | |
|
137 | ret = m1 == m2 | |
|
138 | if not ret: | |
|
139 | return False | |
|
140 | else: | |
|
141 | if not self_value == obj_value: | |
|
142 | # Return False iff the different times are important | |
|
143 | return key == '__time_created' and not compare_time_created | |
|
144 | ||
|
145 | return obj_keys == [] # no more keys --> the objects are identical | |
|
146 | ||
|
147 | def sizeof(self): | |
|
148 | """ Recursively computes the size in bytes of the given Dynamic Object """ | |
|
149 | sz = 0 | |
|
150 | values = self.__dict__.values() | |
|
151 | for val in values: | |
|
152 | if isinstance(val, Object): sz += val.sizeof() | |
|
153 | elif isinstance(val, np.ndarray): sz += val.nbytes | |
|
154 | elif hasattr(val, 'dtype') and hasattr(val.dtype, 'itemsize'): sz += val.dtype.itemsize | |
|
155 | else: sz += sys.getsizeof(val) | |
|
156 | return sz | |
|
157 | ||
|
158 | # Automatic methods for accessing meta-data | |
|
159 | getters = ['__object_name', '__revision_number', '__revision_id', '__revision_source', '__revision_tag', '__time_created'] | |
|
160 | def getObjectName(self): return self.__class__.meta_attributes['__object_name'] | |
|
161 | def getRevisionNumber(self): return self.__class__.meta_attributes['__revision_number'] | |
|
162 | def getRevisionId(self): return self.__class__.meta_attributes['__revision_id'] | |
|
163 | def getRevisionSource(self): return self.__class__.meta_attributes['__revision_source'] | |
|
164 | def getRevisionTag(self): return self.__class__.meta_attributes['__revision_tag'] | |
|
165 | def getTimeCreated(self): return getattr(self, "__time_created") | |
|
166 | ||
|
167 | """ | |
|
168 | __getters = [('ObjectName', getObjectName), ('RevisionNumber', getRevisionNumber), | |
|
169 | ('RevisionId', getRevisionId), ('RevisionSource', getRevisionSource), | |
|
170 | ('RevisionTag', getRevisionTag)] | |
|
171 | def __repr__(self): | |
|
172 | meta_atts = repr([(x[0], x[1](self)) for x in Object.__getters]) | |
|
173 | atts = repr(self.__dict__) | |
|
174 | return "Object(%s, %s)"%(atts, meta_atts) | |
|
175 | """ | |
|
176 | ||
|
177 | ||
|
class SignatureException(Exception):
    """Raised when a data or method signature is unknown or invalid
    for a particular Object."""

    def __init__(self, value):
        self.value = value

    def __str__(self):
        return repr(self.value)
|
183 | ||
|
184 | class _IDLTag(object): | |
|
185 | """ IDLTag (aka Interface Definition Language Tag) is an abstract helper class | |
|
186 | used by the Factory to define built-in tags used | |
|
187 | specifically for our IDL """ | |
|
188 | def __init__(self, yamlString): | |
|
189 | self.yamlString = yamlString | |
|
190 | def __repr__(self): | |
|
191 | return self.yamlString | |
|
192 | ||
|
class _Reference(_IDLTag):
    """Factory helper tag: marks a field whose type refers to another
    (possibly not-yet-built) dynamically defined object."""

    def __repr__(self):
        return "Ref(%s)"%(self.yamlString,)
|
198 | ||
|
class _Method(_IDLTag):
    """Factory helper tag: signals that the tagged YAML value is a method
    signature (in dict format) to be bound onto the generated class."""

    def __repr__(self):
        return "Method(%r)"%(self.yamlString,)
|
205 | ||
|
class Binary(Object):
    """Holder for a binary-typed value: records the declared binary type
    name and an optional payload."""

    def __init__(self, binary_type, value=None):
        # NOTE(review): Object.__init__ is deliberately not invoked;
        # a Binary only carries these two attributes.
        self.binary_type = binary_type
        self.value = value
|
210 | ||
|
211 | import Lookup | |
|
212 | ||
|
class BuiltinDtype(_IDLTag):
    """ Helper class for Factory: Object parameters each
    have a certain data type (either dtype.xxxx for numpy compatible data
    types, or one of the generic python data types (i.e. int, bool, str...)

    __addYamlConstructor in Factory registers all of the tags
    listed as keys in the dtypes dictionary."""

    def __init__(self, yamlString, tag=None):
        # Strip the leading '!' from the YAML tag (e.g. '!dtype.int32' -> 'dtype.int32').
        # NOTE(review): tag defaults to None but tag[1:] requires a string;
        # the registered constructors always pass node.tag - confirm before
        # relying on the default.
        self.tag = tag[1:]
        super(BuiltinDtype, self).__init__(yamlString)
        #print self.tag
        # Resolve the tag name to a numpy dtype first, falling back to the
        # table of builtin python objects.
        try: self.dtype = Lookup.numpy_dtypes[self.tag]
        except KeyError: self.dtype = Lookup.builtin_objects[self.tag]

    def __repr__(self):
        return "_BuiltinType(%s,%s)"%(self.yamlString, self.tag)
|
230 | ||
|
231 | # Register hexadecimal representation of numpy dtypes in YAML | |
|
232 | ||
|
233 | class _Parameter: | |
|
234 | """ Helper class for Factory: Contains the name, default | |
|
235 | value, and length (if an array) of an object initialization parameter. """ | |
|
236 | ||
|
237 | def __init__(self, name, hasDefault=False, default=None, length=None, classType=None): | |
|
238 | self.name = name | |
|
239 | self.hasDefault = hasDefault | |
|
240 | self.default = default | |
|
241 | self.length = length | |
|
242 | if isinstance(classType, None.__class__) and not isinstance(default, None.__class__): | |
|
243 | self.classType = default.__class__ | |
|
244 | else: | |
|
245 | self.classType = classType | |
|
246 | ||
|
247 | class _UnresolvedType: | |
|
248 | """ Used to indicate a data type which has not yet been parsed (i.e. for | |
|
249 | recursive data-types. """ | |
|
250 | ||
|
251 | def __init__(self, yamlObject): | |
|
252 | # Either the name of the class we couldn't resolve, or a dictionary | |
|
253 | # containing the name and a default value | |
|
254 | self.yamlObject = yamlObject | |
|
255 | ||
|
class UnresolvedTypeException(Exception):
    """Raised when a !ref tag is used but the reference cannot be resolved."""
    pass
|
259 | ||
|
def get_class(kls):
    """Resolve a dotted name such as 'pkg.mod.Class' to the object it names.

    Imports the root package, then walks the remaining path components with
    getattr. (Technique from http://stackoverflow.com/questions/452969/)
    """
    path = kls.split('.')
    root_module = ".".join(path[:-1])
    obj = __import__(root_module)
    for attr in path[1:]:
        obj = getattr(obj, attr)
    return obj
|
269 | ||
|
# Aliased constructor & representer adders for easily swapping between Ordered and non-Ordered:
def add_constructor(tag, constructor):
    """Register a YAML constructor on the OrderedYAML Loader (single switch
    point between the Ordered and plain PyYAML implementations)."""
    #yaml.add_constructor(tag, constructor)
    OrderedYAML.Loader.add_constructor(tag, constructor)
|
def add_representer(cls, representer):
    """Register a YAML representer on the OrderedYAML Dumper (single switch
    point between the Ordered and plain PyYAML implementations)."""
    #yaml.add_representer(cls, representer)
    OrderedYAML.Dumper.add_representer(cls, representer)
|
277 | ||
|
# Implicit constructor for _Reference objects using the !ref tag:
def __ref_constructor(loader, node):
    """Build a _Reference from either a mapping or a scalar !ref node."""
    if isinstance(node, yaml.nodes.MappingNode):
        payload = loader.construct_mapping(node)
    else:
        payload = loader.construct_scalar(node)
    return _Reference(payload)
add_constructor(u'!ref', __ref_constructor)
|
285 | ||
|
# Method constructor using !method tag:
def __method_constructor(loader, node):
    """Build a _Method tag from either a mapping or a scalar !method node."""
    if isinstance(node, yaml.nodes.MappingNode):
        payload = loader.construct_mapping(node)
    else:
        payload = loader.construct_scalar(node)
    return _Method(payload)
add_constructor(u'!method', __method_constructor)
|
293 | ||
|
# Generic constructor for any _BuiltinDtype
def __dtype_constructor(loader, node):
    """Build a BuiltinDtype from a sequence, mapping or scalar node,
    preserving the node's tag so the dtype can be resolved later."""
    if isinstance(node, yaml.nodes.SequenceNode):
        value = loader.construct_sequence(node)
    elif isinstance(node, yaml.nodes.MappingNode):
        value = loader.construct_mapping(node)
    else:
        value = loader.construct_scalar(node)
    return BuiltinDtype(value, tag=node.tag)

# Register YAML constructors for each builtin type:
for dtype in Lookup.numpy_dtypes.keys() + Lookup.builtin_objects.keys():
    add_constructor(u'!%s'%(dtype,), __dtype_constructor)
|
307 | ||
|
class FactoryLoader(OrderedYAML.Loader):
    """ A YAML Loader specifically designed to load YAML object definitions
    (as opposed to actual instances of the objects) """

    def construct_yaml_timestamp(self, node):
        """ Make empty timestamps (None/null) acceptable, otherwise parse the timestamp """
        if node.value == u'':
            # An empty timestamp in a *definition* means "a parameter of type
            # datetime with no default". The real parameter name is assigned
            # later by the factory; this sentinel flags definitions that were
            # loaded without that fix-up step.
            name = 'YAML_DEFN_LOADED_INCORRECTLY' # in case we forget to fix the name...
            return _Parameter(name, hasDefault=False, classType=datetime.datetime)
        else:
            # Non-empty value: defer to PyYAML's standard timestamp parsing.
            return yaml.constructor.SafeConstructor.construct_yaml_timestamp(self, node)
|
319 | ||
|
# Override default timestamp constructor:
# (object definitions must treat empty timestamps as typed parameters
# instead of failing in PyYAML's default timestamp constructor)
FactoryLoader.add_constructor(
    u'tag:yaml.org,2002:timestamp',
    FactoryLoader.construct_yaml_timestamp
)
|
325 | ||
|
326 | import DynamicYAML | |
|
327 | class Factory: | |
|
328 | """ Load a YAML defined python class and create a class with initialization | |
|
329 | provided by this factory. This is intended as an abstract class to be sub-classed | |
|
330 | to enable complex initialization on object instantiation. | |
|
331 | ||
|
332 | Factory subclasses should override __buildClass().""" | |
|
333 | ||
|
334 | def __init__(self, dynamic_object=None, yaml=None, typeCheck='strong', parse=True, revision_dict=None): | |
|
335 | if revision_dict != None: self.revision_dict = revision_dict # Remember for when we build each individual class | |
|
336 | else: | |
|
337 | self.revision_dict = {\ | |
|
338 | "__revision_number": 0, | |
|
339 | "__revision_id": 'unknown', | |
|
340 | "__revision_source": 'unknown', | |
|
341 | "__revision_tag": 'unknown'} | |
|
342 | if parse: | |
|
343 | if dynamic_object: | |
|
344 | self.parse(dynamic_object, typeCheck=typeCheck) | |
|
345 | else: | |
|
346 | dyno = Object() | |
|
347 | dyno.yaml = yaml | |
|
348 | self.parse(dyno, typeCheck=typeCheck) | |
|
349 | ||
|
    def parse(self, dynamic_object, typeCheck='strong'):
        """
        Initializer for a Factory, converting the given dynamic_object
        containing a (text) YAML object definition into the corresponding class-type
        with initializer.

        typeCheck parameter can be one of 'strong' or 'cast':
        'strong': Class initializer should raise a TypeError when given
        anything but the correct type
        'cast': Class initializer should attempt to cast any input to the correct type
        """

        # Remember what kind of type-checking to do:
        if typeCheck not in ['strong', 'cast']:
            raise Exception('Incorrect input for typeCheck: %s\nExpected "strong" or "cast"'%(typeCheck))
        self.typeCheck = typeCheck

        # Get a list of the objects to build:
        # (dynamic_object.yaml is a list when the URI was a directory)
        if isinstance(dynamic_object.yaml, list):
            objects = dynamic_object.yaml
        else:
            objects = [dynamic_object]

        # Generate a dictionary of classes from the DynamicObjects given:
        self.classes = dict()
        for obj in objects:

            # This loader breaks nothing anymore #everything currently
            loader = FactoryLoader(obj.yaml)
            #loader = yaml.Loader(obj.yaml)

            # Dictionary with method and data signatures for the current object:
            # drain every YAML document in the stream, then release the loader.
            objDefn = []
            while loader.check_data():
                objDefn.append(loader.get_data())
            loader.dispose()

            # Parse the dictionary into a class definition:
            objClass = self.__buildClass(objDefn)
            self.classes.update(objClass)
|
390 | ||
|
391 | def parseMethodSignature(self, sigName, methDict): | |
|
392 | """ Returns the python method corresponding to the given signature | |
|
393 | (given signature should be in the loaded YAML dict format. | |
|
394 | ||
|
395 | Override this method for recognizing complex method signatures. """ | |
|
396 | ||
|
397 | raise SignatureException("Object abstract base class doesn't support any method signatures.") | |
|
398 | ||
|
    def parseDataSignature(self, sigName, sig):
        """ Returns the Parameter object corresponding to the given signature.

        This method should be overridden for recognizing complex data signatures
        (don't forget to call super(sig) for built-in data types though!) """

        # Is the object an array with explicit default elements?:
        if isinstance(sig.yamlString, list):
            #length = len(sig.yamlString)
            if 'dtype' in sig.tag:
                # numpy-compatible element type: build the default array now.
                default = np.array(sig.yamlString, dtype=sig.dtype)
            elif 'binary' == sig.tag:
                default = Binary(sig.yamlString["type"])
            else:
                # Plain python list default, kept as-is.
                default = sig.yamlString
            return _Parameter(sigName, True, default, length=None)

        # Is the object an array with length and default value given?:
        if isinstance(sig.yamlString, dict) and "len" in sig.yamlString.keys():
            length = sig.yamlString["len"]

            # Shape is given as something like [[],[]], not [2,2] - convert
            if isinstance(length, list):

                def get_shape(lst):
                    """ Gets the shape of a list recursively filled with empty lists """
                    if lst == []: return [0]
                    return [len(lst)] + get_shape(lst[0])

                if len(length) > 0:
                    if isinstance(length[0], list):
                        length = get_shape(length)
                    else:
                        # Already a flat [d0, d1, ...] shape - nothing to do.
                        pass
                else:
                    length = [0] # convert [] to [0] (numpy interprets [] as [1] for shapes)


            if 'complex' in sig.tag:
                # Complex defaults arrive split into real/imag components.
                imag = sig.yamlString["default"]["imag"]
                real = sig.yamlString["default"]["real"]
                default = sig.dtype(real) + sig.dtype(imag*1j)
            elif 'binary' == sig.tag:
                default = Binary(sig.yamlString["type"])
            else:
                default = sig.dtype(sig.yamlString["default"])

            return _Parameter(sigName, True, default, length)

        # The object is singular, with a given value:
        if 'complex' in sig.tag:
            imag = sig.yamlString["imag"]
            real = sig.yamlString["real"]
            default = sig.dtype(real) + sig.dtype(imag*1j)
            return _Parameter(sigName, True, default)
        elif 'binary' == sig.tag:
            # Binary signature: typed, but no usable default value.
            default = Binary(sig.yamlString["type"])
            return _Parameter(sigName, False, default, classType=Binary)
        elif 'timestamp' in sig.tag:
            # Timestamps come in picosecond or nanosecond precision, with
            # both long and short key spellings accepted.
            # NOTE(review): a dict-valued timestamp with an unrecognized tag
            # falls through all branches and returns None - confirm intent.
            if isinstance(sig.yamlString, dict):
                if sig.tag in ['timestamp_picosecond', 'timestamp_ps']:
                    try: s = sig.yamlString['second']
                    except KeyError: s = sig.yamlString['s']
                    try: ps = sig.yamlString['picosecond']
                    except KeyError: ps = sig.yamlString['ps']
                    return _Parameter(sigName, True, PrecisionTime.psTime(s, ps))
                elif sig.tag in ['timestamp_nanosecond', 'timestamp_ns']:
                    try: s = sig.yamlString['second']
                    except KeyError: s = sig.yamlString['s']
                    try: ns = sig.yamlString['nanosecond']
                    except KeyError: ns = sig.yamlString['ns']
                    return _Parameter(sigName, True, PrecisionTime.nsTime(s, ns))
            else:
                # No value supplied - emit a typed parameter with no default.
                if sig.tag in ['timestamp_picosecond', 'timestamp_ps']:
                    return _Parameter(sigName, False, classType=PrecisionTime.psTime)
                elif sig.tag in ['timestamp_nanosecond', 'timestamp_ns']:
                    return _Parameter(sigName, False, classType=PrecisionTime.nsTime)
        else:
            # Scalar builtin value: cast through the signature's dtype.
            default = sig.dtype(sig.yamlString)
            return _Parameter(sigName, True, default) # not binary
|
479 | ||
|
480 | ||
|
481 | ||
|
482 | def __parsePythonType(self, sigName, sig): | |
|
483 | """ Returns a _Parameter object, similar to parseDataSignature, but | |
|
484 | for a basic python type. """ | |
|
485 | ||
|
486 | if isinstance(sig, collections.OrderedDict): | |
|
487 | default = dict(sig) # Type-check user-defined !!maps as dicts, not OrderedDicts. | |
|
488 | else: | |
|
489 | default = sig # The signature sig is the default value itself | |
|
490 | return _Parameter(sigName, True, default) | |
|
491 | ||
|
    def __parseReferenceSignature(self, sigName, ref_object, objClasses):
        """ Takes a reference object ref_object to be named sigName, and
        produces a _Parameter object with default value of None. """

        # List of names of classes we've created so far:
        #print [x for x in objClasses]
        names = objClasses.keys()

        if ref_object.yamlString in names:
            # Reference resolves to a class built earlier in this parse.
            defaultType = objClasses[ref_object.yamlString]
            return _Parameter(sigName, classType=defaultType)
        else:
            try:
                # Try to find the class type in globals:
                # NOTE(review): this looks the name up in objClasses again,
                # which we already know will fail - so defaultType always
                # becomes _UnresolvedType here; confirm whether a globals
                # lookup via get_class(ref_object.yamlString) was intended.
                className = objClasses[str(ref_object.yamlString)]
                defaultType = get_class(className)
            except (ValueError, KeyError):
                # Defer resolution: recorded as _UnresolvedType for a later pass.
                defaultType = _UnresolvedType(ref_object.yamlString)
                #raise NameError("Invalid reference to module %s"%(className,))

        return _Parameter(sigName, classType=defaultType)
|
513 | ||
|
    def __buildInitializer(self, className, classData):
        """ Constructs the initializer for an object which expects parameters
        listed in classData as input upon initialization.

        Returns (init, attributes): the generated __init__ closure and the
        class-level meta-attribute dict (revision info + object name). """

        # Type of type-checking to use:
        strong = (self.typeCheck == 'strong')
        #cast = (self.typeCheck == 'cast')

        def typeCheck(param, arg):
            """
            Checks to see if the type of arg matches that of the corresponding param,
            casting arg to the correct type if desired.
            """
            # Exact (or subclass) match passes straight through:
            if isinstance(arg, param.classType): return arg
            # ndarray of the right element type passes if its shape is
            # compatible with the default's shape (or no shape is pinned):
            if isinstance(arg, np.ndarray) and arg.dtype.type == param.classType:
                if not param.hasDefault: return arg
                if param.default.shape == (): return arg
                if param.default.shape[-1] == 0: return arg
                if arg.shape == param.default.shape: return arg
            # None is always accepted (means "leave unset"):
            if isinstance(arg, None.__class__): return arg
            if strong:
                raise TypeError("Incorrect input type on strong type-checking."+\
                                " Expected %s - got %s"%(param.classType,arg.__class__))
            else:
                # If the parameter corresponding to the given argument has a non-NoneType default
                # value, then attempt to cast the argument into the correct parameter type
                if param.hasDefault and param.default != None:
                    if isinstance(param.default, np.ndarray):
                        return np.array(arg, dtype=param.default.dtype)
                    else:
                        return param.default.__class__(arg)
                else:
                    return param.classType(arg)

        """
        attributes = {"__object_name": className,
        "__revision_number": self.svn_revision_number,
        "__revision_id": 'unknown',
        "__revision_source": 'unknown',
        "__revision_tag": 'unknown'}
        """
        attributes = {} # Create new attributes dict for this particular class object
        attributes.update(self.revision_dict) # Revision info now passed into the factory
        attributes['__object_name'] = className

        def init(_self, *args, **kwargs):
            """ Dynamically generated initializer. """

            # meta-data goes in the class, not the objects (commented the following out):
            """
            # Initialize automatic class data
            for attr,value in attributes.items():
            try:
            value = kwargs[attr] # Are we given a value to over-ride with?
            del kwargs[attr] # Ignore the meta attribute later
            except KeyError:
            pass
            setattr(_self, attr, value)
            """

            # Set default values first (assume no parameters):
            for param in classData:
                if param.length:
                    # Array-valued parameter: allocate and fill it.
                    if isinstance(param.length, int): param.length = [param.length]
                    default = np.empty(param.length, dtype=param.classType)
                    if param.hasDefault:
                        # Initialize array with default array value given:
                        flatIter = default.flat
                        for i in range(len(flatIter)):
                            flatIter[i] = copy.deepcopy(param.default)
                    else:
                        # Initialize to None if no default given:
                        default.fill(None)
                else:
                    default = param.default
                # deepcopy so instances never share mutable defaults:
                setattr(_self, param.name, copy.deepcopy(default))

            # Set attributes given by standard args:
            # (positional order follows classData order)
            for i in range(len(args)):
                arg = typeCheck(classData[i], args[i])
                setattr(_self, classData[i].name, arg)

            # Set named attributes (given by dictionary kwargs):
            for key,value in kwargs.items():

                try: keyIndex = [param.name for param in classData].index(key)
                except ValueError:
                    raise TypeError("'%s' is an invalid keyword argument"%(key,))
                arg = typeCheck(classData[keyIndex],value)
                #setattr(_self, key, value)
                setattr(_self, key, arg)


            # Object instantiation / creation time (if not already present):
            # (dict.has_key is Python-2-only, consistent with this module)
            if not kwargs.has_key('__time_created'):
                setattr(_self, "__time_created", np.float64(time.time()))

        return init, attributes
|
612 | ||
|
613 | def __findClass(self, className, localClasses): | |
|
614 | """ Looks for the given className first in the given dictionary of localClasses | |
|
615 | then in the global definitions, returning the corresponding class object. Raises | |
|
616 | a KeyError if the class cannot be found. """ | |
|
617 | ||
|
618 | # If class definition was in the YAML file, extend that one: | |
|
619 | if className in localClasses.keys(): | |
|
620 | return localClasses[className] | |
|
621 | ||
|
622 | # Else try finding the class definition in our global scope: | |
|
623 | try: classObj = get_class(className) | |
|
624 | except KeyError: | |
|
625 | raise KeyError("Class '%s' not found in given YAML scope or global scope."%(className,)) | |
|
626 | return classObj | |
|
627 | ||
|
628 | def __buildClass(self, objDefn): | |
|
629 | """ Takes an object definition list / dictionary objDefn (loaded from a YAML | |
|
630 | object definition file) and creates a class, dynamically binding | |
|
631 | method and data signatures to the new class. | |
|
632 | ||
|
633 | This method only performs a basic binding of method and data signatures to | |
|
634 | the new class. Object(s) having more complex initialization requirements | |
|
635 | should be given their own Factory subclass, overriding this | |
|
636 | and other methods.""" | |
|
637 | ||
|
638 | # objDefn is a list of dictionaries found in the YAML file - build each one... | |
|
639 | objClasses = dict() | |
|
640 | objClassesRev = dict() | |
|
641 | ||
|
642 | # A list of all _Parameter objects created, used to resolve recursive | |
|
643 | # or "tangled" data structures | |
|
644 | allClassData = [] | |
|
645 | ||
|
646 | for document in objDefn: | |
|
647 | # Each document can contain multiple objects - build each one. | |
|
648 | # (NOTE: objects can cross reference each other in the same document | |
|
649 | # need to resolve Reference objects as last step) | |
|
650 | for objClassName in document.keys(): | |
|
651 | ||
|
652 | # The dictionary containing method & data signatures: | |
|
653 | objDict = document[objClassName] | |
|
654 | ||
|
655 | # Extract data / attribute definitions (signatures) from the YAML dictionary | |
|
656 | # as well as method signatures and which classes this class extends: | |
|
657 | classData = [] | |
|
658 | classMethods = dict() | |
|
659 | classBases = [Object] | |
|
660 | ||
|
661 | # List structured documents result in a list of dicts each with one key: | |
|
662 | if isinstance(objDict, list): keys = [param.keys()[0] for param in objDict] | |
|
663 | # Otherwise the parameter names are just the keys of the dict | |
|
664 | else: keys = objDict.keys() # if key not found, raises AttributeError | |
|
665 | ||
|
666 | for sigName in keys: | |
|
667 | #print sigName | |
|
668 | sig = objDict[sigName] | |
|
669 | #for f in _BuiltinDtype.python_dtypes: print f.__class__ | |
|
670 | if sigName == '__extends': | |
|
671 | if isinstance(sig, str): | |
|
672 | sig = [sig] | |
|
673 | if isinstance(sig, list): | |
|
674 | for className in sig: | |
|
675 | newBase = self.__findClass(className, objClasses) | |
|
676 | ||
|
677 | # Remove Object extension if newBase extends it already: | |
|
678 | if Object in classBases and Object in inspect.getmro(newBase): | |
|
679 | classBases.remove(Object) | |
|
680 | classBases += [newBase] | |
|
681 | else: | |
|
682 | raise TypeError("Incorrect format for extending classes - %s"%(sig,)) | |
|
683 | elif isinstance(sig, BuiltinDtype): | |
|
684 | classData.append(self.parseDataSignature(sigName, sig)) | |
|
685 | elif isinstance(sig, Lookup.python_dtypes): | |
|
686 | classData.append(self.__parsePythonType(sigName, sig)) | |
|
687 | elif isinstance(sig, _Reference): | |
|
688 | classData.append(self.__parseReferenceSignature(sigName, sig, objClasses)) | |
|
689 | elif isinstance(sig, _Method): | |
|
690 | classMethods[sigName] = self.parseMethodSignature(sigName, sig.yamlString) | |
|
691 | elif isinstance(sig, (PrecisionTime.nsTime, PrecisionTime.psTime)): | |
|
692 | classData.append(_Parameter(sigName, True, sig)) | |
|
693 | elif isinstance(sig, _Parameter): # sig is already a parameter (we skipped a step) | |
|
694 | sig.name = sigName # we didn't know the name during load time - fill that in now | |
|
695 | classData.append(sig) | |
|
696 | else: | |
|
697 | msg = "Factory abstract base class doesn't " +\ | |
|
698 | "support the following signature: %r \"%s\""%(sig.__class__,str(sig)) | |
|
699 | print sig.__class__ | |
|
700 | raise SignatureException(msg) | |
|
701 | ||
|
702 | # Built-in attribute for all Dynamic Objects: | |
|
703 | classData.append(_Parameter('__time_created', classType=np.float64)) | |
|
704 | ||
|
705 | # Turn the object data / attributes into a usable __init__ method: | |
|
706 | classMethods["__init__"], meta_attributes = self.__buildInitializer(objClassName, classData) | |
|
707 | ||
|
708 | # Keep a record of the _Parameters created for later type resolution | |
|
709 | allClassData.extend(classData) | |
|
710 | ||
|
711 | """ | |
|
712 | __automaticMethods = { | |
|
713 | "getObjectName": lambda _self: getattr(_self, '__object_name'), | |
|
714 | "getRevisionNumber": lambda _self: getattr(_self, '__revision_number'), | |
|
715 | "getRevisionId": lambda _self: getattr(_self, '__revision_id'), | |
|
716 | "getRevisionSource": lambda _self: getattr(_self, '__revision_source'), | |
|
717 | "getRevisionTag": lambda _self: getattr(_self, '__revision_tag') | |
|
718 | } | |
|
719 | classMethods.update(__automaticMethods) | |
|
720 | """ | |
|
721 | ||
|
722 | # Put the method signatures into a namespace for the new class, | |
|
723 | # then dynamically build the class from this namespace. | |
|
724 | classNamespace = classMethods | |
|
725 | classNamespace["meta_attributes"] = meta_attributes | |
|
726 | cls = type(str(objClassName), tuple(classBases), classNamespace) | |
|
727 | objClasses[objClassName] = cls | |
|
728 | objClassesRev['%s.%s'%(objClassName,cls.meta_attributes["__revision_number"])] = cls | |
|
729 | ||
|
730 | # Create and register a constructor (loading) and representer (dumping) for the new class cls | |
|
731 | def construct_dynamic_object(loader, node): | |
|
732 | kwargs = loader.construct_mapping(node) | |
|
733 | # Remove revision control from loaded objects (info is in the class object!) | |
|
734 | for arg in kwargs.keys(): | |
|
735 | if arg in getattr(Object, 'getters') and arg != '__time_created': | |
|
736 | del kwargs[arg] | |
|
737 | return cls(**kwargs) | |
|
738 | revision = cls.meta_attributes["__revision_number"] | |
|
739 | DynamicYAML.Loader.add_constructor(u'!%s.%s'%(str(objClassName),revision), construct_dynamic_object) | |
|
740 | ||
|
741 | represent_dynamic_object = DynamicYAML.Dumper.represent_dynamic_object | |
|
742 | DynamicYAML.Dumper.add_representer(cls, represent_dynamic_object) | |
|
743 | ||
|
744 | def findClass(className): | |
|
745 | """ Search for the most recently added class object with given className """ | |
|
746 | try: | |
|
747 | return objClasses[className] # Look for reference to object in same YAML defn file: | |
|
748 | except KeyError: | |
|
749 | # Now look for reference to class object loaded from any YAML defn file, loading the | |
|
750 | # most recent version / revision (number) of the definition | |
|
751 | for dynClass in Object.dynamicClasses.keys()[::-1]: | |
|
752 | if dynClass.startswith(className): | |
|
753 | return Object.dynamicClasses[dynClass] | |
|
754 | ||
|
755 | # Still unresolved - raise exception: | |
|
756 | allDynamicClasses = repr(objClasses.keys() + Object.dynamicClasses.keys()) | |
|
757 | raise UnresolvedTypeException("Cannot resolve type '%s': Name not found in %s"%(className,allDynamicClasses)) | |
|
758 | ||
|
759 | ||
|
760 | def resolve(param): | |
|
761 | ||
|
762 | # Reference is just a string - that's the class name: | |
|
763 | if isinstance(param.classType.yamlObject, (str, unicode)): | |
|
764 | className = str(param.classType.yamlObject) | |
|
765 | param.classType = findClass(className) | |
|
766 | return | |
|
767 | ||
|
768 | # Reference is a dict containing class name and / or default values: | |
|
769 | if not isinstance(param.classType.yamlObject, dict): | |
|
770 | raise UnresolvedTypeException("Cannot resolve reference of type '%s'"%(param.classType.yamlObject.__class__,)) | |
|
771 | ||
|
772 | # Definitely a dict: | |
|
773 | refDict = param.classType.yamlObject | |
|
774 | ||
|
775 | # Determine the name of the class being referenced | |
|
776 | try: | |
|
777 | className = refDict["type"] | |
|
778 | except KeyError: | |
|
779 | raise KeyError("No 'type' key in reference dictionary for parameter '%s'"%(param.name,)) | |
|
780 | ||
|
781 | # Determine the class object corresponding to the class name | |
|
782 | param.classType = findClass(className) | |
|
783 | ||
|
784 | try: | |
|
785 | defaultParams = refDict["default"] | |
|
786 | except KeyError: | |
|
787 | defaultParams = None | |
|
788 | ||
|
789 | if defaultParams != None: | |
|
790 | for sub_param in defaultParams: | |
|
791 | if isinstance(sub_param.classType, _UnresolvedType): | |
|
792 | resolve(sub_param) | |
|
793 | param.default = param.classType( **defaultParams ) # Create the default object | |
|
794 | param.hasDefault = True | |
|
795 | else: | |
|
796 | param.hasDefault = False # for good measure | |
|
797 | ||
|
798 | # Is it an object array?: | |
|
799 | if "len" in refDict.keys(): | |
|
800 | param.length = refDict["len"] | |
|
801 | ||
|
802 | # Resolve any unresolved data-types: | |
|
803 | for param in allClassData: | |
|
804 | if isinstance(param.classType, _UnresolvedType): | |
|
805 | resolve(param) | |
|
806 | ||
|
807 | Object.dynamicClasses.update(objClassesRev) | |
|
808 | return objClasses | |
|
809 | ||
|
def load_defn(yaml):
    """ Shortcut for producing a single DynamicObject class object from
    the provided yaml definition in string format.

    Note: assumes the definition describes exactly one class; if several
    are present an arbitrary one is returned. """
    # next(iter(...)) instead of .values()[0]: dict.values() is a view on
    # Python 3 and does not support indexing; this form works on 2 and 3.
    return next(iter(Factory(yaml=yaml).classes.values()))
|
814 | ||
|
815 | ||
|
816 |
@@ -0,0 +1,69 | |||
|
1 | # | |
|
2 | # rps 6/9/2014 | |
|
3 | # mit haystack obs | |
|
4 | # | |
|
5 | # wrapper for Karl's code | |
|
6 | ||
|
7 | import DynamicObject # used for serial/deserial of complex python objects | |
|
8 | import Serializer # used for serial/deserial of complex python | |
|
9 | ||
|
10 | # | |
|
class DynamicSerializer:
    """ Thin wrapper around the Serializer module: selects one of the
    supported serialization backends by name and exposes json-style
    loads() / dumps() methods for it.

    Attributes:
        err_f      - True when the requested backend name was not recognized.
        whichList  - names of the supported backends (keys of
                     Serializer.serializers).
        serializer - the instantiated backend, or None on error.
    """

    def __init__(self, which='yaml'):
        # choices are: yaml, msgpack, hdf5, json
        self.err_f = False
        self.whichList = ['yaml', 'msgpack', 'hdf5', 'json']  # from Serializer.py
        self.err_f, self.serializer = self.initSerializer(which)

    def initSerializer(self, which):
        """ Instantiate the backend named by 'which'.

        Returns (err_f, serializer): err_f is True (and serializer None)
        when 'which' is not one of the names in self.whichList. """
        # Membership test replaces the original index-by-index while loop.
        if which not in self.whichList:
            return True, None
        # Serializer.serializers maps backend name -> class; call to build.
        return False, Serializer.serializers[which]()

    def loads(self, element):
        """ De-serialize 'element' (name borrowed from the json module). """
        return self.serializer.fromSerial(element)

    def dumps(self, element):
        """ Serialize 'element' (name borrowed from the json module). """
        return self.serializer.toSerial(element)
|
66 | ||
|
if __name__ == "__main__":
    # Smoke test: construct with the default ('yaml') backend and report.
    DynamicSerializer()
    # print() call form works on both Python 2 and 3 for a single string;
    # the original Py2-only print statement is a SyntaxError on Python 3.
    print("DynamicSerializer ran")
@@ -0,0 +1,221 | |||
|
1 | ''' | |
|
2 | Module containing YAML Loader and Dumper for DynamicObjects | |
|
3 | as well as built-in data types (numpy, PrecisionTime, datetime, Binary, ...) | |
|
4 | ||
|
5 | $Id$ | |
|
6 | ''' | |
|
7 | ||
|
8 | import yaml | |
|
9 | import OrderedYAML | |
|
10 | import DynamicObject | |
|
11 | import binascii | |
|
12 | import numpy as np | |
|
13 | import PrecisionTime | |
|
14 | import Lookup | |
|
15 | import pysvn | |
|
16 | ||
|
def load_defn(source, rev='head', repo=""):
    """ Import YAML definition(s) from given 'source' SVN location
    with specific revision number 'rev'. Returns a dict of the object
    names -> class object instances.

    NOTE: Object defns with same name & revision number will conflict /
    cause issues (regardless of svn location). """
    client = pysvn.Client()

    if rev == 'head':
        #yaml = client.cat(source)
        # Resolve 'head' to a concrete revision number from the repo info.
        # NOTE(review): queries 'repo', not 'source' - confirm the default
        # repo="" is valid for the pysvn client in the intended deployment.
        rev = client.info(repo).revision.number

    if source.startswith('http'):
        # HTTP(S) source: revision passed as a '?p=' query parameter.
        yaml = client.cat("%s?p=%d"%(source, rev))
    else:
        # Working-copy / repo path: revision passed as a pysvn.Revision.
        pysvn_rev = pysvn.Revision(pysvn.opt_revision_kind.number, rev)
        yaml = client.cat(source, pysvn_rev)

    # NOTE: the local name 'yaml' intentionally holds the defn text fetched
    # from SVN; it shadows the imported yaml module inside this function.
    revision_dict = {\
        "__revision_number": rev,
        "__revision_id": 'unknown',
        "__revision_source": source,
        "__revision_tag": 'unknown'}

    return DynamicObject.Factory(yaml=yaml, revision_dict=revision_dict).classes
|
43 | ||
|
class Loader(OrderedYAML.Loader):
    """ DynamicObject-aware YAML Loader.

    Extends the OrderedYAML Loader so that a mapping node carrying an
    unknown tag is treated as a serialized DynamicObject: its YAML
    definition is fetched from the SVN location recorded in the node
    ('__revision_source'), registered via DynamicObject.Factory, and the
    freshly registered constructor is used to build the object. """

    def __init__(self, stream):
        OrderedYAML.Loader.__init__(self, stream)

    def construct_object(self, node, deep=False):
        """ Unresolved tags on mapping nodes come from un-imported YAML definitions - import it """
        resolved = node.tag in self.yaml_constructors
        resolved = resolved or any([node.tag.startswith(x) for x in self.yaml_multi_constructors])
        if isinstance(node, yaml.nodes.MappingNode) and not resolved:
            # BUG FIX: construct_mapping is a bound method; the original
            # called self.construct_mapping(self, node), passing the loader
            # itself as the node argument.
            data = self.construct_mapping(node)
            self.constructed_objects[node] = data
            del self.recursive_objects[node]
            # 'in' instead of Py2-only dict.has_key().
            if '__revision_source' in data:
                # TODO: Handle password authentication
                client = pysvn.Client()
                source = data['__revision_source']
                if source.startswith('http'):
                    rev = data['__revision_number']
                    defn = client.cat("%s?p=%d"%(source, rev))
                else:
                    rev = pysvn.Revision(pysvn.opt_revision_kind.number, data['__revision_number'])
                    defn = client.cat(source, revision=rev)
                DynamicObject.Factory(yaml=defn) # Register the object

                constructor = self.yaml_constructors["%s.%s"%(data['__revision_name'], data['__revision_number'])]
                return constructor(node)
            else:
                # BUG FIX: the original message contained an unfilled '%s'
                # placeholder; fill it with the offending tag.
                raise Exception("Cannot load object with tag '%s' - cannot find YAML object definition (no __revision_source included)" % (node.tag,))
        else:
            return yaml.Loader.construct_object(self, node, deep=deep)
|
75 | ||
|
class Dumper(OrderedYAML.Dumper):
    """ YAML Dumper that serializes DynamicObjects under their versioned
    tag (!ObjectName.revision) instead of the default python/object tag. """

    def __init__(self, stream, *args, **kwargs):
        OrderedYAML.Dumper.__init__(self, stream, *args, **kwargs)

    def represent_dynamic_object(self, obj):
        """
        Override the !!python/object:__main__.xxx syntax with
        !ObjectName.zzz where zzz is the revision number of the Object obj
        """

        # Merge instance state with the class-level meta attributes so the
        # revision bookkeeping is embedded in the dumped mapping.
        state = {}
        state.update(obj.__dict__.items())
        state.update(obj.__class__.meta_attributes.items())
        name = obj.getObjectName() # obj.__class__.__name__
        revision = obj.getRevisionNumber()
        return self.represent_mapping(u'!%s.%s' % (name, revision), state)
|
93 | ||
|
# Dtypes to be stored as hex in YAML streams / strings.
# Matched by substring against the tag name, e.g. 'dtype.float32'
# matches 'float', so all float/complex families are hexlified.
hex_dtypes = ['float', 'complex', 'half', 'single', 'double']

# Register hex constructors for the numpy / built-in dtypes:
dtypes = Lookup.numpy_dtypes

# Inverse lookup for accessing tags given a class instance:
cls_dtypes = dict([(v,k) for (k,v) in dtypes.items()])
|
102 | ||
|
# Representer for numpy arrays:
def ndarray_representer(dumper, obj):
    """ Represent a numpy ndarray as a YAML sequence tagged !dtype.<name>:
    first element is the array shape, followed by the flattened values.
    For float/complex families a human-readable copy is written first and
    the authoritative hexlified copy follows (lossless round-trip). """
    #if isinstance(obj, np.ndarray):
    tag = 'dtype.'+obj.dtype.type.__name__
    # Substring match decides hex vs. plain-text value encoding.
    hexlify = any([x in tag for x in hex_dtypes])
    np_ary = obj
    #hex_ary = np.empty(np_ary.shape, dtype=yaml.nodes.ScalarNode)
    np_flat, hex_flat = np_ary.flat, [] #hex_ary.flat
    # First node in the output sequence: the original array shape.
    hex_flat.append(dumper.represent_sequence(u'tag:yaml.org,2002:seq', list(np_ary.shape), flow_style=True))
    if hexlify:
        # Human-readable (possibly lossy) rendering of the values.
        lst = []
        for i in range(len(np_flat)):
            value = u'%s'%(np_flat[i],)
            node = dumper.represent_scalar(u'tag:yaml.org,2002:str', value, style='')
            lst.append(node)
        hex_flat.append(yaml.nodes.SequenceNode(u'tag:yaml.org,2002:seq', lst, flow_style=True))
    # Authoritative values: hexlified when hexlify, plain text otherwise.
    lst = []
    for i in range(len(np_flat)):
        if hexlify: value = u'%s'%(binascii.hexlify(np_flat[i]),)
        else: value = u'%s'%(np_flat[i],)
        node = dumper.represent_scalar(u'tag:yaml.org,2002:str', value, style='')
        if hexlify: lst.append(node)
        else: hex_flat.append(node)
    if hexlify: hex_flat.append(yaml.nodes.SequenceNode(u'tag:yaml.org,2002:seq', lst, flow_style=True))
    return yaml.nodes.SequenceNode(u'!%s'%(tag,), hex_flat, flow_style=True)
Dumper.add_representer(np.ndarray, ndarray_representer)
|
129 | ||
|
# Constructor for ndarrays with arbitrary (specified) dtype:
def ndarray_constructor(loader, node, dtype, hexlify=False):
    """ Rebuild a numpy array from the sequence written by
    ndarray_representer: first element is the shape; when hexlify is True
    the authoritative hex values live in a trailing sub-sequence. """
    # First element of the sequence is the shape; pop it off the node.
    shape = loader.construct_sequence(node.value.pop(0))
    np_ary = np.empty(shape, dtype=dtype)
    np_flat = np_ary.flat # Flat iterator
    if hexlify:
        # Skip the human-readable copy; after the pop above, the hexlified
        # values are the second remaining element.
        node.value[1].tag = node.tag
        node = node.value[1] # only look at hexlified values
    for i in range(len(node.value)):
        # Over-ride the 'tag:yaml.org,2002:str' tag with correct data type
        node.value[i].tag = node.tag
        value = loader.construct_object(node.value[i])
        #if hexlify:
        #    value = binascii.unhexlify(value)
        #    value = np.frombuffer(value, dtype=dtype)
        np_flat[i] = value
    return np_ary
|
147 | ||
|
class __dtype_con:
    """ Factory binding a YAML dtype tag to a matched constructor /
    representer closure pair.

    Each instance captures its own (tag, hexlify, dtype) triple in
    self.fncn_attributes, so closures built in the registration loop below
    do not all share the loop's final tag (late-binding pitfall). """

    def __init__(self, tag):
        # Whether or not to convert to hex:
        hexlify = any([x in tag for x in hex_dtypes])
        dtype = dtypes[tag]

        # Mutable list containing constructor & representer info
        self.fncn_attributes = [tag, hexlify, dtype]

        def dtype_constructor(loader, node):
            # Recover the bound triple for this tag.
            tag, hexlify, dtype = self.fncn_attributes
            if isinstance(node, yaml.nodes.SequenceNode):
                # Sequence node -> full ndarray (shape + values).
                return ndarray_constructor(loader, node, dtype, hexlify=hexlify)
            else: # isinstance(node, yaml.nodes.ScalarNode):
                # Scalar node -> single value; re-resolve the dtype from
                # the node's own tag (strip the leading '!').
                value = loader.construct_scalar(node)
                dtype = dtypes[node.tag[1:]]
                if hexlify:
                    value = binascii.unhexlify(value)
                    value = np.frombuffer(value, dtype=dtype)[0]
                else:
                    value = dtype(value)
                return value

        def dtype_representer(dumper, obj):
            tag, hexlify, dtype = self.fncn_attributes
            # Plain python floats are promoted so they hexlify cleanly.
            if isinstance(obj, float): obj = np.float64(obj)
            if hexlify: value = u'%s'%(binascii.hexlify(obj),)
            else: value = u'%s'%(obj,)
            # Derive the tag from the object's actual class when possible.
            try: tag = u'!%s'%(cls_dtypes[obj.__class__])
            except KeyError: tag = ''
            node = dumper.represent_scalar(tag, value, style='')
            return node

        self.dtype_constructor = dtype_constructor
        self.dtype_representer = dtype_representer
|
184 | ||
|
# Register a constructor & representer pair for every numpy dtype tag.
# 'dtype.int' and 'dtype.bool' are excluded: they map to the python
# builtins, which YAML already handles with its default int/bool rules.
keys = [x for x in dtypes.keys() if x != 'dtype.int' and x != 'dtype.bool']

# (Removed leftover debug 'print keys' / 'print n' statements - they were
# Py2-only syntax and produced noise at import time - plus dead n/i locals.)
for tag in keys:
    # __dtype_con binds the (tag, hexlify, dtype) triple into the closures.
    dtype = __dtype_con(tag)
    Loader.add_constructor(u'!%s'%(tag,), dtype.dtype_constructor)
    Dumper.add_representer(dtypes[tag], dtype.dtype_representer)
|
# Precision time constructors & representers:
def ns_rep(dumper, obj):
    # Dump only the two integer fields; the derived total is rebuilt by
    # the nsTime constructor on load.
    state = {'second': obj.__dict__['second'], 'nanosecond': obj.__dict__['nanosecond']}
    return dumper.represent_mapping(u'!timestamp_ns', state)
def ps_rep(dumper, obj):
    state = {'second': obj.__dict__['second'], 'picosecond': obj.__dict__['picosecond']}
    return dumper.represent_mapping(u'!timestamp_ps', state)
def ns_con(loader, node): return PrecisionTime.nsTime(**loader.construct_mapping(node))
def ps_con(loader, node): return PrecisionTime.psTime(**loader.construct_mapping(node))

Dumper.add_representer(PrecisionTime.nsTime, ns_rep)
Dumper.add_representer(PrecisionTime.psTime, ps_rep)
# Both the short and long tag spellings map to the same constructors.
Loader.add_constructor(u'!timestamp_ns', ns_con)
Loader.add_constructor(u'!timestamp_nanosecond', ns_con)
Loader.add_constructor(u'!timestamp_ps', ps_con)
Loader.add_constructor(u'!timestamp_picosecond', ps_con)
|
215 | ||
|
# Binary object constructor & representer:
# The Binary wrapper round-trips via its attribute dict under '!binary'.
def bin_rep(dumper, obj): return dumper.represent_mapping(u'!binary', obj.__dict__)
def bin_con(loader, node): return DynamicObject.Binary(**loader.construct_mapping(node))
Dumper.add_representer(DynamicObject.Binary, bin_rep)
Loader.add_constructor(u'!binary', bin_con)
|
221 |
@@ -0,0 +1,62 | |||
|
'''
Helper module for DynamicObject module - contains dictionaries
of data types built-in to our YAML IDL, converting back and forth between
strings / YAML tags and python class instances.

$Id$
'''

import datetime
import numpy as np
import PrecisionTime
import DynamicObject
Binary = DynamicObject.Binary
import platform
import collections

# Implicit Types ('long' and 'unicode' exist on Python 2 only):
python_dtypes = tuple([bool,int,long,float,str,datetime.datetime,list,
                       set,dict,tuple,unicode])

# Numpy Data-types.
# Fixed: the original listed 'dtype.uintc' and 'dtype.uint' twice.
# NOTE(review): np.int / np.float / np.complex / np.long are aliases of the
# python builtins and are removed in numpy >= 1.24 - revisit if numpy is
# ever upgraded.
numpy_dtypes = {'dtype.bool': bool, 'dtype.int': np.int, 'dtype.int8': np.int8,
                'dtype.int16': np.int16, 'dtype.int32': np.int32, 'dtype.int64': np.int64,
                'dtype.uint8': np.uint8, 'dtype.uint16': np.uint16, 'dtype.uint32': np.uint32,
                'dtype.uint64': np.uint64, 'dtype.float': np.float, 'dtype.float16': np.float16,
                'dtype.float32': np.float32, 'dtype.float64': np.float64, 'dtype.complex': np.complex,
                'dtype.complex64': np.complex64, 'dtype.complex128': np.complex128,
                'dtype.byte': np.byte, 'dtype.short': np.short, 'dtype.intc': np.intc,
                'dtype.longlong': np.longlong, 'dtype.intp': np.intp, 'dtype.ubyte': np.ubyte,
                'dtype.ushort': np.ushort, 'dtype.uintc': np.uintc, 'dtype.uint': np.uint,
                'dtype.ulonglong': np.ulonglong, 'dtype.uintp': np.uintp,
                'dtype.half': np.half, 'dtype.single': np.single,
                'dtype.double': np.double, 'dtype.longfloat': np.longfloat,
                'dtype.csingle': np.csingle, 'dtype.clongfloat': np.clongfloat, 'dtype.long': np.long}

if platform.architecture()[0] != '32bit': # 64bit - certain numpy types exist
    numpy_dtypes.update({'dtype.float128': np.float128, 'dtype.complex256': np.complex256})
    bit32 = False
else:
    bit32 = True

# Built-in objects:
builtin_objects = {'binary': Binary, 'nsTime': PrecisionTime.nsTime, 'psTime': PrecisionTime.psTime,
                   'timestamp_ns': PrecisionTime.nsTime, 'timestamp_ps': PrecisionTime.psTime,
                   'timestamp_nanosecond': PrecisionTime.nsTime, 'timestamp_picosecond': PrecisionTime.psTime,
                   'datetime': datetime.datetime, 'Binary': Binary}

builtin_objects_simple = {'nsTime': PrecisionTime.nsTime, 'psTime': PrecisionTime.psTime,
                          'binary': Binary, 'datetime': datetime.datetime,
                          'Binary': Binary}

# Inverse lookup for accessing tags given a class instance:
cls_dtypes = dict([(v,k) for (k,v) in numpy_dtypes.items()])
obj_dtypes = dict([(v,k) for (k,v) in builtin_objects_simple.items()])

# Pointer to the list of all Object classes created, as located in the Object module / class:
dynamicClasses = DynamicObject.Object.dynamicClasses
|
61 | ||
|
62 |
@@ -0,0 +1,89 | |||
|
1 | ''' | |
|
2 | A YAML Loader and Dumper which provide ordered dictionaries in place | |
|
3 | of dictionaries (to keep the order of attributes as | |
|
4 | found in the original YAML object file). | |
|
5 | ||
|
6 | This module is modified from a submission on pyyaml.org: | |
|
7 | http://pyyaml.org/attachment/ticket/161/use_ordered_dict.py | |
|
8 | ||
|
9 | $Id$ | |
|
10 | ''' | |
|
11 | ||
|
12 | import yaml | |
|
13 | import collections | |
|
14 | ||
|
class Loader(yaml.loader.Loader):
    """ YAML Loader that builds OrderedDicts in place of plain dicts,
    preserving the attribute order found in the source document. """

    def __init__(self, stream):
        yaml.loader.Loader.__init__(self, stream)

    def construct_ordered_mapping(self, node, deep=False):
        """ Replacement mapping constructor producing an OrderedDict """
        if not isinstance(node, yaml.MappingNode):
            raise yaml.constructor.ConstructorError(None, None,
                "expected a mapping node, but found %s" % node.id,
                node.start_mark)
        ordered = collections.OrderedDict()
        for k_node, v_node in node.value:
            key_obj = self.construct_object(k_node, deep=deep)
            if not isinstance(key_obj, collections.Hashable):
                raise yaml.constructor.ConstructorError("while constructing a mapping", node.start_mark,
                    "found unhashable key", k_node.start_mark)
            ordered[key_obj] = self.construct_object(v_node, deep=deep)
        return ordered
    # yaml.constructor.BaseConstructor.construct_mapping = construct_ordered_mapping

    def construct_mapping(self, node, deep=False):
        # Delegate so every mapping in the document becomes an OrderedDict.
        return self.construct_ordered_mapping(node, deep=deep)

    def construct_yaml_map_with_ordered_dict(self, node):
        # Two-step (generator) construction supports recursive references.
        data = collections.OrderedDict()
        yield data
        data.update(self.construct_mapping(node))
|
class Dumper(yaml.dumper.Dumper):
    """ YAML Dumper producing documents from OrderedDicts """

    def __init__(self, stream, *args, **kwargs):
        yaml.dumper.Dumper.__init__(self, stream, *args, **kwargs)

    def represent_ordered_mapping(self, tag, mapping, flow_style=None):
        """ Replacement mapping representer for OrderedDicts """
        value = []
        # The (still empty) node must be registered under alias_key BEFORE
        # the items are represented, so recursive/aliased references to
        # this mapping resolve to the same node.
        node = yaml.MappingNode(tag, value, flow_style=flow_style)
        if self.alias_key is not None:
            self.represented_objects[self.alias_key] = node
        best_style = True
        if hasattr(mapping, 'items'):
            mapping = list(mapping.items())
        for item_key, item_value in mapping:
            node_key = self.represent_data(item_key)
            node_value = self.represent_data(item_value)
            # Fall back to block style as soon as any key or value is
            # non-scalar or carries an explicit style.
            if not (isinstance(node_key, yaml.ScalarNode) and not node_key.style):
                best_style = False
            if not (isinstance(node_value, yaml.ScalarNode) and not node_value.style):
                best_style = False
            value.append((node_key, node_value))
        if flow_style is None:
            if self.default_flow_style is not None:
                node.flow_style = self.default_flow_style
            else:
                node.flow_style = best_style
        return node
    # yaml.representer.BaseRepresenter.represent_mapping = represent_ordered_mapping

    def represent_mapping(self, tag, mapping, flow_style=None):
        return self.represent_ordered_mapping(tag, mapping, flow_style=flow_style)
|
80 | ||
|
81 | # Loader.add_constructor( | |
|
82 | # u'tag:yaml.org,2002:map', | |
|
83 | # Loader.construct_yaml_map_with_ordered_dict | |
|
84 | # ) | |
|
85 | # | |
|
86 | # Dumper.add_representer( | |
|
87 | # collections.OrderedDict, | |
|
88 | # yaml.representer.SafeRepresenter.represent_dict | |
|
89 | # ) |
@@ -0,0 +1,211 | |||
|
1 | #!/usr/local/midas/bin/python | |
|
2 | ||
|
3 | """PrecisionTime.py is a collection of python classes to manipulate times with high | |
|
4 | precision using integer logic. | |
|
5 | ||
|
6 | Written by "Bill Rideout":mailto:wrideout@haystack.mit.edu May 24, 2007 | |
|
7 | ||
|
8 | $Id$ | |
|
9 | """ | |
|
10 | import types | |
|
11 | ||
|
class nsTime:
    """nsTime is a class to handle times given as UT second (integer) and nanosecond (integer)

    If nanosecond > 1E9, seconds will be added to second

    All arithmetic is integer-based, so no floating point precision is
    lost. Updated to run on both Python 2 and Python 3 (floor division,
    call-form raise, no cmp()/types module).
    """

    def __init__(self, second, nanosecond):
        self.second = int(second)
        if self.second < 0:
            raise ValueError('seconds must be greater than 0, not %i' % (self.second))
        nanosecond = int(nanosecond)
        if nanosecond < 0:
            raise ValueError('nanoseconds must be greater 0, not %i' % (nanosecond))
        # Carry whole seconds out of the nanosecond field; '//' keeps the
        # Py2 integer-division semantics on Py3.
        addSec = nanosecond // 1000000000
        if addSec > 0:
            self.second += addSec
        self.nanosecond = nanosecond % 1000000000
        # Total time as a single integer nanosecond count.
        self.totalNS = self.nanosecond + self.second * 1000000000

    def __add__(self, other):
        """__add__ another nsTime to this one and return a new one as result
        """
        nsResult = self.nanosecond + other.nanosecond
        addSec = nsResult // 1000000000
        newSec = self.second + other.second + addSec
        newNS = nsResult % 1000000000
        return(nsTime(newSec, newNS))

    def increase(self, other):
        """increase adds other to self, changing self (rather than creating a new object)
        """
        nsResult = self.nanosecond + other.nanosecond
        addSec = nsResult // 1000000000
        self.second = self.second + other.second + addSec
        self.nanosecond = nsResult % 1000000000
        self.totalNS = self.nanosecond + self.second * 1000000000

    def __sub__(self, other):
        """__sub__ another nsTime from this one and return a new one as result
        """
        nsResult = self.nanosecond - other.nanosecond
        if nsResult < 0:
            # Borrow one second from the seconds field.
            addSec = 1
            nsResult += 1000000000
        else:
            addSec = 0
        newSec = (self.second - other.second) - addSec
        return(nsTime(newSec, nsResult))

    def multiply(self, factor):
        """multiply this nsTime times an integer
        """
        import numbers  # local import keeps module-level deps unchanged
        # Accept any integral type (int, Py2 long, numpy integers) and
        # reject floats and everything else, as before.
        if not isinstance(factor, numbers.Integral):
            raise ValueError('Illegal type %s passed into nsTime.multiply' % (str(type(factor))))
        newTotalNS = self.totalNS * factor
        newSeconds = newTotalNS // 1000000000
        newNanoseconds = newTotalNS - (newSeconds * 1000000000)
        return(nsTime(newSeconds, newNanoseconds))

    def integerDivision(self, other):
        """integerDivision returns the total number of other nsTimes that fit in self
        """
        return(self.totalNS // other.totalNS)

    def getUnixTime(self):
        """ getUnixTime() returns a Unix style time as a float. """
        return(float(self.second) + float(self.nanosecond)/1.0e9)

    def __mod__(self, other):
        """__mod__ implements self % other.
        """
        import numbers
        if isinstance(other, numbers.Integral):
            # Plain integer: modulo against the raw nanosecond count.
            return self.totalNS % other
        else:
            return self.totalNS % other.totalNS

    def __eq__(self, other):
        """ equality of two nsTime objects """
        if not (hasattr(other, 'second') and hasattr(other, 'nanosecond')): return False
        return self.__cmp__(other) == 0

    def __ne__(self, other):
        # Explicit inverse of __eq__ (Py2 does not derive != from ==).
        return not self.__eq__(other)

    def __hash__(self):
        # Defining __eq__ makes the class unhashable on Py3 unless __hash__
        # is provided; hash on the value so it is consistent with equality.
        return hash((self.second, self.nanosecond))

    def __cmp__(self, other):
        """compare two nsTime objects; returns -1, 0 or 1 (cmp() style)
        """
        # cmp() was removed in Python 3 - compare the fields directly.
        if self.second != other.second:
            return -1 if self.second < other.second else 1
        if self.nanosecond != other.nanosecond:
            return -1 if self.nanosecond < other.nanosecond else 1
        return 0

    def __str__(self):
        return '%d.%09d' % (self.second, self.nanosecond)
110 | ||
|
111 | ||
|
class psTime:
    """psTime is a class to handle times given as UT second (integer) and picosecond (integer)

    If picosecond > 1E12, seconds will be added to second

    All arithmetic is integer-based, so no floating point precision is
    lost. Updated to run on both Python 2 and Python 3 (floor division,
    call-form raise, no cmp()/types module).
    """

    def __init__(self, second, picosecond):
        self.second = int(second)
        if self.second < 0:
            raise ValueError('seconds must be greater than 0, not %i' % (self.second))
        picosecond = int(picosecond)
        if picosecond < 0:
            raise ValueError('picoseconds must be greater 0, not %i' % (picosecond))
        # Carry whole seconds out of the picosecond field; '//' keeps the
        # Py2 integer-division semantics on Py3.
        addSec = picosecond // 1000000000000
        if addSec > 0:
            self.second += addSec
        self.picosecond = picosecond % 1000000000000
        # Total time as a single integer picosecond count.
        self.totalPS = self.picosecond + self.second * 1000000000000

    def __add__(self, other):
        """__add__ another psTime to this one and return a new one as result
        """
        psResult = self.picosecond + other.picosecond
        addSec = psResult // 1000000000000
        newSec = self.second + other.second + addSec
        newPS = psResult % 1000000000000
        return(psTime(newSec, newPS))

    def increase(self, other):
        """increase adds other to self, changing self (rather than creating a new object)
        """
        psResult = self.picosecond + other.picosecond
        addSec = psResult // 1000000000000
        self.second = self.second + other.second + addSec
        self.picosecond = psResult % 1000000000000
        self.totalPS = self.picosecond + self.second * 1000000000000

    def __sub__(self, other):
        """__sub__ another psTime from this one and return a new one as result
        """
        psResult = self.picosecond - other.picosecond
        if psResult < 0:
            # Borrow one second from the seconds field.
            addSec = 1
            psResult += 1000000000000
        else:
            addSec = 0
        newSec = (self.second - other.second) - addSec
        return(psTime(newSec, psResult))

    def multiply(self, factor):
        """multiply this psTime times an integer
        """
        import numbers  # local import keeps module-level deps unchanged
        # Accept any integral type (int, Py2 long, numpy integers) and
        # reject floats and everything else, as before.
        if not isinstance(factor, numbers.Integral):
            raise ValueError('Illegal type %s passed into psTime.multiply' % (str(type(factor))))
        newTotalPS = self.totalPS * factor
        newSeconds = newTotalPS // 1000000000000
        newPicoseconds = newTotalPS - (newSeconds * 1000000000000)
        return(psTime(newSeconds, newPicoseconds))

    def integerDivision(self, other):
        """integerDivision returns the total number of other psTimes that fit in self
        """
        return(self.totalPS // other.totalPS)

    def getUnixTime(self):
        """ getUnixTime() returns a Unix style time as a float. """
        return(float(self.second) + float(self.picosecond)/1.0e12)

    def __mod__(self, other):
        """__mod__ implements self % other.
        """
        import numbers
        if isinstance(other, numbers.Integral):
            # Plain integer: modulo against the raw picosecond count.
            return self.totalPS % other
        else:
            return self.totalPS % other.totalPS

    def __eq__(self, other):
        """ equality of two psTime objects """
        if not (hasattr(other, 'second') and hasattr(other, 'picosecond')): return False
        return self.__cmp__(other) == 0

    def __ne__(self, other):
        # Explicit inverse of __eq__ (Py2 does not derive != from ==).
        return not self.__eq__(other)

    def __hash__(self):
        # Defining __eq__ makes the class unhashable on Py3 unless __hash__
        # is provided; hash on the value so it is consistent with equality.
        return hash((self.second, self.picosecond))

    def __cmp__(self, other):
        """compare two psTime objects; returns -1, 0 or 1 (cmp() style)
        """
        # cmp() was removed in Python 3 - compare the fields directly.
        if self.second != other.second:
            return -1 if self.second < other.second else 1
        if self.picosecond != other.picosecond:
            return -1 if self.picosecond < other.picosecond else 1
        return 0

    def __str__(self):
        # BUG FIX: was '%d.%12d', which space-padded the fractional part
        # (e.g. '1.           5'); zero-padding to 12 digits keeps the
        # string a valid decimal number, matching nsTime's '%09d'.
        return '%d.%012d' % (self.second, self.picosecond)
|
210 | ||
|
211 |
@@ -0,0 +1,373 | |||
|
1 | ''' | |
|
2 | Module containing classes with serialization and de-serialization services. | |
|
3 | ||
|
4 | $Id$ | |
|
5 | ''' | |
|
6 | ||
|
7 | import Lookup | |
|
8 | import numpy as np | |
|
9 | import zlib | |
|
10 | import binascii | |
|
11 | import yaml | |
|
12 | import DynamicObject | |
|
13 | import DynamicYAML | |
|
14 | import PrecisionTime | |
|
15 | import datetime | |
|
16 | import re | |
|
17 | import os | |
|
18 | #import json | |
|
19 | import jsonpickle | |
|
20 | import jpickle | |
|
21 | import h5py | |
|
22 | import msgpack | |
|
23 | ||
|
class CompressionException(Exception):
    """Raised when an unknown compression scheme is requested."""
    pass
|
25 | ||
|
class Serializer:
    """Base class for pickle-like serialization of DynamicObjects.

    Subclasses override fromSerial()/toSerial(); this base class layers
    file handling and optional zlib ('gzip') compression on top of them.
    """

    def __init__(self):
        pass

    def dump(self, obj, file_name, compression=None):
        """Serialize *obj* with toSerial() and write it to *file_name*."""
        string = self.dumps(obj, compression)
        # Binary mode + context manager: serialized (possibly zlib-compressed)
        # payloads are byte streams, and the handle is always closed.
        with open(file_name, 'wb') as fp:
            fp.write(string)

    def dumps(self, obj, compression=None):
        """Return *obj* serialized to a string, optionally zlib-compressed."""
        if compression == 'gzip':
            return zlib.compress(self.toSerial(obj))
        elif compression in [None, '']:
            return self.toSerial(obj)
        else:
            # Bug fix: this is the *compression* path; the original message
            # said "decompression".
            raise CompressionException("Invalid compression type '%r'" % (compression,))

    def load(self, file_name, compression=None):
        """Read *file_name* and return the object it contains."""
        with open(file_name, 'rb') as fp:
            string = fp.read()
        return self.loads(string, compression)

    def loads(self, string, compression=None):
        """Return the object encoded in *string*, decompressing if requested."""
        if compression == 'gzip':
            return self.fromSerial(zlib.decompress(string))
        elif compression in [None, '']:
            return self.fromSerial(string)
        else:
            raise CompressionException("Invalid compression type '%r'" % (compression,))

    def fromSerial(self, string):
        """Deserialize *string* (identity in the base class)."""
        return string

    def toSerial(self, obj):
        """Serialize *obj* (repr() in the base class)."""
        return repr(obj)
|
68 | ||
|
class YAMLSerializer(Serializer):
    """Serializer that round-trips objects through YAML text."""

    def __init__(self):
        Serializer.__init__(self)

    def fromSerial(self, string):
        """Parse *string* using the project's DynamicYAML loader."""
        return yaml.load(string, Loader=DynamicYAML.Loader)

    def toSerial(self, obj):
        """Render *obj* using the project's DynamicYAML dumper."""
        return yaml.dump(obj, Dumper=DynamicYAML.Dumper)
|
82 | ||
|
# Regular expression taken from yaml.constructor.py
# Matches ISO-8601-style timestamps: date, optional time, optional fractional
# seconds, and an optional 'Z' or +/-HH[:MM] timezone suffix. Named groups
# are consumed by construct_timestamp() below.
# NOTE(review): ur'...' literals are Python 2 only (invalid syntax on Py3).
timestamp_regexp_str = str(\
    ur'^(?P<year>[0-9][0-9][0-9][0-9])'
    ur'-(?P<month>[0-9][0-9]?)'
    ur'-(?P<day>[0-9][0-9]?)'
    ur'(?:(?:[Tt]|[ \t]+)'
    ur'(?P<hour>[0-9][0-9]?)'
    ur':(?P<minute>[0-9][0-9])'
    ur':(?P<second>[0-9][0-9])'
    ur'(?:\.(?P<fraction>[0-9]*))?'
    ur'(?:[ \t]*(?P<tz>Z|(?P<tz_sign>[-+])(?P<tz_hour>[0-9][0-9]?)'
    ur'(?::(?P<tz_minute>[0-9][0-9]))?))?)?$')
# re.X (verbose) is harmless here since the pattern contains no literal
# whitespace outside character classes.
timestamp_regexp = re.compile(timestamp_regexp_str, re.X)
|
96 | ||
|
def construct_timestamp(value):
    """Parse an ISO-8601-ish string into a date or datetime.

    Taken & modified from yaml.constructor.py; relies on the module-level
    timestamp_regexp. Returns a datetime.date when no time part is present,
    otherwise a datetime.datetime normalized by any +/-HH[:MM] offset.
    """
    fields = timestamp_regexp.match(value).groupdict()
    year = int(fields['year'])
    month = int(fields['month'])
    day = int(fields['day'])
    # Date-only input: no hour group captured.
    if not fields['hour']:
        return datetime.date(year, month, day)
    hour = int(fields['hour'])
    minute = int(fields['minute'])
    second = int(fields['second'])
    # Fractional seconds: keep at most 6 digits, right-pad to microseconds.
    micro = 0
    if fields['fraction']:
        digits = fields['fraction'][:6].ljust(6, '0')
        micro = int(digits)
    stamp = datetime.datetime(year, month, day, hour, minute, second, micro)
    # Apply an explicit numeric timezone offset, if any, to get UTC-ish time.
    if fields['tz_sign']:
        offset = datetime.timedelta(hours=int(fields['tz_hour']),
                                    minutes=int(fields['tz_minute'] or 0))
        if fields['tz_sign'] == '-':
            offset = -offset
        if offset:
            stamp -= offset
    return stamp
|
129 | ||
|
class MessagePackSerializer(Serializer):
    """ Serializes a Object to/from MessagePack format.

    Encoding convention: objects become dicts carrying a '__meta_attributes'
    key; numpy scalars become (hexlified-bytes, dtype-name) tuples; numpy
    arrays become flat lists headed by [dtype-name, shape, ...values].
    Uses Python 2 idioms (dict.has_key) throughout.
    """

    def __fromSerial(self, msg_dict):
        """Recursively rebuild an object from unpacked msgpack structures."""
        if not isinstance(msg_dict, (dict, list, tuple)):
            return msg_dict # msg_dict is a value - return it
        if isinstance(msg_dict, dict) and msg_dict.has_key('__meta_attributes'):
            meta_attr = msg_dict['__meta_attributes']
            msg_dict.pop('__meta_attributes')
            if meta_attr.has_key('type'):
                # Built-in wrapped types: datetime, nsTime, psTime, etc.
                if meta_attr['type'] == 'datetime':
                    return construct_timestamp(str(msg_dict['ts']))
                elif meta_attr['type'] == 'nsTime':
                    # totalNS/totalPS are derived fields, not ctor kwargs.
                    msg_dict.pop('totalNS')
                elif meta_attr['type'] == 'psTime':
                    msg_dict.pop('totalPS')
                try: dtype = Lookup.cls_dtypes[meta_attr['type']]
                except KeyError: dtype = Lookup.builtin_objects[meta_attr['type']]
                return dtype(**msg_dict)
            else:
                # A DynamicObject: rebuild each attribute, then the object
                # class is located by its name.revision tag.
                for key in msg_dict.keys():
                    msg_dict[key] = self.__fromSerial(msg_dict[key])
                cls = Lookup.dynamicClasses['%s.%s'%(meta_attr['__object_name'],meta_attr['__revision_number'])]
                return cls(**msg_dict)
        elif msg_dict == ():
            # msgpack renders empty lists as empty tuples; undo that.
            return []
        elif isinstance(msg_dict[0], str) and msg_dict[1] in Lookup.numpy_dtypes and\
             isinstance(msg_dict, tuple) and len(msg_dict) == 2:
            # (hex-string, dtype-name) pair -> numpy scalar.
            # NOTE(review): msg_dict[1] is indexed before the tuple/len checks
            # run; a 1-element sequence would raise here -- confirm inputs.
            value = binascii.unhexlify(msg_dict[0])
            return np.frombuffer(value, dtype=Lookup.numpy_dtypes[msg_dict[1]])[0]

        tup = isinstance(msg_dict, tuple)
        if tup and len(msg_dict) > 1 and msg_dict[0] in Lookup.numpy_dtypes.keys():
            # Flat-encoded ndarray: [dtype-name, shape, elem0, elem1, ...].
            msg_flat = list(msg_dict)
            dtypeName = msg_flat.pop(0)
            dtype = Lookup.numpy_dtypes[dtypeName]
            shape = msg_flat.pop(0)
            obj = np.empty(shape, dtype=dtype)
            np_flat = obj.flat
            for i in range(len(np_flat)):
                if isinstance(msg_flat[i], float):
                    # Floats were stored directly, not hexlified.
                    value = msg_flat[i]
                else:
                    value = self.__fromSerial((msg_flat[i], dtypeName))
                np_flat[i] = value
            return obj
        else:
            return msg_dict

    def fromSerial(self, string):
        """Unpack a msgpack byte string and rebuild the original object."""
        msg_dict = msgpack.unpackb(string)
        return self.__fromSerial(msg_dict)

    def __toSerial(self, obj):
        """Recursively convert *obj* into msgpack-friendly structures."""

        if isinstance(obj, (PrecisionTime.nsTime, PrecisionTime.psTime, DynamicObject.Binary, datetime.datetime)):
            # Known wrapper types: flatten to a dict tagged with a type name.
            msg_dict = {}
            if isinstance(obj, datetime.datetime):
                msg_dict['ts'] = obj.isoformat(' ')
            else:
                msg_dict.update(obj.__dict__)
            msg_dict['__meta_attributes'] = {'type': obj.__class__.__name__}
            return msg_dict
        elif isinstance(obj, DynamicObject.Object):
            # DynamicObject: serialize every attribute, keep meta info.
            msg_dict = {}
            for key, value in obj.__dict__.items():
                msg_dict[key] = self.__toSerial(value)

            msg_dict['__meta_attributes'] = obj.__class__.meta_attributes
            return msg_dict
        elif isinstance(obj, np.ndarray):
            # ndarray -> flat list: dtype name, shape, then each element.
            np_flat = obj.flat
            msg_flat = []
            msg_flat.append(Lookup.cls_dtypes[obj.dtype.type]) # dtype is first element
            msg_flat.append(obj.shape) # shape of array is second element
            for i in range(len(np_flat)):
                toSer = self.__toSerial(np_flat[i])
                if isinstance(toSer, tuple):
                    # Keep only the hex payload; dtype is already in the header.
                    msg_flat.append(toSer[0])
                else:
                    msg_flat.append(toSer)
            return list(msg_flat)

        is_builtin = obj.__class__ in Lookup.numpy_dtypes.values()
        #is_python = isinstance(obj, Lookup.python_dtypes)
        if is_builtin: # and not is_python:
            # numpy scalar -> (hexlified raw bytes, dtype-name).
            try:
                #print obj.__class__
                msg_dict = (binascii.hexlify(obj), Lookup.cls_dtypes[obj.__class__])
                return msg_dict
            except TypeError: # numpy dtype is a built-in python type... force the hexlify:
                # Width depends on the platform word size recorded in Lookup.
                if not Lookup.bit32:
                    if obj.__class__ == int: return (binascii.hexlify(np.int64(obj)), 'dtype.int64')
                    elif obj.__class__ == float: return (binascii.hexlify(np.float64(obj)), 'dtype.float64')
                else:
                    #print np.int32(obj).__class__, obj.__class__
                    if obj.__class__ == int: return (binascii.hexlify(np.int32(obj)), 'dtype.int32')
                    elif obj.__class__ == float: return (binascii.hexlify(np.float32(obj)), 'dtype.float32')
                raise
        else:
            # Plain msgpack-able value (str, dict, list, bool, None, ...).
            return obj

    def toSerial(self, obj):
        """Return *obj* packed into a msgpack byte string."""
        #if Lookup.bit32 and np.int32 != np.int: np.int32 = np.int
        toSer = self.__toSerial(obj)
        #print toSer
        value = msgpack.packb(toSer)
        return value
|
238 | ||
|
class HDF5Serializer(Serializer):
    """ Serializes a Object to/from HDF5 format.

    h5py can only read/write real files, so both directions stage the
    payload through a temporary 'tmpN.hdf5' file in the working directory.
    Objects are encoded as groups tagged with a '__type' dataset.
    """

    # Monotonic counter used to generate unique temp-file names per process.
    tmp_num = 0

    def __fromSerial(self, grp):
        """Recursively rebuild an object from an h5py group/dataset."""

        if isinstance(grp, h5py.Dataset):
            return grp.value

        elif isinstance(grp, h5py.Group) and '__type' in grp.keys():
            typ = grp['__type'].value
            if typ == 'datetime':
                return construct_timestamp(str(grp['ts'].value))
            elif typ == '_null':
                # Sentinel group for a serialized None.
                return None
            elif typ == 'tuple':
                return tuple(grp['tuple'])
            elif typ == 'empty_list':
                return []
            # Otherwise the tag names either a simple builtin wrapper or a
            # DynamicObject class ('name.revision').
            try: cls = Lookup.builtin_objects_simple[typ]
            except KeyError: cls = Lookup.dynamicClasses[typ]
            args = []
            for key in grp.keys():
                fromSer = self.__fromSerial(grp[key])
                args.append((key, fromSer))
            kwargs = dict(args)
            # '__type' is bookkeeping, not a constructor argument.
            kwargs.pop('__type')
            return cls(**kwargs)
        #else:
        #    return grp.value


    def fromSerial(self, string):
        """Write *string* to a temp .hdf5 file, parse it, and clean up."""
        HDF5Serializer.tmp_num += 1
        fn = 'tmp%d.hdf5'%(HDF5Serializer.tmp_num-1,)
        fp = open(fn, 'wb')
        fp.write(string)
        fp.flush(), fp.close()

        root = h5py.File(fn, driver='core')
        try:
            fromSer = self.__fromSerial(root['dataset'])
        except:
            # Always close and delete the temp file, even on failure.
            root.flush(), root.close()
            os.remove(fn)
            raise

        root.flush(), root.close()
        os.remove(fn)

        return fromSer

    def __toSerial(self, obj, grp, name):
        """Recursively write *obj* into group *grp* under key *name*."""

        if isinstance(obj, datetime.datetime):
            sub_grp = grp.create_group(name)
            sub_grp['__type'] = 'datetime'
            sub_grp['ts'] = obj.isoformat(' ')

        elif isinstance(obj, tuple(Lookup.builtin_objects_simple.values())):
            # Simple wrapper objects (e.g. precision times): dump their
            # __dict__, skipping derived totals and unset fields.
            sub_grp = grp.create_group(name)
            sub_grp['__type'] = Lookup.obj_dtypes[obj.__class__]
            for key, value in obj.__dict__.items():
                if value != None and key not in ['totalNS', 'totalPS']:
                    sub_grp[key] = value

        elif obj == None:
            sub_grp = grp.create_group(name)
            sub_grp['__type'] = '_null'

        elif isinstance(obj, DynamicObject.Object):
            # Create the new group and assign unique identifier for this type of DynamicObject
            sub_grp = grp.create_group(name)
            tag = '%s.%s'%(obj.getObjectName(), obj.getRevisionNumber())
            sub_grp['__type'] = tag
            # Put all of the DynamicObject's attributes into the new h5py group
            for key, value in obj.__dict__.items():
                self.__toSerial(value, sub_grp, key)

        elif isinstance(obj, tuple):
            sub_grp = grp.create_group(name)
            sub_grp['__type'] = 'tuple'
            sub_grp['tuple'] = obj

        elif isinstance(obj, list) and len(obj) == 0:
            # h5py cannot store an empty sequence directly; tag it instead.
            sub_grp = grp.create_group(name)
            sub_grp['__type'] = 'empty_list'

        else:
            # Scalars, strings, non-empty lists, numpy arrays: store directly.
            grp[name] = obj

    def toSerial(self, obj):
        """Serialize *obj* through a temp .hdf5 file and return its bytes."""
        HDF5Serializer.tmp_num += 1
        fn = 'tmp%d.hdf5'%(HDF5Serializer.tmp_num,)
        root = h5py.File(fn, driver='core')
        try:
            self.__toSerial(obj, root, 'dataset')
        except:
            root.flush(), root.close()
            os.remove(fn)
            raise
        root.flush(), root.close()

        # Read the raw file image back as the serialized message.
        fp = open(fn, 'rb')
        msg = fp.read()
        fp.close()
        os.remove(fn)

        return msg
|
349 | ||
|
350 | # Alias for the standard json serializer: | |
|
class jsonSerializer(Serializer):
    """Serializer backed by jsonpickle (standard-json-compatible alias)."""

    def fromSerial(self, string):
        """Decode a jsonpickle-encoded string back into an object."""
        return jsonpickle.decode(string)

    def toSerial(self, string):
        """Encode an object via jsonpickle, with a recursion-depth cap."""
        return jsonpickle.encode(string, max_depth=500)
|
358 | ||
|
# Registry of serializer implementations keyed by wire-format name.
serializers = {'yaml': YAMLSerializer,
               'msgpack': MessagePackSerializer,
               'hdf5': HDF5Serializer,
               'json': jsonSerializer}

# Ready-to-use singleton instances, one per registered format.
instances = dict((name, cls()) for name, cls in serializers.items())

# Reverse map: serializer class -> format name.
serial_types = dict((cls, name) for name, cls in serializers.items())

# Compression schemes understood by Serializer.dumps()/loads().
compression_types = ['gzip', '']
|
373 |
|
1 | NO CONTENT: new file 100644 |
@@ -0,0 +1,187 | |||
|
1 | ||
|
2 | # | |
|
3 | # when Serializer is imported alone, fault indicates this package is | |
|
4 | # dependent on lookup, but don't import Lookup, instead: | |
|
5 | # | |
|
6 | import DynamicObject # dependent on pysvn | |
|
7 | import Serializer # dependent on Dynamic Object | |
|
8 | ||
|
9 | import msgpack | |
|
10 | import redis | |
|
11 | ||
|
12 | import numpy as np | |
|
13 | ||
|
class NamedObject():
    """Mixin with helpers that search lists of objects by *named* attribute.

    The attribute name is supplied as a string and resolved with eval(), so
    callers must pass trusted identifiers only.
    NOTE(review): both matchers call `isInlist(...)`, which is not defined or
    imported in this module -- verify it is provided elsewhere before use.
    """

    #---------------------------------------------
    @staticmethod
    def indexListMatch(list_of_lists, name, value, name2=None, value2=None, unique_f=False):
        #
        # for each list's <element> attribute compare with value
        # if match, return True plus list
        # else return False plus empty list
        #
        # search needs to be named part of class for object else .<value> is unrecognized
        #
        # unique_f finds non-uniqueness


        index = []     # return empty indices
        list_data = [] # return empty list
        ii = 0
        for theList in list_of_lists:

            # Build the comparisons dynamically from the attribute name;
            # cmd1 also matches when the attribute is itself a list.
            cmd0 = "theList.%s == value" % (name)
            cmd1 = "isInlist(theList.%s,value)" % name
            # if name is valid then
            #   match name against value
            #   match name (as list) against value
            if (eval(cmd0) or eval(cmd1)):
                if (name2 != None):
                    # Optional second attribute must match as well.
                    cmd2 = "theList.%s == value2" % name2
                    cmd3 = "isInlist(theList.%s,value2)" % name2
                    if (eval(cmd2) or eval(cmd3)):
                        if (unique_f):
                            index = index + [ii]
                            list_data = list_data + [theList]  # save list of lists if non-unique
                            # don't exit on match, may be non-unique
                        else:
                            list_data = theList  # save the list
                            index = [ii]
                            break
                else:
                    if (unique_f):
                        index = index + [ii]
                        list_data = list_data + [theList]  # list of lists if non-unique
                    else:
                        list_data = theList
                        index = [ii]
                        break  # exit on match
            #endif
            ii = ii + 1
        #end for

        return index, list_data  # return indices of matches and list (or list of lists)

    #end indexListMatch

    #---------------------------------------------
    @staticmethod
    def namedListMatch(list_of_lists, name, value, name2=None, value2=None, unique_f=None):
        #
        # for each list's <element> attribute compare with value
        # if match, return True plus list
        # else return False plus empty list
        #
        # search needs to be named part of class for object else .<value> is unrecognized
        #
        # unique_f finds non-uniqueness ('None' is same as False)

        match_f = False
        list_data = []  # initialize

        for theList in list_of_lists:

            # Same eval-based matching as indexListMatch, but reports a
            # boolean instead of positional indices.
            cmd0 = "theList.%s == value" % (name)
            cmd1 = "isInlist(theList.%s,value)" % name
            # if name is valid then
            #   match name against value
            #   match name (as list) against value
            if (eval(cmd0) or eval(cmd1)):
                if (name2 != None):
                    cmd2 = "theList.%s == value2" % name2
                    cmd3 = "isInlist(theList.%s,value2)" % name2
                    if (eval(cmd2) or eval(cmd3)):
                        match_f = True
                        if (unique_f):
                            list_data = list_data + [theList]  # save list of lists if non-unique
                            # don't exit on match, may be non-unique
                        else:
                            list_data = theList  # save the list
                            break
                else:
                    match_f = True
                    if (unique_f):
                        list_data = list_data + [theList]  # list of lists if non-unique
                    else:
                        list_data = theList
                        break  # exit on match
            #endif
        #end for

        return match_f, list_data  # return match, and list (or list of lists)

    #end namedListMatch

    #---------------------------------------------
    @staticmethod
    def combineLists(object):
        #
        # used for dumping elements in list of lists for debugging
        #
        ret_list =[]
        ii = 0
        # `object` exposes `.list` per element but is not a real Python list,
        # so a manual index loop is used instead of iteration.
        while ii < len(object):
            ret_list = ret_list + [object[ii].list]  # not a real list, so can't use built-in list iterator
            ii = ii + 1
        return ret_list

    # end combineLists
|
130 | ||
|
131 | ||
|
132 | ||
|
class StateListObject(NamedObject):
    """Record of state-machine attributes, exposed both as fields and a list."""

    def __init__(self, concurrent=None, hierarchical=None, history=None, state=None):
        self.concurrent, self.hierarchical = concurrent, hierarchical
        self.history, self.state = history, state
        # Parallel list view of the four fields, in declaration order.
        self.list = [concurrent, hierarchical, history, state]
#end class StateListObject
|
141 | ||
|
# --- Manual smoke test: serialize objects, round-trip one through redis. ---
source_object = "my test string"

# Serializer may be given as a name or an instance; normalize to an instance.
serializer = "yaml"
#
# python versioning issue (ver 2.7 -> else path)
#
if isinstance(serializer,Serializer.Serializer):
    serial_type = Serializer.serial_types[serializer]
else:
    serial_type = serializer
    serializer = Serializer.serializers[serializer]()



# Round-trip a plain string in memory.
datastr = serializer.toSerial(source_object)

dest_object = serializer.fromSerial(datastr)

print "dest_object=",dest_object

# Round-trip a composite object (with a numpy array) through redis.
myObject = StateListObject(hierarchical="yes",state=np.array([1,2,3.0]))

datastr = serializer.toSerial(myObject)

# Extra msgpack framing so the payload is a single binary-safe blob.
packed = msgpack.packb(datastr)

try:
    r= redis.StrictRedis(host='localhost',port=6379,db=0)
except Exception as eobj:
    print "is the redis server running?",eobj
else:

    r.set('baz',packed)  # converts to string
    x = r.get('baz')

    unpacked = msgpack.unpackb(x)

    dest_object = serializer.fromSerial(unpacked)

    print "val1=",dest_object.hierarchical
    val2 = dest_object.state
    print "val2=",val2
    # can numpy array be used as array?
    print val2.shape
|
186 | ||
|
187 |
@@ -0,0 +1,175 | |||
|
1 | ''' | |
|
2 | Created on Jul 16, 2014 | |
|
3 | ||
|
4 | @author: roj-idl71 | |
|
5 | ''' | |
|
6 | """ | |
|
7 | Dependencies: | |
|
8 | ||
|
9 | pip install tabulate simplejson python-cjson ujson yajl msgpack-python | |
|
10 | ||
|
11 | """ | |
|
12 | ||
|
13 | from timeit import timeit | |
|
14 | from tabulate import tabulate | |
|
15 | ||
|
# NOTE(review): this first `setup` literal is immediately overwritten by the
# second assignment below, so it is dead code kept only for reference.
setup = '''d = {
 'words': """
   Lorem ipsum dolor sit amet, consectetur adipiscing
   elit. Mauris adipiscing adipiscing placerat.
   Vestibulum augue augue,
   pellentesque quis sollicitudin id, adipiscing.
   """,
 'boolean' : False,
 'list': range(10),
 'dict': dict((str(i),'a') for i in xrange(10)),
 'int': 100,
 'float': 100.123456,
}'''

# Benchmark fixture: a dict mixing text, numbers, containers and a datetime
# (the datetime is what trips plain-JSON encoders). Executed via timeit setup.
setup = '''import numpy;
import datetime;
d = {
 'words': """
   Lorem ipsum dolor sit amet, consectetur adipiscing
   elit. Mauris adipiscing adipiscing placerat.
   Vestibulum augue augue,
   pellentesque quis sollicitudin id, adipiscing.
   """,
 'boolean' : False,
 'list': range(10),
 'dict': dict((str(i),'a') for i in xrange(10)),
 'int': 100,
 'float': 100.123456,
 'datetime' : datetime.datetime(2001,1,1,10,10,10)
}'''
|
46 | ||
|
47 | ||
|
48 | setup_pickle = '%s ; import pickle ; src = pickle.dumps(d)' % setup | |
|
49 | setup_pickle2 = '%s ; import pickle ; src = pickle.dumps(d, 2)' % setup | |
|
50 | setup_cpickle = '%s ; import cPickle ; src = cPickle.dumps(d)' % setup | |
|
51 | setup_cpickle2 = '%s ; import cPickle ; src = cPickle.dumps(d, 2)' % setup | |
|
52 | setup_json = '%s ; import json; src = json.dumps(d)' % setup | |
|
53 | setup_ujson = '%s ; import ujson; src = ujson.encode(d)' % setup | |
|
54 | setup_cjson = '%s ; import cjson; src = cjson.encode(d)' % setup | |
|
55 | setup_simplejson = '%s ; import simplejson; src = simplejson.dump(d)' % setup | |
|
56 | setup_jsonpickle = '%s ; import jsonpickle; src = jsonpickle.encode(d)' % setup | |
|
57 | setup_yaml = '%s ; import yaml; src = yaml.dump(d)' % setup | |
|
58 | setup_msgpack = '%s ; import msgpack; src = msgpack.dumps(d)' % setup | |
|
59 | setup_msgpack_np = '%s; import msgpack_numpy as msgpack; src = msgpack.dumps(d)' % setup | |
|
60 | ||
|
# Benchmark matrix; entries the machine lacked deps for are commented out.
tests = [
    # (title, setup, enc_test, dec_test, result)
    ('cPickle (binary)', 'import cPickle; %s' % setup_cpickle2, 'cPickle.dumps(d, 2)', 'r = cPickle.loads(src)', 'print r'),
    ('cPickle (ascii)', 'import cPickle; %s' % setup_cpickle, 'cPickle.dumps(d, 0)', 'r = cPickle.loads(src)', 'print r'),
    ('pickle (binary)', 'import pickle; %s' % setup_pickle2, 'pickle.dumps(d, 2)', 'r = pickle.loads(src)', 'print r'),
    ('pickle (ascii)', 'import pickle; %s' % setup_pickle, 'pickle.dumps(d, 0)', 'r = pickle.loads(src)', 'print r'),
    ('jsonpickle', 'import jsonpickle; %s' % setup_jsonpickle, 'jsonpickle.encode(d)', 'r = jsonpickle.decode(src)', 'print r'),
#     ('msgpack-numpy-python', '%s' % setup_msgpack_np, 'msgpack.dumps(d)', 'r = msgpack.loads(src)', 'print r'),
    ('ujson', 'import ujson; %s' % setup_ujson, 'ujson.encode(d)', 'r = ujson.decode(src)', 'print r'),
#     ('msgpack-python', 'import msgpack; %s' % setup_msgpack, 'msgpack.dumps(d)', 'r = msgpack.loads(src)', 'print r'),
#     ('json', 'import json; %s' % setup_json, 'json.dumps(d)', 'r = json.loads(src)', 'print r'),
#     ('python-cjson-1.0.5', 'import cjson; %s' % setup_cjson, 'cjson.encode(d)', 'r = cjson.decode(src)', 'print r'),
#     ('simplejson-3.3.1', 'import simplejson; %s' % setup_json, 'simplejson.dumps(d)', 'r = simplejson.loads(src)', 'print r'),
    ('yaml', 'import yaml; %s' % setup_yaml, 'yaml.dump(d)', 'r = yaml.load(src)', 'print r'),
]
|
76 | ||
|
# Number of timeit iterations per codec (raise for stable numbers).
loops = 1
enc_table = []
dec_table = []

print "Running tests (%d loops each)" % loops

for title, mod, enc, dec, msg in tests:
    print title

    ### Getting the package size
    # Run the setup once so `src` exists; its length is the payload size.
    exec mod
    size = len("".join(src))

    print " [Encode]", enc
    result = timeit(enc, mod, number=loops)
    enc_table.append([title, result, size])

    print " [Decode]", dec
    result = timeit(dec, mod, number=loops)
    dec_table.append([title, result])

    # Sanity print of the decoded object (runs once, not timed meaningfully).
    print " Result"
    result = timeit(msg, mod+';'+dec, number=1)

# Sort fastest-first, then prepend the header row for tabulate.
enc_table.sort(key=lambda x: x[1])
enc_table.insert(0, ['Package', 'Seconds', 'Size'])

dec_table.sort(key=lambda x: x[1])
dec_table.insert(0, ['Package', 'Seconds'])

print "\nEncoding Test (%d loops)" % loops
print tabulate(enc_table, headers="firstrow")

print "\nDecoding Test (%d loops)" % loops
print tabulate(dec_table, headers="firstrow")
|
112 | ||
|
113 | """ | |
|
114 | OUTPUT: | |
|
115 | ||
|
116 | Running tests (15000 loops each) | |
|
117 | pickle (ascii) | |
|
118 | [Encode] pickle.dumps(d, 0) | |
|
119 | [Decode] pickle.loads(src) | |
|
120 | pickle (binary) | |
|
121 | [Encode] pickle.dumps(d, 2) | |
|
122 | [Decode] pickle.loads(src) | |
|
123 | cPickle (ascii) | |
|
124 | [Encode] cPickle.dumps(d, 0) | |
|
125 | [Decode] cPickle.loads(src) | |
|
126 | cPickle (binary) | |
|
127 | [Encode] cPickle.dumps(d, 2) | |
|
128 | [Decode] cPickle.loads(src) | |
|
129 | json | |
|
130 | [Encode] json.dumps(d) | |
|
131 | [Decode] json.loads(src) | |
|
132 | simplejson-3.3.1 | |
|
133 | [Encode] simplejson.dumps(d) | |
|
134 | [Decode] simplejson.loads(src) | |
|
135 | python-cjson-1.0.5 | |
|
136 | [Encode] cjson.encode(d) | |
|
137 | [Decode] cjson.decode(src) | |
|
138 | ujson-1.33 | |
|
139 | [Encode] ujson.dumps(d) | |
|
140 | [Decode] ujson.loads(src) | |
|
141 | yajl 0.3.5 | |
|
142 | [Encode] yajl.dumps(d) | |
|
143 | [Decode] yajl.loads(src) | |
|
144 | msgpack-python-0.3.0 | |
|
145 | [Encode] msgpack.dumps(d) | |
|
146 | [Decode] msgpack.loads(src) | |
|
147 | ||
|
148 | Encoding Test (15000 loops) | |
|
149 | Package Seconds | |
|
150 | -------------------- --------- | |
|
151 | ujson-1.33 0.232215 | |
|
152 | msgpack-python-0.3.0 0.241945 | |
|
153 | cPickle (binary) 0.305273 | |
|
154 | yajl 0.3.5 0.634148 | |
|
155 | python-cjson-1.0.5 0.680604 | |
|
156 | json 0.780438 | |
|
157 | simplejson-3.3.1 1.04763 | |
|
158 | cPickle (ascii) 1.62062 | |
|
159 | pickle (ascii) 14.0497 | |
|
160 | pickle (binary) 15.4712 | |
|
161 | ||
|
162 | Decoding Test (15000 loops) | |
|
163 | Package Seconds | |
|
164 | -------------------- --------- | |
|
165 | msgpack-python-0.3.0 0.240885 | |
|
166 | cPickle (binary) 0.393152 | |
|
167 | ujson-1.33 0.396875 | |
|
168 | python-cjson-1.0.5 0.694321 | |
|
169 | yajl 0.3.5 0.748369 | |
|
170 | simplejson-3.3.1 0.780531 | |
|
171 | cPickle (ascii) 1.38561 | |
|
172 | json 1.65921 | |
|
173 | pickle (binary) 5.20554 | |
|
174 | pickle (ascii) 17.8767 | |
|
175 | """ No newline at end of file |
@@ -0,0 +1,147 | |||
|
1 | ''' | |
|
2 | Created on Jul 11, 2014 | |
|
3 | ||
|
4 | @author: roj-idl71 | |
|
5 | ''' | |
|
6 | import time | |
|
7 | from gevent import sleep | |
|
8 | ||
|
9 | import zerorpc | |
|
10 | from schainpy.model import * | |
|
11 | from schainpy.serializer.DataTranslate import serial2Obj, serial2Dict | |
|
12 | # import schainpy.model.io.jroIO_usrp | |
|
13 | ||
|
def createObjVolt():
    '''
    Build a "VoltageProc" processing unit preloaded with external operations
    (CohInt, Scope).
    These class are found inside schainpy.model.proc and schainpy.model.graphics
    '''
    voltProcObj = VoltageProc()

    # Attach operations with the same (operation, id) pairs as before.
    voltProcObj.addOperation(CohInt(), 1)
    voltProcObj.addOperation(Scope(), 2)

    return voltProcObj
|
29 | ||
|
def createObjSpec():
    '''
    Build a "SpectraProc" processing unit with integration and plotting
    operations attached (IncohInt, SpectraPlot, RTIPlot).
    These class are found inside schainpy.model.proc and schainpy.model.graphics
    '''
    specProcObj = SpectraProc()

    # (objId, operation) wiring, in the same order and with the same ids
    # as the call sites in processingSpec() expect.
    wiring = [(1, IncohInt()),
              (2, SpectraPlot()),
              (3, RTIPlot()),
              (4, SpectraPlot()),
              (5, RTIPlot())]
    for objId, opObj in wiring:
        specProcObj.addOperation(opObj, objId=objId)

    return specProcObj
|
55 | ||
|
def processingSpec(procObj, dataInObj):
    # Push one voltage object through the spectra pipeline built by
    # createObjSpec(): FFT, incoherent integration, then plots.
    # opId values below must match the ids used in createObjSpec().

    procObj.setInput(dataInObj)
    procObj.run(nFFTPoints = 16)

    # opId 1: IncohInt, integrate n profiles.
    procObj.call(opType = "external",
                 opId = 1,
                 n=1)

    # opId 2: SpectraPlot on figure 191 with a fixed dB color range.
    procObj.call(opType = "external",
                 opId = 2,
                 id=191,
                 zmin=-100,
                 zmax=-40)

    # opId 3: RTIPlot on figure 192, 10-minute time window.
    procObj.call(opType = "external",
                 opId = 3,
                 id=192,
                 zmin=-100,
                 zmax=-40,
                 timerange=10*60)

#     procObj.call(opType = "self",
#                  opName = "selectChannels",
#                  channelList = [0,1])
#
#     procObj.call(opType = "self",
#                  opName = "selectHeights",
#                  minHei = 300,
#                  maxHei = 400)
#
#     procObj.call(opType = "external",
#                  opId = 4,
#                  id=193,
#                  zmin=-100,
#                  zmax=-40)
#
#     procObj.call(opType = "external",
#                  opId = 5,
#                  id=194,
#                  zmin=-100,
#                  zmax=-40,
#                  timerange=10*60)
99 | ||
|
def printSpeed(deltaTime, mySerial):
    """Print transfer statistics for one serialized payload.

    deltaTime -- elapsed time as computed by the caller (labelled ms below)
    mySerial  -- the serialized string just transferred

    NOTE(review): reads the global `replayerObj` created in __main__, so this
    helper only works inside this script -- consider passing it in.
    """
    ####################
    size = len(mySerial)/1024.       # payload size in KB
    vel = 1.0*size / deltaTime       # KB per deltaTime-unit

    print "Index [", replayerObj.getProfileIndex(), "]: ",
    print "Total time %5.2f ms, Data size %5.2f KB, Speed %5.2f MB/s" %(deltaTime, size, vel)
    ####################
|
109 | ||
|
if __name__ == '__main__':

    # Build the local spectra pipeline, then stream data objects from the
    # remote USRP replayer exposed over zerorpc.
    procObj = createObjSpec()

    replayerObj = zerorpc.Client()
    replayerObj.connect("tcp://127.0.0.1:4242")

    # Ask the server which wire serializer it is using (e.g. 'msgpack').
    serializer = replayerObj.getSerializer()

    ini = time.time()
    mySerialMetadata = replayerObj.getSerialMetaData()
    # NOTE(review): *1024 scales seconds by 1024 although printSpeed labels
    # the value "ms" (that would be *1000); presumably chosen so KB/deltaTime
    # comes out exactly in MB/s -- confirm intent before changing.
    deltaTime = (time.time() - ini)*1024

    printSpeed(deltaTime, mySerialMetadata)

    # Metadata is fetched once and reused for every data payload below.
    myMetaDict = serial2Dict(mySerialMetadata,
                             serializer = serializer)
#     print myMetaDict
    while True:
        ini = time.time()
        mySerialData = replayerObj.getSerialData()
        deltaTime = (time.time() - ini)*1024  # see NOTE above on *1024

        if not mySerialData:
            print "No more data"
            break

#         myDataDict = SERIALIZER.loads(mySerialData)
#         print myDataDict
#         continue

        printSpeed(deltaTime, mySerialData)

        # Rebuild the data object and run it through the local pipeline.
        dataInObj = serial2Obj(mySerialData,
                               metadataDict=myMetaDict,
                               serializer = serializer)
        processingSpec(procObj, dataInObj)
        # Yield to gevent so zerorpc heartbeats keep flowing.
        sleep(1e-1)
147 | sleep(1e-1) No newline at end of file |
@@ -0,0 +1,13 | |||
|
1 | ''' | |
|
2 | Created on Jul 11, 2014 | |
|
3 | ||
|
4 | @author: roj-idl71 | |
|
5 | ''' | |
|
6 | ||
|
7 | import zerorpc | |
|
8 | ||
|
9 | if __name__ == '__main__': | |
|
10 | c = zerorpc.Client() | |
|
11 | c.connect("tcp://127.0.0.1:4242") | |
|
12 | c.load("file2") # AAAHH! The previously loaded model gets overwritten here! | |
|
13 | print c.getModelName() No newline at end of file |
@@ -0,0 +1,33 | |||
|
1 | ''' | |
|
2 | Created on Jul 11, 2014 | |
|
3 | ||
|
4 | @author: roj-idl71 | |
|
5 | ''' | |
|
6 | # import sys | |
|
7 | import datetime | |
|
8 | import zerorpc | |
|
9 | ||
|
10 | from schainpy.model.io.jrodataIO import USRPReaderAPI | |
|
11 | # from schainpy.serializer.DataTranslate import serial2Obj | |
|
12 | ||
|
13 | if __name__ == '__main__': | |
|
14 | ||
|
15 | replayerObj = USRPReaderAPI(serializer='msgpack') | |
|
16 | ||
|
17 | replayerObj.setup(path='/Volumes/DATA/haystack/passive_radar/', | |
|
18 | startDate=datetime.date(2000,1,1), | |
|
19 | endDate=datetime.date(2015,1,1), | |
|
20 | startTime=datetime.time(0,0,0), | |
|
21 | endTime=datetime.time(23,59,59), | |
|
22 | online=1, | |
|
23 | nSamples=500, | |
|
24 | channelList = [0,1,2,3,4,5,6,7]) | |
|
25 | ||
|
26 | replayerObj.start() | |
|
27 | ||
|
28 | print "Initializing 'zerorpc' server" | |
|
29 | s = zerorpc.Server(replayerObj) | |
|
30 | s.bind("tcp://0.0.0.0:4242") | |
|
31 | s.run() | |
|
32 | ||
|
33 | print "End" No newline at end of file |
|
1 | NO CONTENT: new file 100644 |
@@ -0,0 +1,177 | |||
|
1 | ''' | |
|
2 | Created on Jul 15, 2014 | |
|
3 | ||
|
4 | @author: roj-idl71 | |
|
5 | ''' | |
|
6 | ||
|
7 | import sys | |
|
8 | import yaml | |
|
9 | import numpy | |
|
10 | import jsonpickle | |
|
11 | ||
|
12 | # import schainpy.serializer.DynamicSerializer as DynamicSerializer | |
|
13 | ||
|
14 | ||
|
15 | ||
|
def isNotClassVar(myObj):
    """Return True when *myObj* carries no __dict__, i.e. it is a plain
    value rather than a class instance."""

    hasAttributeDict = hasattr(myObj, '__dict__')
    return not hasAttributeDict
|
19 | ||
|
def isDictFormat(thisValue):
    """Return True when *thisValue* is a dict produced by obj2Dict.

    Such dicts are recognized by the reserved '__name__' key, which holds
    the original class name.
    """

    # isinstance also accepts dict subclasses, generalizing the old
    # ``type(thisValue) != type({})`` identity check.
    if not isinstance(thisValue, dict):
        return False

    # Membership test directly on the dict; no need to materialize keys().
    return '__name__' in thisValue
|
29 | ||
|
def obj2Dict(myObj):
    """Recursively convert an object's attributes into a plain dict.

    The class name is stored under the reserved '__name__' key; attributes
    that are themselves class instances are converted recursively so the
    result contains only plain values and nested dicts.
    """

    result = {'__name__': myObj.__class__.__name__}

    for attrName, attrValue in myObj.__dict__.items():

        if isNotClassVar(attrValue):
            result[attrName] = attrValue
        else:
            # Nested class instance: serialize it recursively.
            result[attrName] = obj2Dict(attrValue)

    return result
|
47 | ||
|
def dict2Obj(myDict):
    """Rebuild an object from a dict created by obj2Dict.

    Returns None when *myDict* lacks the reserved '__name__' key; nested
    obj2Dict-style dicts are rebuilt recursively.
    """

    if '__name__' not in myDict:
        return None

    # SECURITY: eval() resolves the stored class name in this module's
    # namespace and then instantiates it. Never feed this function a
    # dictionary deserialized from untrusted input.
    className = eval(myDict['__name__'])

    myObj = className()

    for thisKey, thisValue in myDict.items():

        if thisKey == '__name__':
            continue

        if isDictFormat(thisValue):
            # Nested serialized object: rebuild it recursively.
            setattr(myObj, thisKey, dict2Obj(thisValue))
        else:
            setattr(myObj, thisKey, thisValue)

    return myObj
|
72 | ||
|
class myTestClass3(object):
    """Leaf test fixture: two plain string attributes, no nesting."""

    def __init__(self):
        '''
        '''
        self.y1, self.y2 = 'y1', 'y2'
|
80 | ||
|
class myTestClass2(object):
    """Mid-level test fixture: two strings plus a nested myTestClass3,
    used to exercise recursive obj<->dict conversion."""

    def __init__(self):
        '''
        '''
        self.x1 = 'x1'
        self.x2 = 'x2'
        # Nested instance forces the recursive branch of obj2Dict/dict2Obj.
        self.otherObj = myTestClass3()
|
89 | ||
|
90 | ||
|
class myTestClass(object):
    """Top-level test fixture: class-level defaults mirrored by instance
    attributes, a nested myTestClass2, and a 'dtype' property that aliases
    value1 (mimicking the JROData property pattern)."""

    # Class-level defaults (shadowed per-instance in __init__).
    flagNoData = True
    value1 = 1
    value2 = 2
    myObj = None

    def __init__(self):

        '''
        '''
        self.flagNoData = True
        self.value1 = 1
        self.value2 = 2
        self.myObj = myTestClass2()

    def get_dtype(self):

        '''
        '''
        # 'dtype' is just an alias for value1.
        return self.value1

    def set_dtype(self, value):

        '''
        '''

        self.value1 = value

    dtype = property(get_dtype, set_dtype)
|
121 | ||
|
122 | def myMsgPackTest(): | |
|
123 | ||
|
124 | import msgpack | |
|
125 | import msgpack_numpy as m | |
|
126 | import numpy as np | |
|
127 | ||
|
128 | x = np.random.rand(5) | |
|
129 | x_enc = m.encode(x) | |
|
130 | x_rec = m.decode(x_enc) | |
|
131 | ||
|
132 | print x_rec | |
|
133 | # | |
|
134 | # x_enc = msgpack.packb(x, default=m.encoder) | |
|
135 | # x_rec = msgpack.unpackb(x_enc, object_hook=m.decoder) | |
|
136 | ||
|
137 | if __name__ == '__main__': | |
|
138 | ||
|
139 | myMsgPackTest() | |
|
140 | ||
|
141 | sys.exit() | |
|
142 | ||
|
143 | serializerObj = DynamicSerializer.DynamicSerializer('json') | |
|
144 | serializerObj = jsonpickle | |
|
145 | ||
|
146 | myTestObj = myTestClass() | |
|
147 | ||
|
148 | myTestObj.flagNoData = False | |
|
149 | myTestObj.value1 = [1+3.4j,4,'5',] | |
|
150 | myTestObj.value2 = {'x2': numpy.complex(1,2),'x1': 'x1'} | |
|
151 | # myTestObj.myObj.x2 = numpy.arange(15, dtype=numpy.complex) | |
|
152 | ||
|
153 | myDict = obj2Dict(myTestObj) | |
|
154 | ||
|
155 | myNewObj = dict2Obj(myDict) | |
|
156 | ||
|
157 | # print myDict | |
|
158 | # print myTestObj.__dict__ | |
|
159 | # print myNewObj.__dict__ | |
|
160 | ||
|
161 | # sys.exit() | |
|
162 | print myDict | |
|
163 | ||
|
164 | newSerial = serializerObj.encode(myDict) | |
|
165 | # print newSerial | |
|
166 | ||
|
167 | newDict = serializerObj.decode(newSerial) | |
|
168 | print newDict | |
|
169 | ||
|
170 | myNewObj = dict2Obj(newDict) | |
|
171 | ||
|
172 | ||
|
173 | ||
|
174 | print 50*'###' | |
|
175 | print myTestObj.__dict__ | |
|
176 | print myNewObj.__dict__ | |
|
177 | No newline at end of file |
@@ -0,0 +1,46 | |||
|
1 | ''' | |
|
2 | Created on Jul 15, 2014 | |
|
3 | ||
|
4 | @author: roj-idl71 | |
|
5 | ''' | |
|
6 | ||
|
7 | import sys | |
|
8 | import cPickle | |
|
9 | ||
|
10 | from schainpy.model.data.jrodata import Voltage | |
|
11 | # from schainpy.model.io.jrodataIO import USRPReaderMP | |
|
12 | from schainpy.serializer.DynamicSerializer import DynamicSerializer | |
|
13 | from schainpy.serializer.DataTranslate import obj2Dict, dict2Obj | |
|
14 | ||
|
15 | ||
|
16 | if __name__ == "__main__": | |
|
17 | ||
|
18 | serializerObj = DynamicSerializer('yaml') | |
|
19 | ||
|
20 | myTestObj = Voltage() | |
|
21 | ||
|
22 | myDict = obj2Dict(myTestObj) | |
|
23 | ||
|
24 | myNewObj = dict2Obj(myDict) | |
|
25 | ||
|
26 | # print myDict | |
|
27 | # print myTestObj.__dict__ | |
|
28 | # print myNewObj.__dict__ | |
|
29 | # | |
|
30 | ||
|
31 | # print '#############################' | |
|
32 | ||
|
33 | # newValue = serializerObj.dumps(myDict) | |
|
34 | # print newValue | |
|
35 | # | |
|
36 | # newValue = serializerObj.loads(newValue) | |
|
37 | # print newValue | |
|
38 | ||
|
39 | ||
|
40 | print '###########CPICKLE##################' | |
|
41 | print myDict | |
|
42 | newSerialized = cPickle.dumps(myDict, 2) | |
|
43 | # print newValue | |
|
44 | ||
|
45 | newDict = cPickle.loads(newSerialized) | |
|
46 | print newDict No newline at end of file |
@@ -0,0 +1,22 | |||
|
1 | ''' | |
|
2 | Created on Jul 16, 2014 | |
|
3 | ||
|
4 | @author: roj-idl71 | |
|
5 | ''' | |
|
6 | ||
|
7 | from distutils.core import setup, Extension | |
|
8 | ||
|
# Package manifest for the schainpy distribution.
setup(name="schainpy",
      version="1.0",
      description="Python tools to read, write and process Jicamarca data",
      author="Miguel Urco",
      author_email="miguel.urco@jro.igp.gob.pe",
      url="http://jro.igp.gob.pe",
      # BUGFIX: distutils expects 'packages' to be a *list* of package
      # names; the original passed a set literal, whose iteration order is
      # undefined and which distutils does not document as supported.
      packages = ['schainpy',
                  'schainpy.model',
                  'schainpy.model.data',
                  'schainpy.model.graphics',
                  'schainpy.model.io',
                  'schainpy.model.proc',
                  'schainpy.model.utils'],
      py_modules=['schainpy.serializer.DataTranslate',
                  'schainpy.serializer.JROSerializer'])
@@ -2,11 +2,11 | |||
|
2 | 2 | Created on September , 2012 |
|
3 | 3 | @author: |
|
4 | 4 | ''' |
|
5 |
from xml.etree.ElementTree import Element, SubElement |
|
|
5 | from xml.etree.ElementTree import Element, SubElement | |
|
6 | 6 | from xml.etree import ElementTree as ET |
|
7 | 7 | from xml.dom import minidom |
|
8 | 8 | |
|
9 | import datetime | |
|
9 | #import datetime | |
|
10 | 10 | from model import * |
|
11 | 11 | |
|
12 | 12 | import ast |
@@ -153,8 +153,12 class ParameterConf(): | |||
|
153 | 153 | self.id = parmElement.get('id') |
|
154 | 154 | self.name = parmElement.get('name') |
|
155 | 155 | self.value = parmElement.get('value') |
|
156 | self.format = parmElement.get('format') | |
|
156 | self.format = str.lower(parmElement.get('format')) | |
|
157 | 157 | |
|
158 | #Compatible with old signal chain version | |
|
159 | if self.format == 'int' and self.name == 'idfigure': | |
|
160 | self.name = 'id' | |
|
161 | ||
|
158 | 162 | def printattr(self): |
|
159 | 163 | |
|
160 | 164 | print "Parameter[%s]: name = %s, value = %s, format = %s" %(self.id, self.name, self.value, self.format) |
@@ -172,10 +176,10 class OperationConf(): | |||
|
172 | 176 | |
|
173 | 177 | def __init__(self): |
|
174 | 178 | |
|
175 | id = 0 | |
|
176 | name = None | |
|
177 | priority = None | |
|
178 | type = 'self' | |
|
179 | self.id = 0 | |
|
180 | self.name = None | |
|
181 | self.priority = None | |
|
182 | self.type = 'self' | |
|
179 | 183 | |
|
180 | 184 | |
|
181 | 185 | def __getNewId(self): |
@@ -228,6 +232,11 class OperationConf(): | |||
|
228 | 232 | self.type = opElement.get('type') |
|
229 | 233 | self.priority = opElement.get('priority') |
|
230 | 234 | |
|
235 | #Compatible with old signal chain version | |
|
236 | #Use of 'run' method instead 'init' | |
|
237 | if self.type == 'self' and self.name == 'init': | |
|
238 | self.name = 'run' | |
|
239 | ||
|
231 | 240 | self.parmConfObjList = [] |
|
232 | 241 | |
|
233 | 242 | parmElementList = opElement.getiterator(ParameterConf().getElementName()) |
@@ -235,8 +244,16 class OperationConf(): | |||
|
235 | 244 | for parmElement in parmElementList: |
|
236 | 245 | parmConfObj = ParameterConf() |
|
237 | 246 | parmConfObj.readXml(parmElement) |
|
238 | self.parmConfObjList.append(parmConfObj) | |
|
247 | ||
|
248 | #Compatible with old signal chain version | |
|
249 | #If an 'plot' OPERATION is found, changes name operation by the value of its type PARAMETER | |
|
250 | if self.type != 'self' and self.name == 'Plot': | |
|
251 | if parmConfObj.format == 'str' and parmConfObj.name == 'type': | |
|
252 | self.name = parmConfObj.value | |
|
253 | continue | |
|
239 | 254 | |
|
255 | self.parmConfObjList.append(parmConfObj) | |
|
256 | ||
|
240 | 257 | def printattr(self): |
|
241 | 258 | |
|
242 | 259 | print "%s[%s]: name = %s, type = %s, priority = %s" %(self.ELEMENTNAME, |
@@ -361,7 +378,16 class ProcUnitConf(): | |||
|
361 | 378 | self.name = upElement.get('name') |
|
362 | 379 | self.datatype = upElement.get('datatype') |
|
363 | 380 | self.inputId = upElement.get('inputId') |
|
364 |
|
|
|
381 | ||
|
382 | #Compatible with old signal chain version | |
|
383 | if self.ELEMENTNAME == ReadUnitConf().getElementName(): | |
|
384 | if 'Reader' not in self.name: | |
|
385 | self.name += 'Reader' | |
|
386 | ||
|
387 | if self.ELEMENTNAME == ProcUnitConf().getElementName(): | |
|
388 | if 'Proc' not in self.name: | |
|
389 | self.name += 'Proc' | |
|
390 | ||
|
365 | 391 | self.opConfObjList = [] |
|
366 | 392 | |
|
367 | 393 | opElementList = upElement.getiterator(OperationConf().getElementName()) |
@@ -509,10 +535,22 class Project(): | |||
|
509 | 535 | self.name = name |
|
510 | 536 | self.description = description |
|
511 | 537 | |
|
512 | def addReadUnit(self, datatype, **kwargs): | |
|
538 | def addReadUnit(self, datatype=None, name=None, **kwargs): | |
|
513 | 539 | |
|
540 | #Compatible with old signal chain version | |
|
541 | if datatype==None and name==None: | |
|
542 | raise ValueError, "datatype or name should be defined" | |
|
543 | ||
|
544 | if name==None: | |
|
545 | if 'Reader' in datatype: | |
|
546 | name = datatype | |
|
547 | else: | |
|
548 | name = '%sReader' %(datatype) | |
|
549 | ||
|
550 | if datatype==None: | |
|
551 | datatype = name.replace('Reader','') | |
|
552 | ||
|
514 | 553 | id = self.__getNewId() |
|
515 | name = '%s' %(datatype) | |
|
516 | 554 | |
|
517 | 555 | readUnitConfObj = ReadUnitConf() |
|
518 | 556 | readUnitConfObj.setup(id, name, datatype, **kwargs) |
@@ -521,10 +559,22 class Project(): | |||
|
521 | 559 | |
|
522 | 560 | return readUnitConfObj |
|
523 | 561 | |
|
524 |
def addProcUnit(self, d |
|
|
562 | def addProcUnit(self, inputId, datatype=None, name=None): | |
|
563 | ||
|
564 | #Compatible with old signal chain version | |
|
565 | if datatype==None and name==None: | |
|
566 | raise ValueError, "datatype or name should be defined" | |
|
567 | ||
|
568 | if name==None: | |
|
569 | if 'Proc' in datatype: | |
|
570 | name = datatype | |
|
571 | else: | |
|
572 | name = '%sProc' %(datatype) | |
|
573 | ||
|
574 | if datatype==None: | |
|
575 | datatype = name.replace('Proc','') | |
|
525 | 576 | |
|
526 | 577 | id = self.__getNewId() |
|
527 | name = '%s' %(datatype) | |
|
528 | 578 | |
|
529 | 579 | procUnitConfObj = ProcUnitConf() |
|
530 | 580 | procUnitConfObj.setup(id, name, datatype, inputId) |
@@ -552,7 +602,7 class Project(): | |||
|
552 | 602 | |
|
553 | 603 | self.makeXml() |
|
554 | 604 | |
|
555 | print prettify(self.projectElement) | |
|
605 | #print prettify(self.projectElement) | |
|
556 | 606 | |
|
557 | 607 | ElementTree(self.projectElement).write(filename, method='xml') |
|
558 | 608 | |
@@ -633,19 +683,29 class Project(): | |||
|
633 | 683 | |
|
634 | 684 | # for readUnitConfObj in self.readUnitConfObjList: |
|
635 | 685 | # readUnitConfObj.run() |
|
636 | ||
|
686 | ||
|
687 | print "*"*40 | |
|
688 | print " Starting SIGNAL CHAIN PROCESSING " | |
|
689 | print "*"*40 | |
|
690 | ||
|
691 | ||
|
692 | keyList = self.procUnitConfObjDict.keys() | |
|
693 | keyList.sort() | |
|
694 | ||
|
637 | 695 | while(True): |
|
638 | 696 | |
|
639 | 697 | finalSts = False |
|
640 | 698 | |
|
641 | for procUnitConfObj in self.procUnitConfObjDict.values(): | |
|
642 |
|
|
|
699 | for procKey in keyList: | |
|
700 | # print "Running the '%s' process with %s" %(procUnitConfObj.name, procUnitConfObj.id) | |
|
701 | ||
|
702 | procUnitConfObj = self.procUnitConfObjDict[procKey] | |
|
643 | 703 | sts = procUnitConfObj.run() |
|
644 | 704 | finalSts = finalSts or sts |
|
645 | 705 | |
|
646 | 706 | #If every process unit finished so end process |
|
647 | 707 | if not(finalSts): |
|
648 |
print "Every process unit |
|
|
708 | print "Every process unit have finished" | |
|
649 | 709 | break |
|
650 | 710 | |
|
651 | 711 | if __name__ == '__main__': |
@@ -1,5 +1,12 | |||
|
1 | from model.data.jrodata import * | |
|
2 | from model.io.jrodataIO import * | |
|
3 | from model.proc.jroprocessing import * | |
|
4 | from model.graphics.jroplot import * | |
|
5 | from model.utils.jroutils import * No newline at end of file | |
|
1 | #from schainpy.model.data.jrodata import * | |
|
2 | # from schainpy.model.io.jrodataIO import * | |
|
3 | # from schainpy.model.proc.jroprocessing import * | |
|
4 | # from schainpy.model.graphics.jroplot import * | |
|
5 | # from schainpy.model.utils.jroutils import * | |
|
6 | # from schainpy.serializer import * | |
|
7 | ||
|
8 | from data import * | |
|
9 | from io import * | |
|
10 | from proc import * | |
|
11 | from graphics import * | |
|
12 | from utils import * |
@@ -0,0 +1,3 | |||
|
1 | from jrodata import * | |
|
2 | from jroheaderIO import * | |
|
3 | from jroamisr import * No newline at end of file |
@@ -1,4 +1,5 | |||
|
1 | 1 | import numpy |
|
2 | import copy | |
|
2 | 3 | |
|
3 | 4 | class Beam: |
|
4 | 5 | def __init__(self): |
@@ -25,7 +26,7 class AMISR: | |||
|
25 | 26 | self.channelList = [0]#self.dataIn.channelList esto solo aplica para el caso de AMISR |
|
26 | 27 | self.dtype = numpy.dtype([('real','<f4'),('imag','<f4')]) |
|
27 | 28 | |
|
28 |
self.flag |
|
|
29 | self.flagDiscontinuousBlock = None#self.dataIn.flagDiscontinuousBlock | |
|
29 | 30 | #self.utctime = #self.firstdatatime |
|
30 | 31 | self.flagDecodeData = None#self.dataIn.flagDecodeData #asumo q la data esta decodificada |
|
31 | 32 | self.flagDeflipData = None#self.dataIn.flagDeflipData #asumo q la data esta sin flip |
@@ -49,8 +49,19 def getDataTypeCode(numpyDtype): | |||
|
49 | 49 | return datatype |
|
50 | 50 | |
|
51 | 51 | def hildebrand_sekhon(data, navg): |
|
52 | """ | |
|
53 | This method is for the objective determination of the noise level in Doppler spectra. This | |
|
54 | implementation technique is based on the fact that the standard deviation of the spectral | |
|
55 | densities is equal to the mean spectral density for white Gaussian noise | |
|
52 | 56 |
|
|
53 | data = data.copy() | |
|
57 | Inputs: | |
|
58 | Data : heights | |
|
59 | navg : numbers of averages | |
|
60 | ||
|
61 | Return: | |
|
62 | -1 : any error | |
|
63 | anoise : noise's level | |
|
64 | """ | |
|
54 | 65 | |
|
55 | 66 | sortdata = numpy.sort(data,axis=None) |
|
56 | 67 | lenOfData = len(sortdata) |
@@ -146,7 +157,7 class JROData(GenericData): | |||
|
146 | 157 | |
|
147 | 158 | channelList = None |
|
148 | 159 | |
|
149 |
flag |
|
|
160 | flagDiscontinuousBlock = False | |
|
150 | 161 | |
|
151 | 162 | useLocalTime = False |
|
152 | 163 | |
@@ -160,11 +171,11 class JROData(GenericData): | |||
|
160 | 171 | |
|
161 | 172 | blocksize = None |
|
162 | 173 | |
|
163 | nCode = None | |
|
164 | ||
|
165 | nBaud = None | |
|
166 | ||
|
167 | code = None | |
|
174 | # nCode = None | |
|
175 | # | |
|
176 | # nBaud = None | |
|
177 | # | |
|
178 | # code = None | |
|
168 | 179 | |
|
169 | 180 | flagDecodeData = False #asumo q la data no esta decodificada |
|
170 | 181 | |
@@ -178,7 +189,7 class JROData(GenericData): | |||
|
178 | 189 | |
|
179 | 190 | nCohInt = None |
|
180 | 191 | |
|
181 | noise = None | |
|
192 | # noise = None | |
|
182 | 193 | |
|
183 | 194 | windowOfFilter = 1 |
|
184 | 195 | |
@@ -294,7 +305,42 class JROData(GenericData): | |||
|
294 | 305 | ''' |
|
295 | 306 | |
|
296 | 307 | self.datatype = getDataTypeCode(numpyDtype) |
|
308 | ||
|
309 | def get_code(self): | |
|
310 | ''' | |
|
311 | ''' | |
|
312 | return self.radarControllerHeaderObj.code | |
|
313 | ||
|
314 | def set_code(self, code): | |
|
315 | ''' | |
|
316 | ''' | |
|
317 | self.radarControllerHeaderObj.code = code | |
|
318 | ||
|
319 | return | |
|
320 | ||
|
321 | def get_ncode(self): | |
|
322 | ''' | |
|
323 | ''' | |
|
324 | return self.radarControllerHeaderObj.nCode | |
|
325 | ||
|
326 | def set_ncode(self, nCode): | |
|
327 | ''' | |
|
328 | ''' | |
|
329 | self.radarControllerHeaderObj.nCode = nCode | |
|
330 | ||
|
331 | return | |
|
332 | ||
|
333 | def get_nbaud(self): | |
|
334 | ''' | |
|
335 | ''' | |
|
336 | return self.radarControllerHeaderObj.nBaud | |
|
297 | 337 | |
|
338 | def set_nbaud(self, nBaud): | |
|
339 | ''' | |
|
340 | ''' | |
|
341 | self.radarControllerHeaderObj.nBaud = nBaud | |
|
342 | ||
|
343 | return | |
|
298 | 344 | # def getTimeInterval(self): |
|
299 | 345 | # |
|
300 | 346 | # raise IOError, "This method should be implemented inside each Class" |
@@ -308,6 +354,9 class JROData(GenericData): | |||
|
308 | 354 | ippSeconds = property(get_ippSeconds, set_ippSeconds) |
|
309 | 355 | dtype = property(get_dtype, set_dtype) |
|
310 | 356 | # timeInterval = property(getTimeInterval, "I'm the 'timeInterval' property") |
|
357 | code = property(get_code, set_code) | |
|
358 | nCode = property(get_ncode, set_ncode) | |
|
359 | nBaud = property(get_nbaud, set_nbaud) | |
|
311 | 360 | |
|
312 | 361 | class Voltage(JROData): |
|
313 | 362 | |
@@ -345,7 +394,7 class Voltage(JROData): | |||
|
345 | 394 | |
|
346 | 395 | self.flagNoData = True |
|
347 | 396 | |
|
348 |
self.flag |
|
|
397 | self.flagDiscontinuousBlock = False | |
|
349 | 398 | |
|
350 | 399 | self.utctime = None |
|
351 | 400 | |
@@ -366,10 +415,10 class Voltage(JROData): | |||
|
366 | 415 | self.flagShiftFFT = False |
|
367 | 416 | |
|
368 | 417 | self.flagDataAsBlock = False #Asumo que la data es leida perfil a perfil |
|
369 | ||
|
418 | ||
|
370 | 419 | self.profileIndex = 0 |
|
371 | 420 | |
|
372 | def getNoisebyHildebrand(self): | |
|
421 | def getNoisebyHildebrand(self, channel = None): | |
|
373 | 422 | """ |
|
374 | 423 | Determino el nivel de ruido usando el metodo Hildebrand-Sekhon |
|
375 | 424 | |
@@ -377,21 +426,43 class Voltage(JROData): | |||
|
377 | 426 | noiselevel |
|
378 | 427 | """ |
|
379 | 428 | |
|
380 |
f |
|
|
381 |
da |
|
|
382 | self.noise[channel] = hildebrand_sekhon(daux, self.nCohInt) | |
|
429 | if channel != None: | |
|
430 | data = self.data[channel] | |
|
431 | nChannels = 1 | |
|
432 | else: | |
|
433 | data = self.data | |
|
434 | nChannels = self.nChannels | |
|
435 | ||
|
436 | noise = numpy.zeros(nChannels) | |
|
437 | power = data * numpy.conjugate(data) | |
|
383 | 438 | |
|
384 | return self.noise | |
|
385 | ||
|
386 | def getNoise(self, type = 1): | |
|
439 | for thisChannel in range(nChannels): | |
|
440 | if nChannels == 1: | |
|
441 | daux = power[:].real | |
|
442 | else: | |
|
443 | daux = power[thisChannel,:].real | |
|
444 | noise[thisChannel] = hildebrand_sekhon(daux, self.nCohInt) | |
|
387 | 445 | |
|
388 | self.noise = numpy.zeros(self.nChannels) | |
|
446 | return noise | |
|
447 | ||
|
448 | def getNoise(self, type = 1, channel = None): | |
|
389 | 449 | |
|
390 | 450 | if type == 1: |
|
391 | noise = self.getNoisebyHildebrand() | |
|
451 | noise = self.getNoisebyHildebrand(channel) | |
|
392 | 452 | |
|
393 | 453 | return 10*numpy.log10(noise) |
|
394 | 454 | |
|
455 | def getPower(self, channel = None): | |
|
456 | ||
|
457 | if channel != None: | |
|
458 | data = self.data[channel] | |
|
459 | else: | |
|
460 | data = self.data | |
|
461 | ||
|
462 | power = data * numpy.conjugate(data) | |
|
463 | ||
|
464 | return 10*numpy.log10(power.real) | |
|
465 | ||
|
395 | 466 | def getTimeInterval(self): |
|
396 | 467 | |
|
397 | 468 | timeInterval = self.ippSeconds * self.nCohInt |
@@ -461,7 +532,7 class Spectra(JROData): | |||
|
461 | 532 | |
|
462 | 533 | self.flagNoData = True |
|
463 | 534 | |
|
464 |
self.flag |
|
|
535 | self.flagDiscontinuousBlock = False | |
|
465 | 536 | |
|
466 | 537 | self.utctime = None |
|
467 | 538 | |
@@ -490,7 +561,7 class Spectra(JROData): | |||
|
490 | 561 | self.noise_estimation = None |
|
491 | 562 | |
|
492 | 563 | |
|
493 | def getNoisebyHildebrand(self): | |
|
564 | def getNoisebyHildebrand(self, xmin_index=None, xmax_index=None, ymin_index=None, ymax_index=None): | |
|
494 | 565 | """ |
|
495 | 566 | Determino el nivel de ruido usando el metodo Hildebrand-Sekhon |
|
496 | 567 | |
@@ -499,17 +570,19 class Spectra(JROData): | |||
|
499 | 570 | """ |
|
500 | 571 | |
|
501 | 572 | noise = numpy.zeros(self.nChannels) |
|
573 | ||
|
502 | 574 | for channel in range(self.nChannels): |
|
503 | daux = self.data_spc[channel,:,:] | |
|
575 | daux = self.data_spc[channel,xmin_index:xmax_index,ymin_index:ymax_index] | |
|
504 | 576 | noise[channel] = hildebrand_sekhon(daux, self.nIncohInt) |
|
505 | 577 | |
|
506 | 578 | return noise |
|
507 | 579 | |
|
508 | def getNoise(self): | |
|
580 | def getNoise(self, xmin_index=None, xmax_index=None, ymin_index=None, ymax_index=None): | |
|
581 | ||
|
509 | 582 | if self.noise_estimation != None: |
|
510 | 583 | return self.noise_estimation #this was estimated by getNoise Operation defined in jroproc_spectra.py |
|
511 | 584 | else: |
|
512 | noise = self.getNoisebyHildebrand() | |
|
585 | noise = self.getNoisebyHildebrand(xmin_index, xmax_index, ymin_index, ymax_index) | |
|
513 | 586 | return noise |
|
514 | 587 | |
|
515 | 588 | |
@@ -612,7 +685,7 class SpectraHeis(Spectra): | |||
|
612 | 685 | |
|
613 | 686 | self.flagNoData = True |
|
614 | 687 | |
|
615 |
self.flag |
|
|
688 | self.flagDiscontinuousBlock = False | |
|
616 | 689 | |
|
617 | 690 | # self.nPairs = 0 |
|
618 | 691 | |
@@ -648,7 +721,7 class Fits: | |||
|
648 | 721 | |
|
649 | 722 | flagNoData = True |
|
650 | 723 | |
|
651 |
flag |
|
|
724 | flagDiscontinuousBlock = False | |
|
652 | 725 | |
|
653 | 726 | useLocalTime = False |
|
654 | 727 | |
@@ -764,7 +837,7 class Fits: | |||
|
764 | 837 | |
|
765 | 838 | def getNoise(self, type = 1): |
|
766 | 839 | |
|
767 |
|
|
|
840 | #noise = numpy.zeros(self.nChannels) | |
|
768 | 841 | |
|
769 | 842 | if type == 1: |
|
770 | 843 | noise = self.getNoisebyHildebrand() |
@@ -840,7 +913,7 class Correlation(JROData): | |||
|
840 | 913 | |
|
841 | 914 | self.flagNoData = True |
|
842 | 915 | |
|
843 |
self.flag |
|
|
916 | self.flagDiscontinuousBlock = False | |
|
844 | 917 | |
|
845 | 918 | self.utctime = None |
|
846 | 919 | |
@@ -987,7 +1060,7 class Parameters(JROData): | |||
|
987 | 1060 | |
|
988 | 1061 | noise = None #Noise Potency |
|
989 | 1062 | |
|
990 |
|
|
|
1063 | initUtcTime = None #Initial UTC time | |
|
991 | 1064 | |
|
992 | 1065 | paramInterval = None #Time interval to calculate Parameters in seconds |
|
993 | 1066 | |
@@ -1021,8 +1094,8 class Parameters(JROData): | |||
|
1021 | 1094 | |
|
1022 | 1095 | datatime = [] |
|
1023 | 1096 | |
|
1024 |
datatime.append(self. |
|
|
1025 |
datatime.append(self. |
|
|
1097 | datatime.append(self.initUtcTime) | |
|
1098 | datatime.append(self.initUtcTime + self.outputInterval - 1) | |
|
1026 | 1099 | |
|
1027 | 1100 | datatime = numpy.array(datatime) |
|
1028 | 1101 |
@@ -86,7 +86,7 class Header(object): | |||
|
86 | 86 | print "#"*100 |
|
87 | 87 | for key in self.__dict__.keys(): |
|
88 | 88 | print "%s = %s" %(key, self.__dict__[key]) |
|
89 | ||
|
89 | ||
|
90 | 90 | class BasicHeader(Header): |
|
91 | 91 | |
|
92 | 92 | size = None |
@@ -168,15 +168,17 class SystemHeader(Header): | |||
|
168 | 168 | adcResolution = None |
|
169 | 169 | pciDioBusWidth = None |
|
170 | 170 | |
|
171 | def __init__(self): | |
|
171 | def __init__(self, nSamples=0, nProfiles=0, nChannels=0, adcResolution=14, pciDioBusWith=0): | |
|
172 | ||
|
172 | 173 | self.size = 24 |
|
173 |
self.nSamples = |
|
|
174 |
self.nProfiles = |
|
|
175 |
self.nChannels = |
|
|
176 |
self.adcResolution = |
|
|
177 |
self.pciDioBusWidth = |
|
|
178 | ||
|
174 | self.nSamples = nSamples | |
|
175 | self.nProfiles = nProfiles | |
|
176 | self.nChannels = nChannels | |
|
177 | self.adcResolution = adcResolution | |
|
178 | self.pciDioBusWidth = pciDioBusWith | |
|
179 | ||
|
179 | 180 | def read(self, fp): |
|
181 | ||
|
180 | 182 | try: |
|
181 | 183 | header = numpy.fromfile(fp,SYSTEM_STRUCTURE,1) |
|
182 | 184 | self.size = header['nSize'][0] |
@@ -222,41 +224,49 class RadarControllerHeader(Header): | |||
|
222 | 224 | |
|
223 | 225 | __C = 3e8 |
|
224 | 226 | |
|
225 |
def __init__(self |
|
|
227 | def __init__(self, expType=2, nTx=1, | |
|
228 | ippKm=None, txA=0, txB=0, | |
|
229 | nWindows=None, nHeights=None, firstHeight=None, deltaHeight=None, | |
|
230 | numTaus=0, line6Function=0, line5Function=0, fClock=0, | |
|
231 | prePulseBefore=0, prePulseAfter=0, | |
|
232 | codeType=0, nCode=0, nBaud=0, code=None, | |
|
233 | flip1=0, flip2=0): | |
|
234 | ||
|
226 | 235 | self.size = 116 |
|
227 |
self.expType = |
|
|
228 |
self.nTx = |
|
|
229 |
self.ipp = |
|
|
230 |
self.txA = |
|
|
231 |
self.txB = |
|
|
232 |
self. |
|
|
233 |
self. |
|
|
234 |
self. |
|
|
235 | self.line6Function = 0 | |
|
236 |
self. |
|
|
237 |
self. |
|
|
238 | self.prePulseBefore = 0 | |
|
239 | self.prePulserAfter = 0 | |
|
240 | self.rangeIpp = 0 | |
|
241 |
self. |
|
|
242 | self.rangeTxB = 0 | |
|
243 | ||
|
244 | self.samplingWindow = None | |
|
245 |
self.nHeights = |
|
|
246 |
self.firstHeight = |
|
|
247 |
self.deltaHeight = |
|
|
248 |
self.samplesWin = |
|
|
249 | ||
|
250 |
self.nCode = |
|
|
251 |
self.nBaud = |
|
|
252 |
self.code = |
|
|
253 |
self.flip1 = |
|
|
254 |
self.flip2 = |
|
|
236 | self.expType = expType | |
|
237 | self.nTx = nTx | |
|
238 | self.ipp = ippKm | |
|
239 | self.txA = txA | |
|
240 | self.txB = txB | |
|
241 | self.rangeIpp = ippKm | |
|
242 | self.rangeTxA = txA | |
|
243 | self.rangeTxB = txB | |
|
244 | ||
|
245 | self.nWindows = nWindows | |
|
246 | self.numTaus = numTaus | |
|
247 | self.codeType = codeType | |
|
248 | self.line6Function = line6Function | |
|
249 | self.line5Function = line5Function | |
|
250 | self.fClock = fClock | |
|
251 | self.prePulseBefore = prePulseBefore | |
|
252 | self.prePulserAfter = prePulseAfter | |
|
253 | ||
|
254 | self.nHeights = nHeights | |
|
255 | self.firstHeight = firstHeight | |
|
256 | self.deltaHeight = deltaHeight | |
|
257 | self.samplesWin = nHeights | |
|
258 | ||
|
259 | self.nCode = nCode | |
|
260 | self.nBaud = nBaud | |
|
261 | self.code = code | |
|
262 | self.flip1 = flip1 | |
|
263 | self.flip2 = flip2 | |
|
255 | 264 | |
|
256 | 265 | # self.dynamic = numpy.array([],numpy.dtype('byte')) |
|
257 | 266 | |
|
258 | 267 | |
|
259 | 268 | def read(self, fp): |
|
269 | ||
|
260 | 270 | try: |
|
261 | 271 | startFp = fp.tell() |
|
262 | 272 | header = numpy.fromfile(fp,RADAR_STRUCTURE,1) |
@@ -285,20 +295,20 class RadarControllerHeader(Header): | |||
|
285 | 295 | # backFp = fp.tell() - jumpFp |
|
286 | 296 | # fp.seek(backFp) |
|
287 | 297 | |
|
288 |
|
|
|
298 | samplingWindow = numpy.fromfile(fp,SAMPLING_STRUCTURE,self.nWindows) | |
|
289 | 299 | |
|
290 |
self.nHeights = int(numpy.sum( |
|
|
291 |
self.firstHeight = |
|
|
292 |
self.deltaHeight = |
|
|
293 |
self.samplesWin = |
|
|
300 | self.nHeights = int(numpy.sum(samplingWindow['nsa'])) | |
|
301 | self.firstHeight = samplingWindow['h0'] | |
|
302 | self.deltaHeight = samplingWindow['dh'] | |
|
303 | self.samplesWin = samplingWindow['nsa'] | |
|
294 | 304 | |
|
295 | 305 | self.Taus = numpy.fromfile(fp,'<f4',self.numTaus) |
|
296 | 306 | |
|
297 | 307 | if self.codeType != 0: |
|
298 | 308 | self.nCode = int(numpy.fromfile(fp,'<u4',1)) |
|
299 | 309 | self.nBaud = int(numpy.fromfile(fp,'<u4',1)) |
|
300 |
self.code = numpy.empty([self.nCode,self.nBaud],dtype=' |
|
|
301 | ||
|
310 | self.code = numpy.empty([self.nCode,self.nBaud],dtype='i1') | |
|
311 | ||
|
302 | 312 | for ic in range(self.nCode): |
|
303 | 313 | temp = numpy.fromfile(fp,'u4',int(numpy.ceil(self.nBaud/32.))) |
|
304 | 314 | for ib in range(self.nBaud-1,-1,-1): |
@@ -349,7 +359,8 class RadarControllerHeader(Header): | |||
|
349 | 359 | #dynamic = self.dynamic |
|
350 | 360 | #dynamic.tofile(fp) |
|
351 | 361 | |
|
352 | samplingWindow = self.samplingWindow | |
|
362 | sampleWindowTuple = (self.firstHeight,self.deltaHeight,self.samplesWin) | |
|
363 | samplingWindow = numpy.array(sampleWindowTuple,SAMPLING_STRUCTURE) | |
|
353 | 364 | samplingWindow.tofile(fp) |
|
354 | 365 | |
|
355 | 366 | if self.numTaus > 0: |
@@ -419,6 +430,7 class ProcessingHeader(Header): | |||
|
419 | 430 | flag_cspc = None |
|
420 | 431 | |
|
421 | 432 | def __init__(self): |
|
433 | ||
|
422 | 434 | self.size = 0 |
|
423 | 435 | self.dtype = 0 |
|
424 | 436 | self.blockSize = 0 |
@@ -430,8 +442,6 class ProcessingHeader(Header): | |||
|
430 | 442 | self.nIncohInt = 0 |
|
431 | 443 | self.totalSpectra = 0 |
|
432 | 444 | |
|
433 | self.samplingWindow = 0 | |
|
434 | ||
|
435 | 445 | self.nHeights = 0 |
|
436 | 446 | self.firstHeight = 0 |
|
437 | 447 | self.deltaHeight = 0 |
@@ -458,12 +468,12 class ProcessingHeader(Header): | |||
|
458 | 468 | self.nIncohInt = int(header['nIncoherentIntegrations'][0]) |
|
459 | 469 | self.totalSpectra = int(header['nTotalSpectra'][0]) |
|
460 | 470 | |
|
461 |
|
|
|
471 | samplingWindow = numpy.fromfile(fp,SAMPLING_STRUCTURE,self.nWindows) | |
|
462 | 472 | |
|
463 |
self.nHeights = int(numpy.sum( |
|
|
464 |
self.firstHeight = float( |
|
|
465 |
self.deltaHeight = float( |
|
|
466 |
self.samplesWin = |
|
|
473 | self.nHeights = int(numpy.sum(samplingWindow['nsa'])) | |
|
474 | self.firstHeight = float(samplingWindow['h0'][0]) | |
|
475 | self.deltaHeight = float(samplingWindow['dh'][0]) | |
|
476 | self.samplesWin = samplingWindow['nsa'][0] | |
|
467 | 477 | self.spectraComb = numpy.fromfile(fp,'u1',2*self.totalSpectra) |
|
468 | 478 | |
|
469 | 479 | # if ((self.processFlags & PROCFLAG.DEFINE_PROCESS_CODE) == PROCFLAG.DEFINE_PROCESS_CODE): |
@@ -501,6 +511,7 class ProcessingHeader(Header): | |||
|
501 | 511 | return 1 |
|
502 | 512 | |
|
503 | 513 | def write(self, fp): |
|
514 | ||
|
504 | 515 | headerTuple = (self.size, |
|
505 | 516 | self.dtype, |
|
506 | 517 | self.blockSize, |
@@ -0,0 +1,5 | |||
|
1 | from jroplot_voltage import * | |
|
2 | from jroplot_spectra import * | |
|
3 | from jroplot_heispectra import * | |
|
4 | from jroplot_correlation import * | |
|
5 | from jroplot_parameters import * No newline at end of file |
@@ -45,7 +45,7 class Figure: | |||
|
45 | 45 | |
|
46 | 46 | def __del__(self): |
|
47 | 47 | |
|
48 | self.__driver.closeFigure() | |
|
48 | self.__driver.closeFigure(True) | |
|
49 | 49 | |
|
50 | 50 | def getFilename(self, name, ext='.png'): |
|
51 | 51 | |
@@ -70,7 +70,7 class Figure: | |||
|
70 | 70 | |
|
71 | 71 | return widthscreen, heightscreen |
|
72 | 72 | |
|
73 |
def getTimeLim(self, x, xmin=None, xmax=None, timerange=None |
|
|
73 | def getTimeLim(self, x, xmin=None, xmax=None, timerange=None): | |
|
74 | 74 | |
|
75 | 75 | if self.xmin != None and self.xmax != None: |
|
76 | 76 | if timerange == None: |
@@ -80,17 +80,18 class Figure: | |||
|
80 | 80 | |
|
81 | 81 | return xmin, xmax |
|
82 | 82 | |
|
83 | ||
|
84 | if timerange != None and self.xmin == None and self.xmax == None: | |
|
85 | txmin = x[0] - x[0]%timerange | |
|
83 | if timerange == None and (xmin==None or xmax==None): | |
|
84 | raise ValueError, "timerange or xmin+xmax should be defined" | |
|
85 | ||
|
86 | if timerange != None: | |
|
87 | txmin = x[0] - x[0] % min(timerange/10, 10*60) | |
|
86 | 88 | else: |
|
87 | txmin = numpy.min(x) | |
|
88 | timerange = self.timerange | |
|
89 | txmin = x[0] - x[0] % 10*60 | |
|
89 | 90 | |
|
90 | 91 | thisdatetime = datetime.datetime.utcfromtimestamp(txmin) |
|
91 | 92 | thisdate = datetime.datetime.combine(thisdatetime.date(), datetime.time(0,0,0)) |
|
92 | 93 | |
|
93 |
if |
|
|
94 | if timerange != None: | |
|
94 | 95 | xmin = (thisdatetime - thisdate).seconds/(60*60.) |
|
95 | 96 | xmax = xmin + timerange/(60*60.) |
|
96 | 97 |
@@ -75,7 +75,7 class CorrelationPlot(Figure): | |||
|
75 | 75 | |
|
76 | 76 | def run(self, dataOut, id, wintitle="", channelList=None, showprofile=False, |
|
77 | 77 | xmin=None, xmax=None, ymin=None, ymax=None, zmin=None, zmax=None, |
|
78 | save=False, figpath='', figfile=None, show=True, ftp=False, wr_period=1, | |
|
78 | save=False, figpath='./', figfile=None, show=True, ftp=False, wr_period=1, | |
|
79 | 79 | server=None, folder=None, username=None, password=None, |
|
80 | 80 | ftp_wei=0, exp_code=0, sub_exp_code=0, plot_pos=0, realtime=False): |
|
81 | 81 | |
@@ -127,7 +127,7 class CorrelationPlot(Figure): | |||
|
127 | 127 | # noise = dataOut.noise/factor |
|
128 | 128 | |
|
129 | 129 | #thisDatetime = dataOut.datatime |
|
130 |
thisDatetime = datetime.datetime.utcfromtimestamp(dataOut.getTimeRange()[ |
|
|
130 | thisDatetime = datetime.datetime.utcfromtimestamp(dataOut.getTimeRange()[0]) | |
|
131 | 131 | title = wintitle + " Correlation" |
|
132 | 132 | xlabel = "Lag T (s)" |
|
133 | 133 | ylabel = "Range (Km)" |
@@ -180,17 +180,19 class CorrelationPlot(Figure): | |||
|
180 | 180 | |
|
181 | 181 | self.draw() |
|
182 | 182 | |
|
183 |
if |
|
|
184 | str_datetime = thisDatetime.strftime("%Y%m%d_%H%M%S") | |
|
185 | figfile = self.getFilename(name = str_datetime) | |
|
186 | ||
|
187 | if figpath != '': | |
|
183 | if save: | |
|
184 | ||
|
185 | if figfile == None: | |
|
186 | str_datetime = thisDatetime.strftime("%Y%m%d_%H%M%S") | |
|
187 | figfile = self.getFilename(name = str_datetime) | |
|
188 | ||
|
188 | 189 | self.counter_imagwr += 1 |
|
189 | 190 | if (self.counter_imagwr>=wr_period): |
|
190 | 191 | # store png plot to local folder |
|
191 | 192 | self.saveFigure(figpath, figfile) |
|
192 |
# store png plot to FTP server according to RT-Web format |
|
|
193 | name = self.getNameToFtp(thisDatetime, self.FTP_WEI, self.EXP_CODE, self.SUB_EXP_CODE, self.PLOT_CODE, self.PLOT_POS) | |
|
194 | ftp_filename = os.path.join(figpath, name) | |
|
195 | self.saveFigure(figpath, ftp_filename) | |
|
193 | # store png plot to FTP server according to RT-Web format | |
|
194 | if ftp: | |
|
195 | name = self.getNameToFtp(thisDatetime, self.FTP_WEI, self.EXP_CODE, self.SUB_EXP_CODE, self.PLOT_CODE, self.PLOT_POS) | |
|
196 | ftp_filename = os.path.join(figpath, name) | |
|
197 | self.saveFigure(figpath, ftp_filename) | |
|
196 | 198 | self.counter_imagwr = 0 |
@@ -1,8 +1,8 | |||
|
1 | 1 | ''' |
|
2 | Created on Jul 9, 2014 | |
|
2 | 3 | |
|
3 | @author: Daniel Suarez | |
|
4 | @author: roj-idl71 | |
|
4 | 5 | ''' |
|
5 | ||
|
6 | 6 | import os |
|
7 | 7 | import datetime |
|
8 | 8 | import numpy |
@@ -75,7 +75,7 class SpectraHeisScope(Figure): | |||
|
75 | 75 | |
|
76 | 76 | def run(self, dataOut, id, wintitle="", channelList=None, |
|
77 | 77 | xmin=None, xmax=None, ymin=None, ymax=None, save=False, |
|
78 | figpath='', figfile=None, ftp=False, wr_period=1, show=True, | |
|
78 | figpath='./', figfile=None, ftp=False, wr_period=1, show=True, | |
|
79 | 79 | server=None, folder=None, username=None, password=None): |
|
80 | 80 | |
|
81 | 81 | """ |
@@ -120,7 +120,7 class SpectraHeisScope(Figure): | |||
|
120 | 120 | y = datadB |
|
121 | 121 | |
|
122 | 122 | #thisDatetime = dataOut.datatime |
|
123 |
thisDatetime = datetime.datetime.utcfromtimestamp(dataOut.getTimeRange()[ |
|
|
123 | thisDatetime = datetime.datetime.utcfromtimestamp(dataOut.getTimeRange()[0]) | |
|
124 | 124 | title = wintitle + " Scope: %s" %(thisDatetime.strftime("%d-%b-%Y %H:%M:%S")) |
|
125 | 125 | xlabel = "" |
|
126 | 126 | #para 1Mhz descomentar la siguiente linea |
@@ -155,12 +155,13 class SpectraHeisScope(Figure): | |||
|
155 | 155 | |
|
156 | 156 | |
|
157 | 157 | self.draw() |
|
158 | ||
|
159 | if figfile == None: | |
|
160 | str_datetime = thisDatetime.strftime("%Y%m%d_%H%M%S") | |
|
161 | figfile = self.getFilename(name = str_datetime) | |
|
162 | 158 | |
|
163 |
if |
|
|
159 | if save: | |
|
160 | ||
|
161 | if figfile == None: | |
|
162 | str_datetime = thisDatetime.strftime("%Y%m%d_%H%M%S") | |
|
163 | figfile = self.getFilename(name = str_datetime) | |
|
164 | ||
|
164 | 165 | self.counter_imagwr += 1 |
|
165 | 166 | if (self.counter_imagwr>=wr_period): |
|
166 | 167 | # store png plot to local folder |
@@ -223,7 +224,7 class RTIfromSpectraHeis(Figure): | |||
|
223 | 224 | def run(self, dataOut, id, wintitle="", channelList=None, showprofile='True', |
|
224 | 225 | xmin=None, xmax=None, ymin=None, ymax=None, |
|
225 | 226 | timerange=None, |
|
226 | save=False, figpath='', figfile=None, ftp=False, wr_period=1, show=True, | |
|
227 | save=False, figpath='./', figfile=None, ftp=False, wr_period=1, show=True, | |
|
227 | 228 | server=None, folder=None, username=None, password=None): |
|
228 | 229 | |
|
229 | 230 | if channelList == None: |
@@ -254,7 +255,7 class RTIfromSpectraHeis(Figure): | |||
|
254 | 255 | # noisedB = 10*numpy.log10(noise) |
|
255 | 256 | |
|
256 | 257 | #thisDatetime = dataOut.datatime |
|
257 |
thisDatetime = datetime.datetime.utcfromtimestamp(dataOut.getTimeRange()[ |
|
|
258 | thisDatetime = datetime.datetime.utcfromtimestamp(dataOut.getTimeRange()[0]) | |
|
258 | 259 | title = wintitle + " RTI: %s" %(thisDatetime.strftime("%d-%b-%Y")) |
|
259 | 260 | xlabel = "Local Time" |
|
260 | 261 | ylabel = "Intensity (dB)" |
@@ -310,11 +311,12 class RTIfromSpectraHeis(Figure): | |||
|
310 | 311 | del self.ydata |
|
311 | 312 | self.__isConfig = False |
|
312 | 313 | |
|
313 | if self.figfile == None: | |
|
314 | str_datetime = thisDatetime.strftime("%Y%m%d_%H%M%S") | |
|
315 | self.figfile = self.getFilename(name = str_datetime) | |
|
316 | ||
|
317 | if figpath != '': | |
|
314 | if save: | |
|
315 | ||
|
316 | if self.figfile == None: | |
|
317 | str_datetime = thisDatetime.strftime("%Y%m%d_%H%M%S") | |
|
318 | self.figfile = self.getFilename(name = str_datetime) | |
|
319 | ||
|
318 | 320 | self.counter_imagwr += 1 |
|
319 | 321 | if (self.counter_imagwr>=wr_period): |
|
320 | 322 | # store png plot to local folder |
@@ -73,7 +73,7 class MomentsPlot(Figure): | |||
|
73 | 73 | |
|
74 | 74 | def run(self, dataOut, id, wintitle="", channelList=None, showprofile=True, |
|
75 | 75 | xmin=None, xmax=None, ymin=None, ymax=None, zmin=None, zmax=None, |
|
76 | save=False, figpath='', figfile=None, show=True, ftp=False, wr_period=1, | |
|
76 | save=False, figpath='./', figfile=None, show=True, ftp=False, wr_period=1, | |
|
77 | 77 | server=None, folder=None, username=None, password=None, |
|
78 | 78 | ftp_wei=0, exp_code=0, sub_exp_code=0, plot_pos=0, realtime=False): |
|
79 | 79 | |
@@ -124,7 +124,7 class MomentsPlot(Figure): | |||
|
124 | 124 | noisedB = 10*numpy.log10(noise) |
|
125 | 125 | |
|
126 | 126 | #thisDatetime = dataOut.datatime |
|
127 |
thisDatetime = datetime.datetime.utcfromtimestamp(dataOut.getTimeRange()[ |
|
|
127 | thisDatetime = datetime.datetime.utcfromtimestamp(dataOut.getTimeRange()[0]) | |
|
128 | 128 | title = wintitle + " Parameters" |
|
129 | 129 | xlabel = "Velocity (m/s)" |
|
130 | 130 | ylabel = "Range (Km)" |
@@ -180,20 +180,24 class MomentsPlot(Figure): | |||
|
180 | 180 | |
|
181 | 181 | self.draw() |
|
182 | 182 | |
|
183 |
if |
|
|
184 | str_datetime = thisDatetime.strftime("%Y%m%d_%H%M%S") | |
|
185 | figfile = self.getFilename(name = str_datetime) | |
|
186 | ||
|
187 | if figpath != '': | |
|
183 | if save: | |
|
184 | ||
|
185 | if figfile == None: | |
|
186 | str_datetime = thisDatetime.strftime("%Y%m%d_%H%M%S") | |
|
187 | figfile = self.getFilename(name = str_datetime) | |
|
188 | ||
|
188 | 189 | self.counter_imagwr += 1 |
|
189 | 190 | if (self.counter_imagwr>=wr_period): |
|
190 | 191 | # store png plot to local folder |
|
191 | 192 | self.saveFigure(figpath, figfile) |
|
192 | # store png plot to FTP server according to RT-Web format | |
|
193 | name = self.getNameToFtp(thisDatetime, self.FTP_WEI, self.EXP_CODE, self.SUB_EXP_CODE, self.PLOT_CODE, self.PLOT_POS) | |
|
194 | ftp_filename = os.path.join(figpath, name) | |
|
195 | self.saveFigure(figpath, ftp_filename) | |
|
196 | 193 | self.counter_imagwr = 0 |
|
194 | # store png plot to FTP server according to RT-Web format | |
|
195 | if ftp: | |
|
196 | name = self.getNameToFtp(thisDatetime, self.FTP_WEI, self.EXP_CODE, self.SUB_EXP_CODE, self.PLOT_CODE, self.PLOT_POS) | |
|
197 | ftp_filename = os.path.join(figpath, name) | |
|
198 | self.saveFigure(figpath, ftp_filename) | |
|
199 | ||
|
200 | ||
|
197 | 201 | |
|
198 | 202 | class SkyMapPlot(Figure): |
|
199 | 203 | |
@@ -284,7 +288,7 class SkyMapPlot(Figure): | |||
|
284 | 288 | |
|
285 | 289 | |
|
286 | 290 | #thisDatetime = dataOut.datatime |
|
287 |
thisDatetime = datetime.datetime.utcfromtimestamp(dataOut.getTimeRange()[ |
|
|
291 | thisDatetime = datetime.datetime.utcfromtimestamp(dataOut.getTimeRange()[0]) | |
|
288 | 292 | title = wintitle + " Parameters" |
|
289 | 293 | xlabel = "Zonal Zenith Angle (deg) " |
|
290 | 294 | ylabel = "Meridional Zenith Angle (deg)" |
@@ -328,7 +332,9 class SkyMapPlot(Figure): | |||
|
328 | 332 | |
|
329 | 333 | if figfile == None: |
|
330 | 334 | figfile = self.getFilename(name = self.name) |
|
335 | ||
|
331 | 336 | self.saveFigure(figpath, figfile) |
|
337 | self.counter_imagwr = 0 | |
|
332 | 338 | |
|
333 | 339 | if ftp: |
|
334 | 340 | #provisionalmente envia archivos en el formato de la web en tiempo real |
@@ -345,7 +351,7 class SkyMapPlot(Figure): | |||
|
345 | 351 | self.counter_imagwr = 0 |
|
346 | 352 | raise ValueError, 'Error FTP' |
|
347 | 353 | |
|
348 | self.counter_imagwr = 0 | |
|
354 | ||
|
349 | 355 | |
|
350 | 356 | |
|
351 | 357 | class WindProfilerPlot(Figure): |
@@ -417,7 +423,7 class WindProfilerPlot(Figure): | |||
|
417 | 423 | xmin=None, xmax=None, ymin=None, ymax=None, zmin=None, zmax=None, |
|
418 | 424 | zmax_ver = None, zmin_ver = None, SNRmin = None, SNRmax = None, |
|
419 | 425 | timerange=None, SNRthresh = None, |
|
420 | save=False, figpath='', lastone=0,figfile=None, ftp=False, wr_period=1, show=True, | |
|
426 | save=False, figpath='./', lastone=0,figfile=None, ftp=False, wr_period=1, show=True, | |
|
421 | 427 | server=None, folder=None, username=None, password=None, |
|
422 | 428 | ftp_wei=0, exp_code=0, sub_exp_code=0, plot_pos=0): |
|
423 | 429 | """ |
@@ -477,7 +483,7 class WindProfilerPlot(Figure): | |||
|
477 | 483 | |
|
478 | 484 | showprofile = False |
|
479 | 485 | # thisDatetime = dataOut.datatime |
|
480 |
thisDatetime = datetime.datetime.utcfromtimestamp(dataOut.getTimeRange()[ |
|
|
486 | thisDatetime = datetime.datetime.utcfromtimestamp(dataOut.getTimeRange()[0]) | |
|
481 | 487 | title = wintitle + "Wind" |
|
482 | 488 | xlabel = "" |
|
483 | 489 | ylabel = "Range (Km)" |
@@ -553,22 +559,26 class WindProfilerPlot(Figure): | |||
|
553 | 559 | |
|
554 | 560 | self.draw() |
|
555 | 561 | |
|
556 | if self.figfile == None: | |
|
557 | str_datetime = thisDatetime.strftime("%Y%m%d_%H%M%S") | |
|
558 | self.figfile = self.getFilename(name = str_datetime) | |
|
559 | ||
|
560 | if figpath != '': | |
|
562 | if save: | |
|
561 | 563 | |
|
564 | if self.figfile == None: | |
|
565 | str_datetime = thisDatetime.strftime("%Y%m%d_%H%M%S") | |
|
566 | self.figfile = self.getFilename(name = str_datetime) | |
|
567 | ||
|
562 | 568 | self.counter_imagwr += 1 |
|
569 | ||
|
563 | 570 | if (self.counter_imagwr>=wr_period): |
|
564 | 571 | # store png plot to local folder |
|
565 | 572 | self.saveFigure(figpath, self.figfile) |
|
566 | # store png plot to FTP server according to RT-Web format | |
|
567 | name = self.getNameToFtp(thisDatetime, self.FTP_WEI, self.EXP_CODE, self.SUB_EXP_CODE, self.PLOT_CODE, self.PLOT_POS) | |
|
568 | ftp_filename = os.path.join(figpath, name) | |
|
569 | self.saveFigure(figpath, ftp_filename) | |
|
570 | ||
|
571 | 573 | self.counter_imagwr = 0 |
|
574 | ||
|
575 | if ftp: | |
|
576 | # store png plot to FTP server according to RT-Web format | |
|
577 | name = self.getNameToFtp(thisDatetime, self.FTP_WEI, self.EXP_CODE, self.SUB_EXP_CODE, self.PLOT_CODE, self.PLOT_POS) | |
|
578 | ftp_filename = os.path.join(figpath, name) | |
|
579 | self.saveFigure(figpath, ftp_filename) | |
|
580 | ||
|
581 | ||
|
572 | 582 | |
|
573 | 583 | if x[1] >= self.axesList[0].xmax: |
|
574 | 584 | self.counter_imagwr = wr_period |
@@ -651,9 +661,8 class ParametersPlot(Figure): | |||
|
651 | 661 | xmin=None, xmax=None, ymin=None, ymax=None, zmin=None, zmax=None,timerange=None, |
|
652 | 662 | parameterIndex = None, onlyPositive = False, |
|
653 | 663 | SNRthresh = -numpy.inf, SNR = True, SNRmin = None, SNRmax = None, |
|
654 | ||
|
655 | 664 | zlabel = "", parameterName = "", parameterObject = "data_param", |
|
656 | save=False, figpath='', lastone=0,figfile=None, ftp=False, wr_period=1, show=True, | |
|
665 | save=False, figpath='./', lastone=0,figfile=None, ftp=False, wr_period=1, show=True, | |
|
657 | 666 | server=None, folder=None, username=None, password=None, |
|
658 | 667 | ftp_wei=0, exp_code=0, sub_exp_code=0, plot_pos=0): |
|
659 | 668 | |
@@ -704,7 +713,7 class ParametersPlot(Figure): | |||
|
704 | 713 | ind = numpy.where(SNRdB < 10**(SNRthresh/10)) |
|
705 | 714 | z[ind] = numpy.nan |
|
706 | 715 | |
|
707 |
thisDatetime = datetime.datetime.utcfromtimestamp(dataOut.getTimeRange()[ |
|
|
716 | thisDatetime = datetime.datetime.utcfromtimestamp(dataOut.getTimeRange()[0]) | |
|
708 | 717 | title = wintitle + " Parameters Plot" #: %s" %(thisDatetime.strftime("%d-%b-%Y")) |
|
709 | 718 | xlabel = "" |
|
710 | 719 | ylabel = "Range (Km)" |
@@ -775,25 +784,27 class ParametersPlot(Figure): | |||
|
775 | 784 | |
|
776 | 785 | |
|
777 | 786 | |
|
778 |
self.draw() |
|
|
779 | ||
|
780 | if self.figfile == None: | |
|
781 | str_datetime = thisDatetime.strftime("%Y%m%d_%H%M%S") | |
|
782 | self.figfile = self.getFilename(name = str_datetime) | |
|
787 | self.draw() | |
|
783 | 788 | |
|
784 |
if |
|
|
789 | if save: | |
|
785 | 790 | |
|
791 | if self.figfile == None: | |
|
792 | str_datetime = thisDatetime.strftime("%Y%m%d_%H%M%S") | |
|
793 | self.figfile = self.getFilename(name = str_datetime) | |
|
794 | ||
|
786 | 795 | self.counter_imagwr += 1 |
|
796 | ||
|
787 | 797 | if (self.counter_imagwr>=wr_period): |
|
788 | # store png plot to local folder | |
|
798 | # store png plot to local folder | |
|
789 | 799 | self.saveFigure(figpath, self.figfile) |
|
790 | # store png plot to FTP server according to RT-Web format | |
|
791 | name = self.getNameToFtp(thisDatetime, self.FTP_WEI, self.EXP_CODE, self.SUB_EXP_CODE, self.PLOT_CODE, self.PLOT_POS) | |
|
792 | ftp_filename = os.path.join(figpath, name) | |
|
793 | self.saveFigure(figpath, ftp_filename) | |
|
794 | ||
|
795 | 800 | self.counter_imagwr = 0 |
|
796 | ||
|
801 | ||
|
802 | if ftp: | |
|
803 | # store png plot to FTP server according to RT-Web format | |
|
804 | name = self.getNameToFtp(thisDatetime, self.FTP_WEI, self.EXP_CODE, self.SUB_EXP_CODE, self.PLOT_CODE, self.PLOT_POS) | |
|
805 | ftp_filename = os.path.join(figpath, name) | |
|
806 | self.saveFigure(figpath, ftp_filename) | |
|
807 | ||
|
797 | 808 | if x[1] >= self.axesList[0].xmax: |
|
798 | 809 | self.counter_imagwr = wr_period |
|
799 | 810 | self.__isConfig = False |
@@ -923,7 +934,7 class SpectralFittingPlot(Figure): | |||
|
923 | 934 | |
|
924 | 935 | zdB = 10*numpy.log10(z) |
|
925 | 936 | #thisDatetime = dataOut.datatime |
|
926 |
thisDatetime = datetime.datetime.utcfromtimestamp(dataOut.getTimeRange()[ |
|
|
937 | thisDatetime = datetime.datetime.utcfromtimestamp(dataOut.getTimeRange()[0]) | |
|
927 | 938 | title = wintitle + " Doppler Spectra: %s" %(thisDatetime.strftime("%d-%b-%Y %H:%M:%S")) |
|
928 | 939 | xlabel = "Velocity (m/s)" |
|
929 | 940 | ylabel = "Spectrum" |
@@ -1044,7 +1055,7 class EWDriftsPlot(Figure): | |||
|
1044 | 1055 | xmin=None, xmax=None, ymin=None, ymax=None, zmin=None, zmax=None, |
|
1045 | 1056 | zmaxVertical = None, zminVertical = None, zmaxZonal = None, zminZonal = None, |
|
1046 | 1057 | timerange=None, SNRthresh = -numpy.inf, SNRmin = None, SNRmax = None, SNR_1 = False, |
|
1047 | save=False, figpath='', lastone=0,figfile=None, ftp=False, wr_period=1, show=True, | |
|
1058 | save=False, figpath='./', lastone=0,figfile=None, ftp=False, wr_period=1, show=True, | |
|
1048 | 1059 | server=None, folder=None, username=None, password=None, |
|
1049 | 1060 | ftp_wei=0, exp_code=0, sub_exp_code=0, plot_pos=0): |
|
1050 | 1061 | """ |
@@ -1171,23 +1182,25 class EWDriftsPlot(Figure): | |||
|
1171 | 1182 | ticksize=9, cblabel='', cbsize="1%", colormap="jet") |
|
1172 | 1183 | |
|
1173 | 1184 | self.draw() |
|
1174 | ||
|
1175 | if self.figfile == None: | |
|
1176 | str_datetime = thisDatetime.strftime("%Y%m%d_%H%M%S") | |
|
1177 | self.figfile = self.getFilename(name = str_datetime) | |
|
1178 | 1185 | |
|
1179 |
if |
|
|
1186 | if save: | |
|
1180 | 1187 | |
|
1188 | if self.figfile == None: | |
|
1189 | str_datetime = thisDatetime.strftime("%Y%m%d_%H%M%S") | |
|
1190 | self.figfile = self.getFilename(name = str_datetime) | |
|
1191 | ||
|
1181 | 1192 | self.counter_imagwr += 1 |
|
1193 | ||
|
1182 | 1194 | if (self.counter_imagwr>=wr_period): |
|
1183 | # store png plot to local folder | |
|
1195 | # store png plot to local folder | |
|
1184 | 1196 | self.saveFigure(figpath, self.figfile) |
|
1185 | # store png plot to FTP server according to RT-Web format | |
|
1186 | name = self.getNameToFtp(thisDatetime, self.FTP_WEI, self.EXP_CODE, self.SUB_EXP_CODE, self.PLOT_CODE, self.PLOT_POS) | |
|
1187 | ftp_filename = os.path.join(figpath, name) | |
|
1188 | self.saveFigure(figpath, ftp_filename) | |
|
1189 | ||
|
1190 | 1197 | self.counter_imagwr = 0 |
|
1198 | ||
|
1199 | if ftp: | |
|
1200 | # store png plot to FTP server according to RT-Web format | |
|
1201 | name = self.getNameToFtp(thisDatetime, self.FTP_WEI, self.EXP_CODE, self.SUB_EXP_CODE, self.PLOT_CODE, self.PLOT_POS) | |
|
1202 | ftp_filename = os.path.join(figpath, name) | |
|
1203 | self.saveFigure(figpath, ftp_filename) | |
|
1191 | 1204 | |
|
1192 | 1205 | if x[1] >= self.axesList[0].xmax: |
|
1193 | 1206 | self.counter_imagwr = wr_period |
@@ -1,5 +1,7 | |||
|
1 | 1 | ''' |
|
2 | @author: Daniel Suarez | |
|
2 | Created on Jul 9, 2014 | |
|
3 | ||
|
4 | @author: roj-idl71 | |
|
3 | 5 | ''' |
|
4 | 6 | import os |
|
5 | 7 | import datetime |
@@ -33,6 +35,9 class SpectraPlot(Figure): | |||
|
33 | 35 | self.SUB_EXP_CODE = None |
|
34 | 36 | self.PLOT_POS = None |
|
35 | 37 | |
|
38 | self.__xfilter_ena = False | |
|
39 | self.__yfilter_ena = False | |
|
40 | ||
|
36 | 41 | def getSubplots(self): |
|
37 | 42 | |
|
38 | 43 | ncol = int(numpy.sqrt(self.nplots)+0.9) |
@@ -76,7 +81,7 class SpectraPlot(Figure): | |||
|
76 | 81 | |
|
77 | 82 | def run(self, dataOut, id, wintitle="", channelList=None, showprofile=True, |
|
78 | 83 | xmin=None, xmax=None, ymin=None, ymax=None, zmin=None, zmax=None, |
|
79 | save=False, figpath='', figfile=None, show=True, ftp=False, wr_period=1, | |
|
84 | save=False, figpath='./', figfile=None, show=True, ftp=False, wr_period=1, | |
|
80 | 85 | server=None, folder=None, username=None, password=None, |
|
81 | 86 | ftp_wei=0, exp_code=0, sub_exp_code=0, plot_pos=0, realtime=False): |
|
82 | 87 | |
@@ -96,9 +101,6 class SpectraPlot(Figure): | |||
|
96 | 101 | zmax : None |
|
97 | 102 | """ |
|
98 | 103 | |
|
99 | if dataOut.flagNoData: | |
|
100 | return None | |
|
101 | ||
|
102 | 104 | if realtime: |
|
103 | 105 | if not(isRealtime(utcdatatime = dataOut.utctime)): |
|
104 | 106 | print 'Skipping this plot function' |
@@ -120,16 +122,15 class SpectraPlot(Figure): | |||
|
120 | 122 | |
|
121 | 123 | z = dataOut.data_spc[channelIndexList,:,:]/factor |
|
122 | 124 | z = numpy.where(numpy.isfinite(z), z, numpy.NAN) |
|
123 | avg = numpy.average(z, axis=1) | |
|
124 | #avg = numpy.nanmean(z, axis=1) | |
|
125 | noise = dataOut.noise/factor | |
|
126 | ||
|
127 | 125 | zdB = 10*numpy.log10(z) |
|
126 | ||
|
127 | avg = numpy.nanmean(z, axis=1) | |
|
128 | 128 | avgdB = 10*numpy.log10(avg) |
|
129 | ||
|
130 | noise = dataOut.getNoise()/factor | |
|
129 | 131 | noisedB = 10*numpy.log10(noise) |
|
130 | 132 | |
|
131 | #thisDatetime = dataOut.datatime | |
|
132 | thisDatetime = datetime.datetime.utcfromtimestamp(dataOut.getTimeRange()[1]) | |
|
133 | thisDatetime = datetime.datetime.utcfromtimestamp(dataOut.getTimeRange()[0]) | |
|
133 | 134 | title = wintitle + " Spectra" |
|
134 | 135 | if ((dataOut.azimuth!=None) and (dataOut.zenith!=None)): |
|
135 | 136 | title = title + '_' + 'azimuth,zenith=%2.2f,%2.2f'%(dataOut.azimuth, dataOut.zenith) |
@@ -151,9 +152,9 class SpectraPlot(Figure): | |||
|
151 | 152 | if xmax == None: xmax = numpy.nanmax(x) |
|
152 | 153 | if ymin == None: ymin = numpy.nanmin(y) |
|
153 | 154 | if ymax == None: ymax = numpy.nanmax(y) |
|
154 |
if zmin == None: zmin = numpy.nanmin( |
|
|
155 |
if zmax == None: zmax = numpy.nanmax(avgdB) |
|
|
156 | ||
|
155 | if zmin == None: zmin = numpy.floor(numpy.nanmin(noisedB)) - 3 | |
|
156 | if zmax == None: zmax = numpy.ceil(numpy.nanmax(avgdB)) + 3 | |
|
157 | ||
|
157 | 158 | self.FTP_WEI = ftp_wei |
|
158 | 159 | self.EXP_CODE = exp_code |
|
159 | 160 | self.SUB_EXP_CODE = sub_exp_code |
@@ -177,7 +178,7 class SpectraPlot(Figure): | |||
|
177 | 178 | |
|
178 | 179 | if self.__showprofile: |
|
179 | 180 | axes = self.axesList[i*self.__nsubplots +1] |
|
180 | axes.pline(avgdB[i], y, | |
|
181 | axes.pline(avgdB[i,:], y, | |
|
181 | 182 | xmin=zmin, xmax=zmax, ymin=ymin, ymax=ymax, |
|
182 | 183 | xlabel='dB', ylabel='', title='', |
|
183 | 184 | ytick_visible=False, |
@@ -187,24 +188,30 class SpectraPlot(Figure): | |||
|
187 | 188 | axes.addpline(noiseline, y, idline=1, color="black", linestyle="dashed", lw=2) |
|
188 | 189 | |
|
189 | 190 | self.draw() |
|
190 | ||
|
191 |
if |
|
|
192 | str_datetime = thisDatetime.strftime("%Y%m%d_%H%M%S") | |
|
193 | figfile = self.getFilename(name = str_datetime) | |
|
194 | name = str_datetime | |
|
195 | if ((dataOut.azimuth!=None) and (dataOut.zenith!=None)): | |
|
196 | name = name + '_az' + '_%2.2f'%(dataOut.azimuth) + '_zn' + '_%2.2f'%(dataOut.zenith) | |
|
197 | figfile = self.getFilename(name) | |
|
198 | if figpath != '': | |
|
191 | ||
|
192 | if save: | |
|
193 | ||
|
194 | if figfile == None: | |
|
195 | str_datetime = thisDatetime.strftime("%Y%m%d_%H%M%S") | |
|
196 | figfile = self.getFilename(name = str_datetime) | |
|
197 | name = str_datetime | |
|
198 | if ((dataOut.azimuth!=None) and (dataOut.zenith!=None)): | |
|
199 | name = name + '_az' + '_%2.2f'%(dataOut.azimuth) + '_zn' + '_%2.2f'%(dataOut.zenith) | |
|
200 | figfile = self.getFilename(name) | |
|
201 | ||
|
199 | 202 | self.counter_imagwr += 1 |
|
203 | ||
|
200 | 204 | if (self.counter_imagwr>=wr_period): |
|
201 | # store png plot to local folder | |
|
205 | # store png plot to local folder | |
|
202 | 206 | self.saveFigure(figpath, figfile) |
|
203 | # store png plot to FTP server according to RT-Web format | |
|
204 | name = self.getNameToFtp(thisDatetime, self.FTP_WEI, self.EXP_CODE, self.SUB_EXP_CODE, self.PLOT_CODE, self.PLOT_POS) | |
|
205 | ftp_filename = os.path.join(figpath, name) | |
|
206 | self.saveFigure(figpath, ftp_filename) | |
|
207 | 207 | self.counter_imagwr = 0 |
|
208 | ||
|
209 | if ftp: | |
|
210 | # store png plot to FTP server according to RT-Web format | |
|
211 | name = self.getNameToFtp(thisDatetime, self.FTP_WEI, self.EXP_CODE, self.SUB_EXP_CODE, self.PLOT_CODE, self.PLOT_POS) | |
|
212 | ftp_filename = os.path.join(figpath, name) | |
|
213 | self.saveFigure(figpath, ftp_filename) | |
|
214 | ||
|
208 | 215 | |
|
209 | 216 | |
|
210 | 217 | class CrossSpectraPlot(Figure): |
@@ -266,7 +273,7 class CrossSpectraPlot(Figure): | |||
|
266 | 273 | |
|
267 | 274 | def run(self, dataOut, id, wintitle="", pairsList=None, |
|
268 | 275 | xmin=None, xmax=None, ymin=None, ymax=None, zmin=None, zmax=None, |
|
269 | save=False, figpath='', figfile=None, ftp=False, wr_period=1, | |
|
276 | save=False, figpath='./', figfile=None, ftp=False, wr_period=1, | |
|
270 | 277 | power_cmap='jet', coherence_cmap='jet', phase_cmap='RdBu_r', show=True, |
|
271 | 278 | server=None, folder=None, username=None, password=None, |
|
272 | 279 | ftp_wei=0, exp_code=0, sub_exp_code=0, plot_pos=0): |
@@ -293,7 +300,7 class CrossSpectraPlot(Figure): | |||
|
293 | 300 | pairsIndexList = [] |
|
294 | 301 | for pair in pairsList: |
|
295 | 302 | if pair not in dataOut.pairsList: |
|
296 | raise ValueError, "Pair %s is not in dataOut.pairsList" %(pair) | |
|
303 | raise ValueError, "Pair %s is not in dataOut.pairsList" %str(pair) | |
|
297 | 304 | pairsIndexList.append(dataOut.pairsList.index(pair)) |
|
298 | 305 | |
|
299 | 306 | if pairsIndexList == []: |
@@ -305,17 +312,16 class CrossSpectraPlot(Figure): | |||
|
305 | 312 | x = dataOut.getVelRange(1) |
|
306 | 313 | y = dataOut.getHeiRange() |
|
307 | 314 | z = dataOut.data_spc[:,:,:]/factor |
|
308 |
|
|
|
309 | avg = numpy.abs(numpy.average(z, axis=1)) | |
|
315 | z = numpy.where(numpy.isfinite(z), z, numpy.NAN) | |
|
316 | ||
|
310 | 317 | noise = dataOut.noise/factor |
|
311 | 318 | |
|
312 | 319 | zdB = 10*numpy.log10(z) |
|
313 | avgdB = 10*numpy.log10(avg) | |
|
314 | 320 | noisedB = 10*numpy.log10(noise) |
|
315 | 321 | |
|
316 | 322 | |
|
317 | 323 | #thisDatetime = dataOut.datatime |
|
318 |
thisDatetime = datetime.datetime.utcfromtimestamp(dataOut.getTimeRange()[ |
|
|
324 | thisDatetime = datetime.datetime.utcfromtimestamp(dataOut.getTimeRange()[0]) | |
|
319 | 325 | title = wintitle + " Cross-Spectra: %s" %(thisDatetime.strftime("%d-%b-%Y %H:%M:%S")) |
|
320 | 326 | xlabel = "Velocity (m/s)" |
|
321 | 327 | ylabel = "Range (Km)" |
@@ -330,12 +336,15 class CrossSpectraPlot(Figure): | |||
|
330 | 336 | showprofile=False, |
|
331 | 337 | show=show) |
|
332 | 338 | |
|
339 | avg = numpy.abs(numpy.average(z, axis=1)) | |
|
340 | avgdB = 10*numpy.log10(avg) | |
|
341 | ||
|
333 | 342 | if xmin == None: xmin = numpy.nanmin(x) |
|
334 | 343 | if xmax == None: xmax = numpy.nanmax(x) |
|
335 | 344 | if ymin == None: ymin = numpy.nanmin(y) |
|
336 | 345 | if ymax == None: ymax = numpy.nanmax(y) |
|
337 |
if zmin == None: zmin = numpy.nanmin( |
|
|
338 |
if zmax == None: zmax = numpy.nanmax(avgdB) |
|
|
346 | if zmin == None: zmin = numpy.floor(numpy.nanmin(noisedB)) - 3 | |
|
347 | if zmax == None: zmax = numpy.ceil(numpy.nanmax(avgdB)) + 3 | |
|
339 | 348 | |
|
340 | 349 | self.FTP_WEI = ftp_wei |
|
341 | 350 | self.EXP_CODE = exp_code |
@@ -388,20 +397,24 class CrossSpectraPlot(Figure): | |||
|
388 | 397 | |
|
389 | 398 | self.draw() |
|
390 | 399 | |
|
391 |
if |
|
|
392 | str_datetime = thisDatetime.strftime("%Y%m%d_%H%M%S") | |
|
393 | figfile = self.getFilename(name = str_datetime) | |
|
394 | ||
|
395 | if figpath != '': | |
|
400 | if save != '': | |
|
401 | ||
|
402 | if figfile == None: | |
|
403 | str_datetime = thisDatetime.strftime("%Y%m%d_%H%M%S") | |
|
404 | figfile = self.getFilename(name = str_datetime) | |
|
405 | ||
|
396 | 406 | self.counter_imagwr += 1 |
|
407 | ||
|
397 | 408 | if (self.counter_imagwr>=wr_period): |
|
398 | # store png plot to local folder | |
|
409 | # store png plot to local folder | |
|
399 | 410 | self.saveFigure(figpath, figfile) |
|
400 | # store png plot to FTP server according to RT-Web format | |
|
401 | name = self.getNameToFtp(thisDatetime, self.FTP_WEI, self.EXP_CODE, self.SUB_EXP_CODE, self.PLOT_CODE, self.PLOT_POS) | |
|
402 | ftp_filename = os.path.join(figpath, name) | |
|
403 | self.saveFigure(figpath, ftp_filename) | |
|
404 | 411 | self.counter_imagwr = 0 |
|
412 | ||
|
413 | if ftp: | |
|
414 | # store png plot to FTP server according to RT-Web format | |
|
415 | name = self.getNameToFtp(thisDatetime, self.FTP_WEI, self.EXP_CODE, self.SUB_EXP_CODE, self.PLOT_CODE, self.PLOT_POS) | |
|
416 | ftp_filename = os.path.join(figpath, name) | |
|
417 | self.saveFigure(figpath, ftp_filename) | |
|
405 | 418 | |
|
406 | 419 | |
|
407 | 420 | class RTIPlot(Figure): |
@@ -482,7 +495,7 class RTIPlot(Figure): | |||
|
482 | 495 | def run(self, dataOut, id, wintitle="", channelList=None, showprofile='True', |
|
483 | 496 | xmin=None, xmax=None, ymin=None, ymax=None, zmin=None, zmax=None, |
|
484 | 497 | timerange=None, |
|
485 | save=False, figpath='', lastone=0,figfile=None, ftp=False, wr_period=1, show=True, | |
|
498 | save=False, figpath='./', lastone=0,figfile=None, ftp=False, wr_period=1, show=True, | |
|
486 | 499 | server=None, folder=None, username=None, password=None, |
|
487 | 500 | ftp_wei=0, exp_code=0, sub_exp_code=0, plot_pos=0): |
|
488 | 501 | |
@@ -511,8 +524,8 class RTIPlot(Figure): | |||
|
511 | 524 | raise ValueError, "Channel %d is not in dataOut.channelList" |
|
512 | 525 | channelIndexList.append(dataOut.channelList.index(channel)) |
|
513 | 526 | |
|
514 | if timerange != None: | |
|
515 | self.timerange = timerange | |
|
527 | # if timerange != None: | |
|
528 | # self.timerange = timerange | |
|
516 | 529 | |
|
517 | 530 | #tmin = None |
|
518 | 531 | #tmax = None |
@@ -528,7 +541,7 class RTIPlot(Figure): | |||
|
528 | 541 | |
|
529 | 542 | |
|
530 | 543 | # thisDatetime = dataOut.datatime |
|
531 |
thisDatetime = datetime.datetime.utcfromtimestamp(dataOut.getTimeRange()[ |
|
|
544 | thisDatetime = datetime.datetime.utcfromtimestamp(dataOut.getTimeRange()[0]) | |
|
532 | 545 | title = wintitle + " RTI" #: %s" %(thisDatetime.strftime("%d-%b-%Y")) |
|
533 | 546 | xlabel = "" |
|
534 | 547 | ylabel = "Range (Km)" |
@@ -543,18 +556,18 class RTIPlot(Figure): | |||
|
543 | 556 | showprofile=showprofile, |
|
544 | 557 | show=show) |
|
545 | 558 | |
|
546 | self.xmin, self.xmax = self.getTimeLim(x, xmin, xmax, timerange) | |
|
547 | ||
|
548 | # if timerange != None: | |
|
549 | # self.timerange = timerange | |
|
550 | # self.xmin, self.tmax = self.getTimeLim(x, xmin, xmax, timerange) | |
|
559 | if timerange != None: | |
|
560 | self.timerange = timerange | |
|
551 | 561 | |
|
562 | self.xmin, self.xmax = self.getTimeLim(x, xmin, xmax, timerange) | |
|
552 | 563 | |
|
564 | noise = dataOut.noise/factor | |
|
565 | noisedB = 10*numpy.log10(noise) | |
|
553 | 566 | |
|
554 | 567 | if ymin == None: ymin = numpy.nanmin(y) |
|
555 | 568 | if ymax == None: ymax = numpy.nanmax(y) |
|
556 |
if zmin == None: zmin = numpy.nanmin( |
|
|
557 |
if zmax == None: zmax = numpy.nanmax(avgdB) |
|
|
569 | if zmin == None: zmin = numpy.floor(numpy.nanmin(noisedB)) - 3 | |
|
570 | if zmax == None: zmax = numpy.ceil(numpy.nanmax(avgdB)) + 3 | |
|
558 | 571 | |
|
559 | 572 | self.FTP_WEI = ftp_wei |
|
560 | 573 | self.EXP_CODE = exp_code |
@@ -591,22 +604,24 class RTIPlot(Figure): | |||
|
591 | 604 | |
|
592 | 605 | self.draw() |
|
593 | 606 | |
|
594 | if self.figfile == None: | |
|
595 | str_datetime = thisDatetime.strftime("%Y%m%d_%H%M%S") | |
|
596 | self.figfile = self.getFilename(name = str_datetime) | |
|
597 | ||
|
598 | if figpath != '': | |
|
607 | if save: | |
|
608 | ||
|
609 | if self.figfile == None: | |
|
610 | str_datetime = thisDatetime.strftime("%Y%m%d_%H%M%S") | |
|
611 | self.figfile = self.getFilename(name = str_datetime) | |
|
599 | 612 | |
|
600 | 613 | self.counter_imagwr += 1 |
|
614 | ||
|
601 | 615 | if (self.counter_imagwr>=wr_period): |
|
602 | # store png plot to local folder | |
|
616 | # store png plot to local folder | |
|
603 | 617 | self.saveFigure(figpath, self.figfile) |
|
604 | # store png plot to FTP server according to RT-Web format | |
|
605 | name = self.getNameToFtp(thisDatetime, self.FTP_WEI, self.EXP_CODE, self.SUB_EXP_CODE, self.PLOT_CODE, self.PLOT_POS) | |
|
606 | ftp_filename = os.path.join(figpath, name) | |
|
607 | self.saveFigure(figpath, ftp_filename) | |
|
608 | ||
|
609 | 618 | self.counter_imagwr = 0 |
|
619 | ||
|
620 | if ftp: | |
|
621 | # store png plot to FTP server according to RT-Web format | |
|
622 | name = self.getNameToFtp(thisDatetime, self.FTP_WEI, self.EXP_CODE, self.SUB_EXP_CODE, self.PLOT_CODE, self.PLOT_POS) | |
|
623 | ftp_filename = os.path.join(figpath, name) | |
|
624 | self.saveFigure(figpath, ftp_filename) | |
|
610 | 625 | |
|
611 | 626 | if x[1] >= self.axesList[0].xmax: |
|
612 | 627 | self.counter_imagwr = wr_period |
@@ -678,7 +693,7 class CoherenceMap(Figure): | |||
|
678 | 693 | def run(self, dataOut, id, wintitle="", pairsList=None, showprofile='True', |
|
679 | 694 | xmin=None, xmax=None, ymin=None, ymax=None, zmin=None, zmax=None, |
|
680 | 695 | timerange=None, |
|
681 | save=False, figpath='', figfile=None, ftp=False, wr_period=1, | |
|
696 | save=False, figpath='./', figfile=None, ftp=False, wr_period=1, | |
|
682 | 697 | coherence_cmap='jet', phase_cmap='RdBu_r', show=True, |
|
683 | 698 | server=None, folder=None, username=None, password=None, |
|
684 | 699 | ftp_wei=0, exp_code=0, sub_exp_code=0, plot_pos=0): |
@@ -692,9 +707,6 class CoherenceMap(Figure): | |||
|
692 | 707 | raise ValueError, "Pair %s is not in dataOut.pairsList" %(pair) |
|
693 | 708 | pairsIndexList.append(dataOut.pairsList.index(pair)) |
|
694 | 709 | |
|
695 | if timerange != None: | |
|
696 | self.timerange = timerange | |
|
697 | ||
|
698 | 710 | if pairsIndexList == []: |
|
699 | 711 | return |
|
700 | 712 | |
@@ -707,7 +719,7 class CoherenceMap(Figure): | |||
|
707 | 719 | y = dataOut.getHeiRange() |
|
708 | 720 | |
|
709 | 721 | #thisDatetime = dataOut.datatime |
|
710 |
thisDatetime = datetime.datetime.utcfromtimestamp(dataOut.getTimeRange()[ |
|
|
722 | thisDatetime = datetime.datetime.utcfromtimestamp(dataOut.getTimeRange()[0]) | |
|
711 | 723 | title = wintitle + " CoherenceMap" #: %s" %(thisDatetime.strftime("%d-%b-%Y")) |
|
712 | 724 | xlabel = "" |
|
713 | 725 | ylabel = "Range (Km)" |
@@ -720,7 +732,8 class CoherenceMap(Figure): | |||
|
720 | 732 | showprofile=showprofile, |
|
721 | 733 | show=show) |
|
722 | 734 | |
|
723 | #tmin, tmax = self.getTimeLim(x, xmin, xmax) | |
|
735 | if timerange != None: | |
|
736 | self.timerange = timerange | |
|
724 | 737 | |
|
725 | 738 | self.xmin, self.xmax = self.getTimeLim(x, xmin, xmax, timerange) |
|
726 | 739 | |
@@ -803,22 +816,24 class CoherenceMap(Figure): | |||
|
803 | 816 | self.counter_imagwr = wr_period |
|
804 | 817 | self.__isConfig = False |
|
805 | 818 | |
|
806 |
if |
|
|
807 | str_datetime = thisDatetime.strftime("%Y%m%d_%H%M%S") | |
|
808 | figfile = self.getFilename(name = str_datetime) | |
|
809 | ||
|
810 | if figpath != '': | |
|
811 | ||
|
819 | if save: | |
|
820 | ||
|
821 | if figfile == None: | |
|
822 | str_datetime = thisDatetime.strftime("%Y%m%d_%H%M%S") | |
|
823 | figfile = self.getFilename(name = str_datetime) | |
|
824 | ||
|
812 | 825 | self.counter_imagwr += 1 |
|
826 | ||
|
813 | 827 | if (self.counter_imagwr>=wr_period): |
|
814 | # store png plot to local folder | |
|
828 | # store png plot to local folder | |
|
815 | 829 | self.saveFigure(figpath, figfile) |
|
816 | # store png plot to FTP server according to RT-Web format | |
|
817 | name = self.getNameToFtp(thisDatetime, self.FTP_WEI, self.EXP_CODE, self.SUB_EXP_CODE, self.PLOT_CODE, self.PLOT_POS) | |
|
818 | ftp_filename = os.path.join(figpath, name) | |
|
819 | self.saveFigure(figpath, ftp_filename) | |
|
820 | ||
|
821 | 830 | self.counter_imagwr = 0 |
|
831 | ||
|
832 | if ftp: | |
|
833 | # store png plot to FTP server according to RT-Web format | |
|
834 | name = self.getNameToFtp(thisDatetime, self.FTP_WEI, self.EXP_CODE, self.SUB_EXP_CODE, self.PLOT_CODE, self.PLOT_POS) | |
|
835 | ftp_filename = os.path.join(figpath, name) | |
|
836 | self.saveFigure(figpath, ftp_filename) | |
|
822 | 837 | |
|
823 | 838 | class PowerProfile(Figure): |
|
824 | 839 | isConfig = None |
@@ -864,11 +879,10 class PowerProfile(Figure): | |||
|
864 | 879 | |
|
865 | 880 | def run(self, dataOut, id, wintitle="", channelList=None, |
|
866 | 881 | xmin=None, xmax=None, ymin=None, ymax=None, |
|
867 |
save=False, figpath='', figfile=None, show=True, |
|
|
868 | server=None, folder=None, username=None, password=None,): | |
|
882 | save=False, figpath='./', figfile=None, show=True, | |
|
883 | ftp=False, wr_period=1, server=None, | |
|
884 | folder=None, username=None, password=None): | |
|
869 | 885 | |
|
870 | if dataOut.flagNoData: | |
|
871 | return None | |
|
872 | 886 | |
|
873 | 887 | if channelList == None: |
|
874 | 888 | channelIndexList = dataOut.channelIndexList |
@@ -880,10 +894,7 class PowerProfile(Figure): | |||
|
880 | 894 | raise ValueError, "Channel %d is not in dataOut.channelList" |
|
881 | 895 | channelIndexList.append(dataOut.channelList.index(channel)) |
|
882 | 896 | |
|
883 | try: | |
|
884 | factor = dataOut.normFactor | |
|
885 | except: | |
|
886 | factor = 1 | |
|
897 | factor = dataOut.normFactor | |
|
887 | 898 | |
|
888 | 899 | y = dataOut.getHeiRange() |
|
889 | 900 | |
@@ -902,7 +913,7 class PowerProfile(Figure): | |||
|
902 | 913 | |
|
903 | 914 | xdB = 10*numpy.log10(x) |
|
904 | 915 | |
|
905 |
thisDatetime = datetime.datetime.utcfromtimestamp(dataOut.getTimeRange()[ |
|
|
916 | thisDatetime = datetime.datetime.utcfromtimestamp(dataOut.getTimeRange()[0]) | |
|
906 | 917 | title = wintitle + " Power Profile %s" %(thisDatetime.strftime("%d-%b-%Y")) |
|
907 | 918 | xlabel = "dB" |
|
908 | 919 | ylabel = "Range (Km)" |
@@ -919,7 +930,7 class PowerProfile(Figure): | |||
|
919 | 930 | if ymin == None: ymin = numpy.nanmin(y) |
|
920 | 931 | if ymax == None: ymax = numpy.nanmax(y) |
|
921 | 932 | if xmin == None: xmin = numpy.nanmin(xdB)*0.9 |
|
922 |
if xmax == None: xmax = numpy.nanmax(xdB)* |
|
|
933 | if xmax == None: xmax = numpy.nanmax(xdB)*1.1 | |
|
923 | 934 | |
|
924 | 935 | self.__isConfig = True |
|
925 | 936 | |
@@ -937,23 +948,19 class PowerProfile(Figure): | |||
|
937 | 948 | |
|
938 | 949 | self.draw() |
|
939 | 950 | |
|
940 |
if |
|
|
941 | str_datetime = thisDatetime.strftime("%Y%m%d_%H%M%S") | |
|
942 | figfile = self.getFilename(name = str_datetime) | |
|
943 | ||
|
944 | if figpath != '': | |
|
951 | if save: | |
|
952 | ||
|
953 | if figfile == None: | |
|
954 | str_datetime = thisDatetime.strftime("%Y%m%d_%H%M%S") | |
|
955 | figfile = self.getFilename(name = str_datetime) | |
|
956 | ||
|
945 | 957 | self.counter_imagwr += 1 |
|
958 | ||
|
946 | 959 | if (self.counter_imagwr>=wr_period): |
|
947 | # store png plot to local folder | |
|
960 | # store png plot to local folder | |
|
948 | 961 | self.saveFigure(figpath, figfile) |
|
949 | # store png plot to FTP server according to RT-Web format | |
|
950 | #name = self.getNameToFtp(thisDatetime, self.FTP_WEI, self.EXP_CODE, self.SUB_EXP_CODE, self.PLOT_CODE, self.PLOT_POS) | |
|
951 | #ftp_filename = os.path.join(figpath, name) | |
|
952 | #self.saveFigure(figpath, ftp_filename) | |
|
953 | 962 | self.counter_imagwr = 0 |
|
954 | 963 | |
|
955 | ||
|
956 | ||
|
957 | 964 | class Noise(Figure): |
|
958 | 965 | |
|
959 | 966 | isConfig = None |
@@ -981,6 +988,9 class Noise(Figure): | |||
|
981 | 988 | self.PLOT_POS = None |
|
982 | 989 | self.figfile = None |
|
983 | 990 | |
|
991 | self.xmin = None | |
|
992 | self.xmax = None | |
|
993 | ||
|
984 | 994 | def getSubplots(self): |
|
985 | 995 | |
|
986 | 996 | ncol = 1 |
@@ -1031,7 +1041,7 class Noise(Figure): | |||
|
1031 | 1041 | def run(self, dataOut, id, wintitle="", channelList=None, showprofile='True', |
|
1032 | 1042 | xmin=None, xmax=None, ymin=None, ymax=None, |
|
1033 | 1043 | timerange=None, |
|
1034 | save=False, figpath='', figfile=None, show=True, ftp=False, wr_period=1, | |
|
1044 | save=False, figpath='./', figfile=None, show=True, ftp=False, wr_period=1, | |
|
1035 | 1045 | server=None, folder=None, username=None, password=None, |
|
1036 | 1046 | ftp_wei=0, exp_code=0, sub_exp_code=0, plot_pos=0): |
|
1037 | 1047 | |
@@ -1045,19 +1055,14 class Noise(Figure): | |||
|
1045 | 1055 | raise ValueError, "Channel %d is not in dataOut.channelList" |
|
1046 | 1056 | channelIndexList.append(dataOut.channelList.index(channel)) |
|
1047 | 1057 | |
|
1048 | if timerange != None: | |
|
1049 | self.timerange = timerange | |
|
1050 | ||
|
1051 | tmin = None | |
|
1052 | tmax = None | |
|
1053 | 1058 | x = dataOut.getTimeRange() |
|
1054 | y = dataOut.getHeiRange() | |
|
1059 | #y = dataOut.getHeiRange() | |
|
1055 | 1060 | factor = dataOut.normFactor |
|
1056 | 1061 | noise = dataOut.noise/factor |
|
1057 | 1062 | noisedB = 10*numpy.log10(noise) |
|
1058 | 1063 | |
|
1059 | 1064 | #thisDatetime = dataOut.datatime |
|
1060 |
thisDatetime = datetime.datetime.utcfromtimestamp(dataOut.getTimeRange()[ |
|
|
1065 | thisDatetime = datetime.datetime.utcfromtimestamp(dataOut.getTimeRange()[0]) | |
|
1061 | 1066 | title = wintitle + " Noise" # : %s" %(thisDatetime.strftime("%d-%b-%Y")) |
|
1062 | 1067 | xlabel = "" |
|
1063 | 1068 | ylabel = "Intensity (dB)" |
@@ -1072,8 +1077,12 class Noise(Figure): | |||
|
1072 | 1077 | showprofile=showprofile, |
|
1073 | 1078 | show=show) |
|
1074 | 1079 | |
|
1075 | tmin, tmax = self.getTimeLim(x, xmin, xmax) | |
|
1076 | if ymin == None: ymin = numpy.nanmin(noisedB) - 10.0 | |
|
1080 | if timerange != None: | |
|
1081 | self.timerange = timerange | |
|
1082 | ||
|
1083 | self.xmin, self.xmax = self.getTimeLim(x, xmin, xmax, timerange) | |
|
1084 | ||
|
1085 | if ymin == None: ymin = numpy.floor(numpy.nanmin(noisedB)) - 10.0 | |
|
1077 | 1086 | if ymax == None: ymax = numpy.nanmax(noisedB) + 10.0 |
|
1078 | 1087 | |
|
1079 | 1088 | self.FTP_WEI = ftp_wei |
@@ -1116,7 +1125,7 class Noise(Figure): | |||
|
1116 | 1125 | |
|
1117 | 1126 | |
|
1118 | 1127 | axes.pmultilineyaxis(x=self.xdata, y=self.ydata, |
|
1119 |
xmin= |
|
|
1128 | xmin=self.xmin, xmax=self.xmax, ymin=ymin, ymax=ymax, | |
|
1120 | 1129 | xlabel=xlabel, ylabel=ylabel, title=title, legendlabels=legendlabels, marker='x', markersize=8, linestyle="solid", |
|
1121 | 1130 | XAxisAsTime=True, grid='both' |
|
1122 | 1131 | ) |
@@ -1129,20 +1138,24 class Noise(Figure): | |||
|
1129 | 1138 | del self.ydata |
|
1130 | 1139 | self.__isConfig = False |
|
1131 | 1140 | |
|
1132 |
if s |
|
|
1133 | str_datetime = thisDatetime.strftime("%Y%m%d_%H%M%S") | |
|
1134 | self.figfile = self.getFilename(name = str_datetime) | |
|
1135 | ||
|
1136 | if figpath != '': | |
|
1141 | if save != '': | |
|
1142 | ||
|
1143 | if self.figfile == None: | |
|
1144 | str_datetime = thisDatetime.strftime("%Y%m%d_%H%M%S") | |
|
1145 | self.figfile = self.getFilename(name = str_datetime) | |
|
1146 | ||
|
1137 | 1147 | self.counter_imagwr += 1 |
|
1148 | ||
|
1138 | 1149 | if (self.counter_imagwr>=wr_period): |
|
1139 | # store png plot to local folder | |
|
1150 | # store png plot to local folder | |
|
1140 | 1151 | self.saveFigure(figpath, self.figfile) |
|
1141 | # store png plot to FTP server according to RT-Web format | |
|
1142 | name = self.getNameToFtp(thisDatetime, self.FTP_WEI, self.EXP_CODE, self.SUB_EXP_CODE, self.PLOT_CODE, self.PLOT_POS) | |
|
1143 | ftp_filename = os.path.join(figpath, name) | |
|
1144 | self.saveFigure(figpath, ftp_filename) | |
|
1145 | 1152 | self.counter_imagwr = 0 |
|
1153 | ||
|
1154 | if ftp: | |
|
1155 | # store png plot to FTP server according to RT-Web format | |
|
1156 | name = self.getNameToFtp(thisDatetime, self.FTP_WEI, self.EXP_CODE, self.SUB_EXP_CODE, self.PLOT_CODE, self.PLOT_POS) | |
|
1157 | ftp_filename = os.path.join(figpath, name) | |
|
1158 | self.saveFigure(figpath, ftp_filename) | |
|
1146 | 1159 | |
|
1147 | 1160 | |
|
1148 | 1161 | class BeaconPhase(Figure): |
@@ -1175,6 +1188,9 class BeaconPhase(Figure): | |||
|
1175 | 1188 | |
|
1176 | 1189 | self.figfile = None |
|
1177 | 1190 | |
|
1191 | self.xmin = None | |
|
1192 | self.xmax = None | |
|
1193 | ||
|
1178 | 1194 | def getSubplots(self): |
|
1179 | 1195 | |
|
1180 | 1196 | ncol = 1 |
@@ -1224,7 +1240,7 class BeaconPhase(Figure): | |||
|
1224 | 1240 | def run(self, dataOut, id, wintitle="", pairsList=None, showprofile='True', |
|
1225 | 1241 | xmin=None, xmax=None, ymin=None, ymax=None, |
|
1226 | 1242 | timerange=None, |
|
1227 | save=False, figpath='', figfile=None, show=True, ftp=False, wr_period=1, | |
|
1243 | save=False, figpath='./', figfile=None, show=True, ftp=False, wr_period=1, | |
|
1228 | 1244 | server=None, folder=None, username=None, password=None, |
|
1229 | 1245 | ftp_wei=0, exp_code=0, sub_exp_code=0, plot_pos=0): |
|
1230 | 1246 | |
@@ -1242,18 +1258,13 class BeaconPhase(Figure): | |||
|
1242 | 1258 | |
|
1243 | 1259 | # if len(pairsIndexList) > 4: |
|
1244 | 1260 | # pairsIndexList = pairsIndexList[0:4] |
|
1245 | ||
|
1246 | if timerange != None: | |
|
1247 | self.timerange = timerange | |
|
1248 | 1261 | |
|
1249 | tmin = None | |
|
1250 | tmax = None | |
|
1251 | 1262 | x = dataOut.getTimeRange() |
|
1252 | y = dataOut.getHeiRange() | |
|
1263 | #y = dataOut.getHeiRange() | |
|
1253 | 1264 | |
|
1254 | 1265 | |
|
1255 | 1266 | #thisDatetime = dataOut.datatime |
|
1256 |
thisDatetime = datetime.datetime.utcfromtimestamp(dataOut.getTimeRange()[ |
|
|
1267 | thisDatetime = datetime.datetime.utcfromtimestamp(dataOut.getTimeRange()[0]) | |
|
1257 | 1268 | title = wintitle + " Phase of Beacon Signal" # : %s" %(thisDatetime.strftime("%d-%b-%Y")) |
|
1258 | 1269 | xlabel = "Local Time" |
|
1259 | 1270 | ylabel = "Phase" |
@@ -1284,7 +1295,11 class BeaconPhase(Figure): | |||
|
1284 | 1295 | showprofile=showprofile, |
|
1285 | 1296 | show=show) |
|
1286 | 1297 | |
|
1287 | tmin, tmax = self.getTimeLim(x, xmin, xmax) | |
|
1298 | if timerange != None: | |
|
1299 | self.timerange = timerange | |
|
1300 | ||
|
1301 | self.xmin, self.xmax = self.getTimeLim(x, xmin, xmax, timerange) | |
|
1302 | ||
|
1288 | 1303 | if ymin == None: ymin = numpy.nanmin(phase_beacon) - 10.0 |
|
1289 | 1304 | if ymax == None: ymax = numpy.nanmax(phase_beacon) + 10.0 |
|
1290 | 1305 | |
@@ -1327,7 +1342,7 class BeaconPhase(Figure): | |||
|
1327 | 1342 | |
|
1328 | 1343 | |
|
1329 | 1344 | axes.pmultilineyaxis(x=self.xdata, y=self.ydata, |
|
1330 |
xmin= |
|
|
1345 | xmin=self.xmin, xmax=self.xmax, ymin=ymin, ymax=ymax, | |
|
1331 | 1346 | xlabel=xlabel, ylabel=ylabel, title=title, legendlabels=legendlabels, marker='x', markersize=8, linestyle="solid", |
|
1332 | 1347 | XAxisAsTime=True, grid='both' |
|
1333 | 1348 | ) |
@@ -1340,17 +1355,21 class BeaconPhase(Figure): | |||
|
1340 | 1355 | del self.ydata |
|
1341 | 1356 | self.__isConfig = False |
|
1342 | 1357 | |
|
1343 | if self.figfile == None: | |
|
1344 | str_datetime = thisDatetime.strftime("%Y%m%d_%H%M%S") | |
|
1345 | self.figfile = self.getFilename(name = str_datetime) | |
|
1346 | ||
|
1347 | if figpath != '': | |
|
1358 | if save: | |
|
1359 | ||
|
1360 | if self.figfile == None: | |
|
1361 | str_datetime = thisDatetime.strftime("%Y%m%d_%H%M%S") | |
|
1362 | self.figfile = self.getFilename(name = str_datetime) | |
|
1363 | ||
|
1348 | 1364 | self.counter_imagwr += 1 |
|
1365 | ||
|
1349 | 1366 | if (self.counter_imagwr>=wr_period): |
|
1350 | # store png plot to local folder | |
|
1367 | # store png plot to local folder | |
|
1351 | 1368 | self.saveFigure(figpath, self.figfile) |
|
1352 | # store png plot to FTP server according to RT-Web format | |
|
1353 | name = self.getNameToFtp(thisDatetime, self.FTP_WEI, self.EXP_CODE, self.SUB_EXP_CODE, self.PLOT_CODE, self.PLOT_POS) | |
|
1354 | ftp_filename = os.path.join(figpath, name) | |
|
1355 | self.saveFigure(figpath, ftp_filename) | |
|
1356 | 1369 | self.counter_imagwr = 0 |
|
1370 | ||
|
1371 | if ftp: | |
|
1372 | # store png plot to FTP server according to RT-Web format | |
|
1373 | name = self.getNameToFtp(thisDatetime, self.FTP_WEI, self.EXP_CODE, self.SUB_EXP_CODE, self.PLOT_CODE, self.PLOT_POS) | |
|
1374 | ftp_filename = os.path.join(figpath, name) | |
|
1375 | self.saveFigure(figpath, ftp_filename) |
@@ -1,7 +1,9 | |||
|
1 | 1 | ''' |
|
2 | @author: Daniel Suarez | |
|
3 | ''' | |
|
2 | Created on Jul 9, 2014 | |
|
4 | 3 |
|
|
4 | @author: roj-idl71 | |
|
5 | ''' | |
|
6 | import os | |
|
5 | 7 | import datetime |
|
6 | 8 | import numpy |
|
7 | 9 | |
@@ -111,7 +113,7 class Scope(Figure): | |||
|
111 | 113 | def run(self, dataOut, id, wintitle="", channelList=None, |
|
112 | 114 | xmin=None, xmax=None, ymin=None, ymax=None, save=False, |
|
113 | 115 | figpath='./', figfile=None, show=True, wr_period=1, |
|
114 | server=None, folder=None, username=None, password=None, type='power'): | |
|
116 | ftp=False, server=None, folder=None, username=None, password=None, type='power'): | |
|
115 | 117 | |
|
116 | 118 | """ |
|
117 | 119 | |
@@ -125,8 +127,6 class Scope(Figure): | |||
|
125 | 127 | ymin : None, |
|
126 | 128 | ymax : None, |
|
127 | 129 | """ |
|
128 | if dataOut.flagNoData: | |
|
129 | return None | |
|
130 | 130 | |
|
131 | 131 | if channelList == None: |
|
132 | 132 | channelIndexList = dataOut.channelIndexList |
@@ -141,7 +141,7 class Scope(Figure): | |||
|
141 | 141 | y = dataOut.data[channelIndexList,:] * numpy.conjugate(dataOut.data[channelIndexList,:]) |
|
142 | 142 | y = y.real |
|
143 | 143 | |
|
144 |
thisDatetime = datetime.datetime.utcfromtimestamp(dataOut.getTimeRange()[ |
|
|
144 | thisDatetime = datetime.datetime.utcfromtimestamp(dataOut.getTimeRange()[0]) | |
|
145 | 145 | |
|
146 | 146 | if type == "power": |
|
147 | 147 | self.plot_power(dataOut.heightList, |
@@ -216,11 +216,8 def createPcolor(ax, x, y, z, xmin, xmax, ymin, ymax, zmin, zmax, | |||
|
216 | 216 | def pcolor(imesh, z, xlabel='', ylabel='', title=''): |
|
217 | 217 | |
|
218 | 218 | z = z.T |
|
219 | ||
|
220 | 219 | ax = imesh.get_axes() |
|
221 | ||
|
222 | 220 | printLabels(ax, xlabel, ylabel, title) |
|
223 | ||
|
224 | 221 | imesh.set_array(z.ravel()) |
|
225 | 222 | |
|
226 | 223 | def addpcolor(ax, x, y, z, zmin, zmax, xlabel='', ylabel='', title='', colormap='jet'): |
@@ -0,0 +1,30 | |||
|
1 | ''' | |
|
2 | ||
|
3 | $Author: murco $ | |
|
4 | $Id: JRODataIO.py 169 2012-11-19 21:57:03Z murco $ | |
|
5 | ''' | |
|
6 | ||
|
7 | from jroIO_voltage import * | |
|
8 | from jroIO_spectra import * | |
|
9 | from jroIO_heispectra import * | |
|
10 | from jroIO_usrp import * | |
|
11 | ||
|
12 | try: | |
|
13 | from jroIO_usrp_api import * | |
|
14 | except: | |
|
15 | print "jroIO_usrp_api could not be imported" | |
|
16 | ||
|
17 | try: | |
|
18 | from jroIO_amisr import * | |
|
19 | except: | |
|
20 | print "jroIO_amisr could not be imported" | |
|
21 | ||
|
22 | try: | |
|
23 | from jroIO_HDF5 import * | |
|
24 | except: | |
|
25 | print "jroIO_HDF5 could not be imported" | |
|
26 | ||
|
27 | try: | |
|
28 | from jroIO_hf import * | |
|
29 | except: | |
|
30 | print "jroIO_hf could not be imported" No newline at end of file |
@@ -4,9 +4,9 import os | |||
|
4 | 4 | import h5py |
|
5 | 5 | import re |
|
6 | 6 | |
|
7 | from model.data.jrodata import * | |
|
8 | from model.proc.jroproc_base import ProcessingUnit, Operation | |
|
9 | from model.io.jroIO_base import * | |
|
7 | from schainpy.model.data.jrodata import * | |
|
8 | from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation | |
|
9 | from schainpy.model.io.jroIO_base import * | |
|
10 | 10 | |
|
11 | 11 | |
|
12 | 12 | class HDF5Reader(ProcessingUnit): |
@@ -12,8 +12,8 import re | |||
|
12 | 12 | import h5py |
|
13 | 13 | import numpy |
|
14 | 14 | |
|
15 | from model.proc.jroproc_base import ProcessingUnit, Operation | |
|
16 | from model.data.jroamisr import AMISR | |
|
15 | from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation | |
|
16 | from schainpy.model.data.jroamisr import AMISR | |
|
17 | 17 | |
|
18 | 18 | class RadacHeader(): |
|
19 | 19 | def __init__(self, fp): |
@@ -1,5 +1,7 | |||
|
1 | 1 | ''' |
|
2 | Created on Jul 2, 2014 | |
|
2 | 3 | |
|
4 | @author: roj-idl71 | |
|
3 | 5 | ''' |
|
4 | 6 | import os |
|
5 | 7 | import sys |
@@ -11,24 +13,16 import time, datetime | |||
|
11 | 13 | #import h5py |
|
12 | 14 | import traceback |
|
13 | 15 | |
|
14 |
|
|
|
15 | # import pyfits | |
|
16 |
|
|
|
17 | # print "pyfits module has not been imported, it should be installed to save files in fits format" | |
|
18 | ||
|
19 | #from jrodata import * | |
|
20 | #from jroheaderIO import * | |
|
21 | #from jroprocessing import * | |
|
22 | ||
|
23 | #import re | |
|
24 | #from xml.etree.ElementTree import Element, SubElement, ElementTree | |
|
25 | ||
|
26 | ||
|
27 | LOCALTIME = True #-18000 | |
|
16 | try: | |
|
17 | from gevent import sleep | |
|
18 | except: | |
|
19 | from time import sleep | |
|
20 | ||
|
21 | from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader | |
|
28 | 22 | |
|
29 | from model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader | |
|
23 | LOCALTIME = True | |
|
30 | 24 | |
|
31 |
def isNumber( |
|
|
25 | def isNumber(cad): | |
|
32 | 26 | """ |
|
33 | 27 | Chequea si el conjunto de caracteres que componen un string puede ser convertidos a un numero. |
|
34 | 28 | |
@@ -42,7 +36,7 def isNumber(str): | |||
|
42 | 36 | False : no es un string numerico |
|
43 | 37 | """ |
|
44 | 38 | try: |
|
45 |
float( |
|
|
39 | float( cad ) | |
|
46 | 40 | return True |
|
47 | 41 | except: |
|
48 | 42 | return False |
@@ -308,7 +302,7 class JRODataIO: | |||
|
308 | 302 | |
|
309 | 303 | flagIsNewFile = 1 |
|
310 | 304 | |
|
311 |
flag |
|
|
305 | flagDiscontinuousBlock = 0 | |
|
312 | 306 | |
|
313 | 307 | flagIsNewBlock = 0 |
|
314 | 308 | |
@@ -469,7 +463,6 class JRODataReader(JRODataIO): | |||
|
469 | 463 | fileList = glob.glob1(thisPath, "*%s" %ext) |
|
470 | 464 | if len(fileList) < 1: |
|
471 | 465 | continue |
|
472 | ||
|
473 | 466 | fileList.sort() |
|
474 | 467 | pathDict.setdefault(fileList[0]) |
|
475 | 468 | pathDict[fileList[0]] = i |
@@ -658,7 +651,7 class JRODataReader(JRODataIO): | |||
|
658 | 651 | for nTries in range( tries ): |
|
659 | 652 | if firstTime_flag: |
|
660 | 653 | print "\tWaiting %0.2f sec for the file \"%s\" , try %03d ..." % ( self.delay, filename, nTries+1 ) |
|
661 |
|
|
|
654 | sleep( self.delay ) | |
|
662 | 655 | else: |
|
663 | 656 | print "\tSearching next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext) |
|
664 | 657 | |
@@ -748,8 +741,8 class JRODataReader(JRODataIO): | |||
|
748 | 741 | # self.flagEoF = True |
|
749 | 742 | return 0 |
|
750 | 743 | |
|
751 |
print " |
|
|
752 |
|
|
|
744 | print "[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1) | |
|
745 | sleep( self.delay ) | |
|
753 | 746 | |
|
754 | 747 | |
|
755 | 748 | return 0 |
@@ -771,8 +764,8 class JRODataReader(JRODataIO): | |||
|
771 | 764 | if ( currentSize >= neededSize ): |
|
772 | 765 | return 1 |
|
773 | 766 | |
|
774 |
print " |
|
|
775 |
|
|
|
767 | print "[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1) | |
|
768 | sleep( self.delay ) | |
|
776 | 769 | |
|
777 | 770 | return 0 |
|
778 | 771 | |
@@ -836,10 +829,10 class JRODataReader(JRODataIO): | |||
|
836 | 829 | |
|
837 | 830 | deltaTime = self.basicHeaderObj.utc - self.lastUTTime # |
|
838 | 831 | |
|
839 |
self.flag |
|
|
832 | self.flagDiscontinuousBlock = 0 | |
|
840 | 833 | |
|
841 | 834 | if deltaTime > self.maxTimeStep: |
|
842 |
self.flag |
|
|
835 | self.flagDiscontinuousBlock = 1 | |
|
843 | 836 | |
|
844 | 837 | return 1 |
|
845 | 838 | |
@@ -892,7 +885,7 class JRODataReader(JRODataIO): | |||
|
892 | 885 | except IOError: |
|
893 | 886 | traceback.print_exc() |
|
894 | 887 | if msgFlag: |
|
895 | print "The file %s can't be opened" % (filename) | |
|
888 | print "[Reading] The file %s can't be opened" % (filename) | |
|
896 | 889 | return False |
|
897 | 890 | |
|
898 | 891 | neededSize = self.processingHeaderObj.blockSize + self.firstHeaderSize |
@@ -915,12 +908,12 class JRODataReader(JRODataIO): | |||
|
915 | 908 | except IOError: |
|
916 | 909 | traceback.print_exc() |
|
917 | 910 | if msgFlag: |
|
918 |
print " |
|
|
911 | print "[Reading] The file %s is empty or it hasn't enough data" % filename | |
|
919 | 912 | |
|
920 | 913 | fp.close() |
|
921 | 914 | return False |
|
922 | 915 | else: |
|
923 |
msg = " |
|
|
916 | msg = "[Reading] Skipping the file %s due to it hasn't enough data" %filename | |
|
924 | 917 | |
|
925 | 918 | fp.close() |
|
926 | 919 | fileSize = os.path.getsize(filename) |
@@ -948,13 +941,13 class JRODataReader(JRODataIO): | |||
|
948 | 941 | nTxs = 1): |
|
949 | 942 | |
|
950 | 943 | if path == None: |
|
951 | raise ValueError, "The path is not valid" | |
|
944 | raise ValueError, "[Reading] The path is not valid" | |
|
952 | 945 | |
|
953 | 946 | if ext == None: |
|
954 | 947 | ext = self.ext |
|
955 | 948 | |
|
956 | 949 | if online: |
|
957 | print "Searching files in online mode..." | |
|
950 | print "[Reading] Searching files in online mode..." | |
|
958 | 951 | |
|
959 | 952 | for nTries in range( self.nTries ): |
|
960 | 953 | fullpath, foldercounter, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext, walk=walk, set=set) |
@@ -962,11 +955,11 class JRODataReader(JRODataIO): | |||
|
962 | 955 | if fullpath: |
|
963 | 956 | break |
|
964 | 957 | |
|
965 |
print ' |
|
|
966 |
|
|
|
958 | print '[Reading] Waiting %0.2f sec for an valid file in %s: try %02d ...' % (self.delay, path, nTries+1) | |
|
959 | sleep( self.delay ) | |
|
967 | 960 | |
|
968 | 961 | if not(fullpath): |
|
969 |
print "There 'isn't vali |
|
|
962 | print "There 'isn't any valid file in %s" % path | |
|
970 | 963 | return None |
|
971 | 964 | |
|
972 | 965 | self.year = year |
@@ -977,14 +970,14 class JRODataReader(JRODataIO): | |||
|
977 | 970 | last_set = None |
|
978 | 971 | |
|
979 | 972 | else: |
|
980 | print "Searching files in offline mode ..." | |
|
973 | print "[Reading] Searching files in offline mode ..." | |
|
981 | 974 | pathList, filenameList = self.__searchFilesOffLine(path, startDate=startDate, endDate=endDate, |
|
982 | 975 | startTime=startTime, endTime=endTime, |
|
983 | 976 | set=set, expLabel=expLabel, ext=ext, |
|
984 | 977 | walk=walk) |
|
985 | 978 | |
|
986 | 979 | if not(pathList): |
|
987 | print "No *%s files into the folder %s \nfor the range: %s - %s"%(ext, path, | |
|
980 | print "[Reading] No *%s files into the folder %s \nfor the range: %s - %s"%(ext, path, | |
|
988 | 981 | datetime.datetime.combine(startDate,startTime).ctime(), |
|
989 | 982 | datetime.datetime.combine(endDate,endTime).ctime()) |
|
990 | 983 | |
@@ -1007,11 +1000,11 class JRODataReader(JRODataIO): | |||
|
1007 | 1000 | |
|
1008 | 1001 | if not(self.setNextFile()): |
|
1009 | 1002 | if (startDate!=None) and (endDate!=None): |
|
1010 | print "No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime()) | |
|
1003 | print "[Reading] No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime()) | |
|
1011 | 1004 | elif startDate != None: |
|
1012 | print "No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime()) | |
|
1005 | print "[Reading] No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime()) | |
|
1013 | 1006 | else: |
|
1014 | print "No files" | |
|
1007 | print "[Reading] No files" | |
|
1015 | 1008 | |
|
1016 | 1009 | sys.exit(-1) |
|
1017 | 1010 | |
@@ -1024,7 +1017,7 class JRODataReader(JRODataIO): | |||
|
1024 | 1017 | |
|
1025 | 1018 | self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000. + self.profileIndex * self.radarControllerHeaderObj.ippSeconds |
|
1026 | 1019 | |
|
1027 |
self.dataOut.flag |
|
|
1020 | self.dataOut.flagDiscontinuousBlock = self.flagDiscontinuousBlock | |
|
1028 | 1021 | |
|
1029 | 1022 | self.dataOut.timeZone = self.basicHeaderObj.timeZone |
|
1030 | 1023 | |
@@ -1061,16 +1054,16 class JRODataReader(JRODataIO): | |||
|
1061 | 1054 | |
|
1062 | 1055 | def printReadBlocks(self): |
|
1063 | 1056 | |
|
1064 | print "Number of read blocks per file %04d" %self.nReadBlocks | |
|
1057 | print "[Reading] Number of read blocks per file %04d" %self.nReadBlocks | |
|
1065 | 1058 | |
|
1066 | 1059 | def printTotalBlocks(self): |
|
1067 | 1060 | |
|
1068 | print "Number of read blocks %04d" %self.nTotalBlocks | |
|
1061 | print "[Reading] Number of read blocks %04d" %self.nTotalBlocks | |
|
1069 | 1062 | |
|
1070 | 1063 | def printNumberOfBlock(self): |
|
1071 | 1064 | |
|
1072 | 1065 | if self.flagIsNewBlock: |
|
1073 | print "Block No. %04d, Total blocks %04d -> %s" %(self.basicHeaderObj.dataBlock, self.nTotalBlocks, self.dataOut.datatime.ctime()) | |
|
1066 | print "[Reading] Block No. %04d, Total blocks %04d -> %s" %(self.basicHeaderObj.dataBlock, self.nTotalBlocks, self.dataOut.datatime.ctime()) | |
|
1074 | 1067 | self.dataOut.blocknow = self.basicHeaderObj.dataBlock |
|
1075 | 1068 | |
|
1076 | 1069 | def printInfo(self): |
@@ -1275,13 +1268,13 class JRODataWriter(JRODataIO): | |||
|
1275 | 1268 | setFile = self.setFile |
|
1276 | 1269 | setFile += 1 |
|
1277 | 1270 | |
|
1278 | file = '%s%4.4d%3.3d%3.3d%s' % (self.optchar, | |
|
1271 | filen = '%s%4.4d%3.3d%3.3d%s' % (self.optchar, | |
|
1279 | 1272 | timeTuple.tm_year, |
|
1280 | 1273 | timeTuple.tm_yday, |
|
1281 | 1274 | setFile, |
|
1282 | 1275 | ext ) |
|
1283 | 1276 | |
|
1284 | filename = os.path.join( path, subfolder, file ) | |
|
1277 | filename = os.path.join( path, subfolder, filen ) | |
|
1285 | 1278 | |
|
1286 | 1279 | fp = open( filename,'wb' ) |
|
1287 | 1280 | |
@@ -1296,7 +1289,7 class JRODataWriter(JRODataIO): | |||
|
1296 | 1289 | |
|
1297 | 1290 | self.setFirstHeader() |
|
1298 | 1291 | |
|
1299 |
print 'Writing |
|
|
1292 | print '[Writing] file: %s'%self.filename | |
|
1300 | 1293 | |
|
1301 | 1294 | self.__writeFirstHeader() |
|
1302 | 1295 | |
@@ -1334,7 +1327,7 class JRODataWriter(JRODataIO): | |||
|
1334 | 1327 | self.dataOut = dataOut |
|
1335 | 1328 | |
|
1336 | 1329 | if not(self.setNextFile()): |
|
1337 | print "There isn't a next file" | |
|
1330 | print "[Writing] There isn't a next file" | |
|
1338 | 1331 | return 0 |
|
1339 | 1332 | |
|
1340 | 1333 | self.setBlockDimension() |
@@ -21,10 +21,6 class Reader(ProcessingUnit): | |||
|
21 | 21 | |
|
22 | 22 | ProcessingUnit.__init__(self) |
|
23 | 23 | |
|
24 | # self.dataIn = None | |
|
25 | # | |
|
26 | # self.isConfig = False | |
|
27 | ||
|
28 | 24 | #Is really necessary create the output object in the initializer |
|
29 | 25 | self.dataOut = Voltage() |
|
30 | 26 | |
@@ -44,6 +40,10 class Reader(ProcessingUnit): | |||
|
44 | 40 | |
|
45 | 41 | ''' |
|
46 | 42 |
|
|
43 | ''' | |
|
44 | Add code | |
|
45 | ''' | |
|
46 | ||
|
47 | 47 | self.isConfig = True |
|
48 | 48 | |
|
49 | 49 | def run(self, **kwargs): |
@@ -54,6 +54,10 class Reader(ProcessingUnit): | |||
|
54 | 54 | if not self.isConfig: |
|
55 | 55 | self.setup(**kwargs) |
|
56 | 56 |
|
|
57 | ''' | |
|
58 | Add code | |
|
59 | ''' | |
|
60 | ||
|
57 | 61 | class Writer(Operation): |
|
58 | 62 | ''' |
|
59 | 63 | classdocs |
@@ -1,5 +1,7 | |||
|
1 | 1 | ''' |
|
2 | Created on Jul 3, 2014 | |
|
2 | 3 | |
|
4 | @author: roj-idl71 | |
|
3 | 5 | ''' |
|
4 | 6 | |
|
5 | 7 | import os, sys |
@@ -9,15 +11,84 import fnmatch | |||
|
9 | 11 | import glob |
|
10 | 12 | |
|
11 | 13 | try: |
|
14 | from gevent import sleep | |
|
15 | except: | |
|
16 | from time import sleep | |
|
17 | ||
|
18 | try: | |
|
12 | 19 | import pyfits |
|
13 | 20 | except: |
|
14 | 21 | """ |
|
15 | 22 | """ |
|
16 | ||
|
23 | ||
|
17 | 24 | from xml.etree.ElementTree import ElementTree |
|
18 | 25 | |
|
19 | 26 | from jroIO_base import isDoyFolder, isNumber |
|
20 | from model.proc.jroproc_base import Operation, ProcessingUnit | |
|
27 | from schainpy.model.proc.jroproc_base import Operation, ProcessingUnit | |
|
28 | ||
|
29 | class Fits: | |
|
30 | name=None | |
|
31 | format=None | |
|
32 | array =None | |
|
33 | data =None | |
|
34 | thdulist=None | |
|
35 | prihdr=None | |
|
36 | hdu=None | |
|
37 | ||
|
38 | def __init__(self): | |
|
39 | ||
|
40 | pass | |
|
41 | ||
|
42 | def setColF(self,name,format,array): | |
|
43 | self.name=name | |
|
44 | self.format=format | |
|
45 | self.array=array | |
|
46 | a1=numpy.array([self.array],dtype=numpy.float32) | |
|
47 | self.col1 = pyfits.Column(name=self.name, format=self.format, array=a1) | |
|
48 | return self.col1 | |
|
49 | ||
|
50 | # def setColP(self,name,format,data): | |
|
51 | # self.name=name | |
|
52 | # self.format=format | |
|
53 | # self.data=data | |
|
54 | # a2=numpy.array([self.data],dtype=numpy.float32) | |
|
55 | # self.col2 = pyfits.Column(name=self.name, format=self.format, array=a2) | |
|
56 | # return self.col2 | |
|
57 | ||
|
58 | ||
|
59 | def writeData(self,name,format,data): | |
|
60 | self.name=name | |
|
61 | self.format=format | |
|
62 | self.data=data | |
|
63 | a2=numpy.array([self.data],dtype=numpy.float32) | |
|
64 | self.col2 = pyfits.Column(name=self.name, format=self.format, array=a2) | |
|
65 | return self.col2 | |
|
66 | ||
|
67 | def cFImage(self,idblock,year,month,day,hour,minute,second): | |
|
68 | self.hdu= pyfits.PrimaryHDU(idblock) | |
|
69 | self.hdu.header.set("Year",year) | |
|
70 | self.hdu.header.set("Month",month) | |
|
71 | self.hdu.header.set("Day",day) | |
|
72 | self.hdu.header.set("Hour",hour) | |
|
73 | self.hdu.header.set("Minute",minute) | |
|
74 | self.hdu.header.set("Second",second) | |
|
75 | return self.hdu | |
|
76 | ||
|
77 | ||
|
78 | def Ctable(self,colList): | |
|
79 | self.cols=pyfits.ColDefs(colList) | |
|
80 | self.tbhdu = pyfits.new_table(self.cols) | |
|
81 | return self.tbhdu | |
|
82 | ||
|
83 | ||
|
84 | def CFile(self,hdu,tbhdu): | |
|
85 | self.thdulist=pyfits.HDUList([hdu,tbhdu]) | |
|
86 | ||
|
87 | def wFile(self,filename): | |
|
88 | if os.path.isfile(filename): | |
|
89 | os.remove(filename) | |
|
90 | self.thdulist.writeto(filename) | |
|
91 | ||
|
21 | 92 | |
|
22 | 93 | class ParameterConf: |
|
23 | 94 | ELEMENTNAME = 'Parameter' |
@@ -165,13 +236,13 class FitsWriter(Operation): | |||
|
165 | 236 | setFile = self.setFile |
|
166 | 237 | setFile += 1 |
|
167 | 238 | |
|
168 |
|
|
|
239 | thisFile = '%s%4.4d%3.3d%3.3d%s' % (self.optchar, | |
|
169 | 240 | timeTuple.tm_year, |
|
170 | 241 | timeTuple.tm_yday, |
|
171 | 242 | setFile, |
|
172 | 243 | ext ) |
|
173 | 244 | |
|
174 |
filename = os.path.join( path, subfolder, |
|
|
245 | filename = os.path.join( path, subfolder, thisFile ) | |
|
175 | 246 | |
|
176 | 247 | self.blockIndex = 0 |
|
177 | 248 | self.filename = filename |
@@ -242,7 +313,7 class FitsReader(ProcessingUnit): | |||
|
242 | 313 | self.setFile = 0 |
|
243 | 314 | self.flagNoMoreFiles = 0 |
|
244 | 315 | self.flagIsNewFile = 1 |
|
245 |
self.flag |
|
|
316 | self.flagDiscontinuousBlock = None | |
|
246 | 317 | self.fileIndex = None |
|
247 | 318 | self.filename = None |
|
248 | 319 | self.fileSize = None |
@@ -432,9 +503,9 class FitsReader(ProcessingUnit): | |||
|
432 | 503 | fileList = glob.glob1(thisPath, "*%s" %ext) |
|
433 | 504 | fileList.sort() |
|
434 | 505 | |
|
435 |
for |
|
|
506 | for thisFile in fileList: | |
|
436 | 507 | |
|
437 |
filename = os.path.join(thisPath, |
|
|
508 | filename = os.path.join(thisPath,thisFile) | |
|
438 | 509 | thisDatetime = self.isFileinThisTime(filename, startTime, endTime) |
|
439 | 510 | |
|
440 | 511 | if not(thisDatetime): |
@@ -558,7 +629,7 class FitsReader(ProcessingUnit): | |||
|
558 | 629 | return 1 |
|
559 | 630 | |
|
560 | 631 | print "\tWaiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1) |
|
561 |
|
|
|
632 | sleep( self.delay ) | |
|
562 | 633 | |
|
563 | 634 | |
|
564 | 635 | return 0 |
@@ -585,10 +656,10 class FitsReader(ProcessingUnit): | |||
|
585 | 656 | |
|
586 | 657 | deltaTime = self.utc - self.lastUTTime |
|
587 | 658 | |
|
588 |
self.flag |
|
|
659 | self.flagDiscontinuousBlock = 0 | |
|
589 | 660 | |
|
590 | 661 | if deltaTime > self.maxTimeStep: |
|
591 |
self.flag |
|
|
662 | self.flagDiscontinuousBlock = 1 | |
|
592 | 663 | |
|
593 | 664 | return 1 |
|
594 | 665 | |
@@ -610,7 +681,7 class FitsReader(ProcessingUnit): | |||
|
610 | 681 | print 'Process finished' |
|
611 | 682 | return 0 |
|
612 | 683 | |
|
613 |
self.flag |
|
|
684 | self.flagDiscontinuousBlock = 0 | |
|
614 | 685 | self.flagIsNewBlock = 0 |
|
615 | 686 | |
|
616 | 687 | if not(self.readNextBlock()): |
@@ -653,7 +724,7 class SpectraHeisWriter(Operation): | |||
|
653 | 724 | subfolder = None |
|
654 | 725 | |
|
655 | 726 | def __init__(self): |
|
656 |
self.wrObj = F |
|
|
727 | self.wrObj = Fits() | |
|
657 | 728 | # self.dataOut = dataOut |
|
658 | 729 | self.nTotalBlocks=0 |
|
659 | 730 | # self.set = None |
@@ -709,9 +780,9 class SpectraHeisWriter(Operation): | |||
|
709 | 780 | # self.setFile = 0 |
|
710 | 781 | |
|
711 | 782 | #make the filename |
|
712 |
|
|
|
783 | thisFile = 'D%4.4d%3.3d_%3.3d%s' % (name.tm_year,name.tm_yday,self.setFile,ext) | |
|
713 | 784 | |
|
714 |
filename = os.path.join(self.wrpath,self.subfolder, |
|
|
785 | filename = os.path.join(self.wrpath,self.subfolder, thisFile) | |
|
715 | 786 | |
|
716 | 787 | idblock = numpy.array([self.idblock],dtype="int64") |
|
717 | 788 | header=self.wrObj.cFImage(idblock=idblock, |
@@ -1,12 +1,14 | |||
|
1 | 1 | ''' |
|
2 | ''' | |
|
2 | Created on Jul 2, 2014 | |
|
3 | 3 |
|
|
4 | @author: roj-idl71 | |
|
5 | ''' | |
|
4 | 6 | import numpy |
|
5 | 7 | |
|
6 | 8 | from jroIO_base import LOCALTIME, JRODataReader, JRODataWriter |
|
7 | from model.proc.jroproc_base import ProcessingUnit, Operation | |
|
8 | from model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader | |
|
9 | from model.data.jrodata import Spectra | |
|
9 | from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation | |
|
10 | from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader | |
|
11 | from schainpy.model.data.jrodata import Spectra | |
|
10 | 12 | |
|
11 | 13 | class SpectraReader(JRODataReader, ProcessingUnit): |
|
12 | 14 | """ |
@@ -158,7 +160,7 class SpectraReader(JRODataReader, ProcessingUnit): | |||
|
158 | 160 | |
|
159 | 161 | # self.ippSeconds = 0 |
|
160 | 162 | |
|
161 |
self.flag |
|
|
163 | self.flagDiscontinuousBlock = 0 | |
|
162 | 164 | |
|
163 | 165 | self.flagIsNewBlock = 0 |
|
164 | 166 | |
@@ -328,15 +330,15 class SpectraReader(JRODataReader, ProcessingUnit): | |||
|
328 | 330 | |
|
329 | 331 | self.dataOut.flagDecodeData = False #asumo q la data no esta decodificada |
|
330 | 332 | |
|
331 |
self.dataOut.flagDeflipData = |
|
|
333 | self.dataOut.flagDeflipData = False #asumo q la data esta sin flip | |
|
332 | 334 | |
|
333 | 335 | if self.radarControllerHeaderObj.code != None: |
|
334 | 336 | |
|
335 | self.dataOut.nCode = self.radarControllerHeaderObj.nCode | |
|
336 | ||
|
337 | self.dataOut.nBaud = self.radarControllerHeaderObj.nBaud | |
|
338 | ||
|
339 | self.dataOut.code = self.radarControllerHeaderObj.code | |
|
337 | # self.dataOut.nCode = self.radarControllerHeaderObj.nCode | |
|
338 | # | |
|
339 | # self.dataOut.nBaud = self.radarControllerHeaderObj.nBaud | |
|
340 | # | |
|
341 | # self.dataOut.code = self.radarControllerHeaderObj.code | |
|
340 | 342 | |
|
341 | 343 | self.dataOut.flagDecodeData = True |
|
342 | 344 | |
@@ -355,7 +357,7 class SpectraReader(JRODataReader, ProcessingUnit): | |||
|
355 | 357 | Affected: |
|
356 | 358 | self.dataOut |
|
357 | 359 | |
|
358 |
self.flag |
|
|
360 | self.flagDiscontinuousBlock | |
|
359 | 361 | self.flagIsNewBlock |
|
360 | 362 | """ |
|
361 | 363 | |
@@ -364,7 +366,7 class SpectraReader(JRODataReader, ProcessingUnit): | |||
|
364 | 366 | print 'Process finished' |
|
365 | 367 | return 0 |
|
366 | 368 | |
|
367 |
self.flag |
|
|
369 | self.flagDiscontinuousBlock = 0 | |
|
368 | 370 | self.flagIsNewBlock = 0 |
|
369 | 371 | |
|
370 | 372 | if self.__hasNotDataInBuffer(): |
@@ -555,6 +557,7 class SpectraWriter(JRODataWriter, Operation): | |||
|
555 | 557 | self.nWriteBlocks += 1 |
|
556 | 558 | self.blockIndex += 1 |
|
557 | 559 | |
|
560 | print "[Writing] Block = ", self.blockIndex | |
|
558 | 561 | |
|
559 | 562 | def putData(self): |
|
560 | 563 | """ |
@@ -575,7 +578,7 class SpectraWriter(JRODataWriter, Operation): | |||
|
575 | 578 | |
|
576 | 579 | self.flagIsNewBlock = 0 |
|
577 | 580 | |
|
578 |
if self.dataOut.flag |
|
|
581 | if self.dataOut.flagDiscontinuousBlock: | |
|
579 | 582 | self.data_spc.fill(0) |
|
580 | 583 | self.data_cspc.fill(0) |
|
581 | 584 | self.data_dc.fill(0) |
@@ -1,12 +1,15 | |||
|
1 | 1 | ''' |
|
2 | Created on Jul 2, 2014 | |
|
2 | 3 | |
|
4 | @author: roj-idl71 | |
|
3 | 5 | ''' |
|
6 | ||
|
4 | 7 | import numpy |
|
5 | 8 | |
|
6 | 9 | from jroIO_base import LOCALTIME, JRODataReader, JRODataWriter |
|
7 | from model.proc.jroproc_base import ProcessingUnit, Operation | |
|
8 | from model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader | |
|
9 | from model.data.jrodata import Voltage | |
|
10 | from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation | |
|
11 | from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader | |
|
12 | from schainpy.model.data.jrodata import Voltage | |
|
10 | 13 | |
|
11 | 14 | class VoltageReader(JRODataReader, ProcessingUnit): |
|
12 | 15 | """ |
@@ -144,7 +147,7 class VoltageReader(JRODataReader, ProcessingUnit): | |||
|
144 | 147 | |
|
145 | 148 | # self.ippSeconds = 0 |
|
146 | 149 | |
|
147 |
self.flag |
|
|
150 | self.flagDiscontinuousBlock = 0 | |
|
148 | 151 | |
|
149 | 152 | self.flagIsNewBlock = 0 |
|
150 | 153 | |
@@ -245,14 +248,14 class VoltageReader(JRODataReader, ProcessingUnit): | |||
|
245 | 248 | self.dataOut.radarControllerHeaderObj.ippSeconds = self.radarControllerHeaderObj.ippSeconds/self.nTxs |
|
246 | 249 | |
|
247 | 250 | # self.dataOut.timeInterval = self.radarControllerHeaderObj.ippSeconds * self.processingHeaderObj.nCohInt |
|
248 | ||
|
249 | if self.radarControllerHeaderObj.code != None: | |
|
250 | ||
|
251 | self.dataOut.nCode = self.radarControllerHeaderObj.nCode | |
|
252 | ||
|
253 | self.dataOut.nBaud = self.radarControllerHeaderObj.nBaud | |
|
254 | ||
|
255 | self.dataOut.code = self.radarControllerHeaderObj.code | |
|
251 | # | |
|
252 | # if self.radarControllerHeaderObj.code != None: | |
|
253 | # | |
|
254 | # self.dataOut.nCode = self.radarControllerHeaderObj.nCode | |
|
255 | # | |
|
256 | # self.dataOut.nBaud = self.radarControllerHeaderObj.nBaud | |
|
257 | # | |
|
258 | # self.dataOut.code = self.radarControllerHeaderObj.code | |
|
256 | 259 | |
|
257 | 260 | self.dataOut.dtype = self.dtype |
|
258 | 261 | |
@@ -310,7 +313,7 class VoltageReader(JRODataReader, ProcessingUnit): | |||
|
310 | 313 | Affected: |
|
311 | 314 | self.dataOut |
|
312 | 315 | self.profileIndex |
|
313 |
self.flag |
|
|
316 | self.flagDiscontinuousBlock | |
|
314 | 317 | self.flagIsNewBlock |
|
315 | 318 | """ |
|
316 | 319 | |
@@ -319,7 +322,7 class VoltageReader(JRODataReader, ProcessingUnit): | |||
|
319 | 322 | print 'Process finished' |
|
320 | 323 | return 0 |
|
321 | 324 | |
|
322 |
self.flag |
|
|
325 | self.flagDiscontinuousBlock = 0 | |
|
323 | 326 | self.flagIsNewBlock = 0 |
|
324 | 327 | |
|
325 | 328 | if self.__hasNotDataInBuffer(): |
@@ -462,7 +465,6 class VoltageWriter(JRODataWriter, Operation): | |||
|
462 | 465 | self.processingHeaderObj.profilesPerBlock, |
|
463 | 466 | self.processingHeaderObj.nHeights), |
|
464 | 467 | dtype=numpy.dtype('complex64')) |
|
465 | ||
|
466 | 468 | |
|
467 | 469 | def writeBlock(self): |
|
468 | 470 | """ |
@@ -497,6 +499,8 class VoltageWriter(JRODataWriter, Operation): | |||
|
497 | 499 | self.blockIndex += 1 |
|
498 | 500 | self.nTotalBlocks += 1 |
|
499 | 501 | |
|
502 | print "[Writing] Block = ", self.blockIndex | |
|
503 | ||
|
500 | 504 | def putData(self): |
|
501 | 505 | """ |
|
502 | 506 | Setea un bloque de datos y luego los escribe en un file |
@@ -514,8 +518,7 class VoltageWriter(JRODataWriter, Operation): | |||
|
514 | 518 | |
|
515 | 519 | self.flagIsNewBlock = 0 |
|
516 | 520 | |
|
517 |
if self.dataOut.flag |
|
|
518 | ||
|
521 | if self.dataOut.flagDiscontinuousBlock: | |
|
519 | 522 | self.datablock.fill(0) |
|
520 | 523 | self.profileIndex = 0 |
|
521 | 524 | self.setNextFile() |
@@ -0,0 +1,12 | |||
|
1 | ''' | |
|
2 | ||
|
3 | $Author: murco $ | |
|
4 | $Id: Processor.py 1 2012-11-12 18:56:07Z murco $ | |
|
5 | ''' | |
|
6 | ||
|
7 | from jroproc_voltage import * | |
|
8 | from jroproc_spectra import * | |
|
9 | from jroproc_heispectra import * | |
|
10 | from jroproc_amisr import * | |
|
11 | from jroproc_correlation import * | |
|
12 | from jroproc_parameters import * No newline at end of file |
@@ -1,4 +1,4 | |||
|
1 | import numpy | |
|
1 | # import numpy | |
|
2 | 2 | cimport numpy |
|
3 | 3 | |
|
4 | 4 | def decoder(numpy.ndarray[numpy.complex_t, ndim=2] fft_code, numpy.ndarray[numpy.complex_t, ndim=2] data): |
@@ -3,7 +3,7 | |||
|
3 | 3 | ''' |
|
4 | 4 | import numpy |
|
5 | 5 | from jroproc_base import ProcessingUnit, Operation |
|
6 | from model.data.jroamisr import AMISR | |
|
6 | from schainpy.model.data.jroamisr import AMISR | |
|
7 | 7 | |
|
8 | 8 | class AMISRProc(ProcessingUnit): |
|
9 | 9 | def __init__(self): |
@@ -1,7 +1,10 | |||
|
1 | 1 | ''' |
|
2 | ||
|
3 | $Author: murco $ | |
|
4 | $Id: jroproc_base.py 1 2012-11-12 18:56:07Z murco $ | |
|
2 | 5 | ''' |
|
3 | 6 | |
|
4 | class ProcessingUnit: | |
|
7 | class ProcessingUnit(object): | |
|
5 | 8 | |
|
6 | 9 | """ |
|
7 | 10 | Esta es la clase base para el procesamiento de datos. |
@@ -78,6 +81,8 class ProcessingUnit: | |||
|
78 | 81 | **kwargs : diccionario con los nombres y valores de la funcion a ejecutar. |
|
79 | 82 | |
|
80 | 83 | """ |
|
84 | ||
|
85 | #Checking the inputs | |
|
81 | 86 | if name == 'run': |
|
82 | 87 | |
|
83 | 88 | if not self.checkInputs(): |
@@ -137,26 +142,35 class ProcessingUnit: | |||
|
137 | 142 | def call(self, opType, opName=None, opId=None, **kwargs): |
|
138 | 143 | |
|
139 | 144 | """ |
|
140 |
Return True si ejecuta la operacion |
|
|
141 | argumentos "**kwargs". False si la operacion no se ha ejecutado. | |
|
142 | La operacion puede ser de dos tipos: | |
|
145 | Return True si ejecuta la operacion interna nombrada "opName" o la operacion externa | |
|
146 | identificada con el id "opId"; con los argumentos "**kwargs". | |
|
143 | 147 | |
|
144 | 1. Un metodo propio de esta clase: | |
|
145 |
|
|
|
146 | operation.type = "self" | |
|
148 | False si la operacion no se ha ejecutado. | |
|
149 | ||
|
150 | Input: | |
|
151 | ||
|
152 | opType : Puede ser "self" o "external" | |
|
153 | ||
|
154 | La operacion puede ser de dos tipos (callMethod or callObject): | |
|
147 | 155 | |
|
148 | 2. El metodo "run" de un objeto del tipo Operation o de un derivado de ella: | |
|
149 | operation.type = "other". | |
|
156 | 1. Un metodo propio de esta clase: | |
|
157 | ||
|
158 | opType = "self" | |
|
150 | 159 | |
|
151 |
|
|
|
152 | "addOperation" e identificado con el operation.id | |
|
153 | ||
|
160 | 2. El metodo "run" de un objeto del tipo Operation o de un derivado de ella: | |
|
161 | ||
|
162 | opType = "other" or "external". | |
|
154 | 163 | |
|
155 | con el id de la operacion. | |
|
156 | ||
|
157 |
|
|
|
164 | opName : Si la operacion es interna (opType = 'self'), entonces el "opName" sera | |
|
165 | usada para llamar a un metodo interno de la clase Processing | |
|
166 | ||
|
167 | opId : Si la operacion es externa (opType = 'other'), entonces el "opId" sera | |
|
168 | usada para llamar al metodo "run" de la clase Operation registrada con ese Id | |
|
158 | 169 | |
|
159 | Operation : Objeto del tipo operacion con los atributos: name, type y id. | |
|
170 | Exception: | |
|
171 | Este objeto de tipo Operation debe de haber sido agregado antes con el metodo: | |
|
172 | "addOperation" e identificado con el valor "opId" = el id de la operacion. | |
|
173 | De lo contrario retornara un error del tipo IOError | |
|
160 | 174 | |
|
161 | 175 | """ |
|
162 | 176 | |
@@ -205,7 +219,7 class ProcessingUnit: | |||
|
205 | 219 | |
|
206 | 220 | raise ValueError, "Not implemented" |
|
207 | 221 | |
|
208 | class Operation(): | |
|
222 | class Operation(object): | |
|
209 | 223 | |
|
210 | 224 | """ |
|
211 | 225 | Clase base para definir las operaciones adicionales que se pueden agregar a la clase ProcessingUnit |
@@ -233,7 +247,8 class Operation(): | |||
|
233 | 247 | def run(self, dataIn, **kwargs): |
|
234 | 248 | |
|
235 | 249 | """ |
|
236 |
Realiza las operaciones necesarias sobre la dataIn.data y actualiza los |
|
|
250 | Realiza las operaciones necesarias sobre la dataIn.data y actualiza los | |
|
251 | atributos del objeto dataIn. | |
|
237 | 252 | |
|
238 | 253 | Input: |
|
239 | 254 |
@@ -1,7 +1,7 | |||
|
1 | 1 | import numpy |
|
2 | 2 | |
|
3 | 3 | from jroproc_base import ProcessingUnit, Operation |
|
4 | from model.data.jrodata import Correlation | |
|
4 | from schainpy.model.data.jrodata import Correlation | |
|
5 | 5 | |
|
6 | 6 | class CorrelationProc(ProcessingUnit): |
|
7 | 7 | |
@@ -33,7 +33,7 class CorrelationProc(ProcessingUnit): | |||
|
33 | 33 | self.dataOut.nCode = self.dataIn.nCode |
|
34 | 34 | self.dataOut.code = self.dataIn.code |
|
35 | 35 | # self.dataOut.nProfiles = self.dataOut.nFFTPoints |
|
36 |
self.dataOut.flag |
|
|
36 | self.dataOut.flagDiscontinuousBlock = self.dataIn.flagDiscontinuousBlock | |
|
37 | 37 | self.dataOut.utctime = self.firstdatatime |
|
38 | 38 | self.dataOut.flagDecodeData = self.dataIn.flagDecodeData #asumo q la data esta decodificada |
|
39 | 39 | self.dataOut.flagDeflipData = self.dataIn.flagDeflipData #asumo q la data esta sin flip |
@@ -1,7 +1,7 | |||
|
1 | 1 | import numpy |
|
2 | 2 | |
|
3 | 3 | from jroproc_base import ProcessingUnit, Operation |
|
4 | from model.data.jrodata import SpectraHeis | |
|
4 | from schainpy.model.data.jrodata import SpectraHeis | |
|
5 | 5 | |
|
6 | 6 | class SpectraHeisProc(ProcessingUnit): |
|
7 | 7 | |
@@ -37,7 +37,7 class SpectraHeisProc(ProcessingUnit): | |||
|
37 | 37 | self.dataOut.nFFTPoints = self.dataIn.nHeights |
|
38 | 38 | # self.dataOut.channelIndexList = self.dataIn.channelIndexList |
|
39 | 39 | # self.dataOut.flagNoData = self.dataIn.flagNoData |
|
40 |
self.dataOut.flag |
|
|
40 | self.dataOut.flagDiscontinuousBlock = self.dataIn.flagDiscontinuousBlock | |
|
41 | 41 | self.dataOut.utctime = self.dataIn.utctime |
|
42 | 42 | # self.dataOut.utctime = self.firstdatatime |
|
43 | 43 | self.dataOut.flagDecodeData = self.dataIn.flagDecodeData #asumo q la data esta decodificada |
@@ -12,7 +12,7 import importlib | |||
|
12 | 12 | import itertools |
|
13 | 13 | |
|
14 | 14 | from jroproc_base import ProcessingUnit, Operation |
|
15 | from model.data.jrodata import Parameters | |
|
15 | from schainpy.model.data.jrodata import Parameters | |
|
16 | 16 | |
|
17 | 17 | |
|
18 | 18 | class ParametersProc(ProcessingUnit): |
@@ -48,7 +48,7 class ParametersProc(ProcessingUnit): | |||
|
48 | 48 | self.dataOut.nCode = self.dataIn.nCode |
|
49 | 49 | self.dataOut.code = self.dataIn.code |
|
50 | 50 | # self.dataOut.nProfiles = self.dataOut.nFFTPoints |
|
51 |
self.dataOut.flag |
|
|
51 | self.dataOut.flagDiscontinuousBlock = self.dataIn.flagDiscontinuousBlock | |
|
52 | 52 | self.dataOut.utctime = self.firstdatatime |
|
53 | 53 | self.dataOut.flagDecodeData = self.dataIn.flagDecodeData #asumo q la data esta decodificada |
|
54 | 54 | self.dataOut.flagDeflipData = self.dataIn.flagDeflipData #asumo q la data esta sin flip |
@@ -2,8 +2,8 import numpy | |||
|
2 | 2 | import math |
|
3 | 3 | |
|
4 | 4 | from jroproc_base import ProcessingUnit, Operation |
|
5 | from model.data.jrodata import Spectra | |
|
6 | from model.data.jrodata import hildebrand_sekhon | |
|
5 | from schainpy.model.data.jrodata import Spectra | |
|
6 | from schainpy.model.data.jrodata import hildebrand_sekhon | |
|
7 | 7 | |
|
8 | 8 | class SpectraProc(ProcessingUnit): |
|
9 | 9 | |
@@ -37,7 +37,7 class SpectraProc(ProcessingUnit): | |||
|
37 | 37 | self.dataOut.code = self.dataIn.code |
|
38 | 38 | self.dataOut.nProfiles = self.dataOut.nFFTPoints |
|
39 | 39 | # self.dataOut.channelIndexList = self.dataIn.channelIndexList |
|
40 |
self.dataOut.flag |
|
|
40 | self.dataOut.flagDiscontinuousBlock = self.dataIn.flagDiscontinuousBlock | |
|
41 | 41 | self.dataOut.utctime = self.firstdatatime |
|
42 | 42 | self.dataOut.flagDecodeData = self.dataIn.flagDecodeData #asumo q la data esta decodificada |
|
43 | 43 | self.dataOut.flagDeflipData = self.dataIn.flagDeflipData #asumo q la data esta sin flip |
@@ -114,7 +114,8 class SpectraProc(ProcessingUnit): | |||
|
114 | 114 | raise ValueError, "This SpectraProc.run() need nFFTPoints input variable" |
|
115 | 115 | |
|
116 | 116 | if nProfiles == None: |
|
117 | raise ValueError, "This SpectraProc.run() need nProfiles input variable" | |
|
117 | nProfiles = nFFTPoints | |
|
118 | # raise ValueError, "This SpectraProc.run() need nProfiles input variable" | |
|
118 | 119 | |
|
119 | 120 | |
|
120 | 121 | if ippFactor == None: |
@@ -1,8 +1,7 | |||
|
1 | 1 | import numpy |
|
2 | 2 | |
|
3 | 3 | from jroproc_base import ProcessingUnit, Operation |
|
4 | from model.data.jrodata import Voltage | |
|
5 | ||
|
4 | from schainpy.model.data.jrodata import Voltage | |
|
6 | 5 | |
|
7 | 6 | class VoltageProc(ProcessingUnit): |
|
8 | 7 | |
@@ -207,7 +206,7 class VoltageProc(ProcessingUnit): | |||
|
207 | 206 | return 1 |
|
208 | 207 | |
|
209 | 208 | |
|
210 |
def filterByHeights(self, window |
|
|
209 | def filterByHeights(self, window): | |
|
211 | 210 | |
|
212 | 211 | deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0] |
|
213 | 212 | |
@@ -233,8 +232,17 class VoltageProc(ProcessingUnit): | |||
|
233 | 232 | self.dataOut.data = buffer.copy() |
|
234 | 233 | self.dataOut.heightList = numpy.arange(self.dataOut.heightList[0],newdelta*(self.dataOut.nHeights-r)/window,newdelta) |
|
235 | 234 | self.dataOut.windowOfFilter = window |
|
235 | ||
|
236 | def setH0(self, h0, deltaHeight = None): | |
|
236 | 237 | |
|
237 | return 1 | |
|
238 | if not deltaHeight: | |
|
239 | deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0] | |
|
240 | ||
|
241 | nHeights = self.dataOut.nHeights | |
|
242 | ||
|
243 | newHeiRange = h0 + numpy.arange(nHeights)*deltaHeight | |
|
244 | ||
|
245 | self.dataOut.heightList = newHeiRange | |
|
238 | 246 | |
|
239 | 247 | def deFlip(self, channelList = []): |
|
240 | 248 | |
@@ -267,8 +275,6 class VoltageProc(ProcessingUnit): | |||
|
267 | 275 | |
|
268 | 276 | self.dataOut.data = data |
|
269 | 277 | |
|
270 | ||
|
271 | ||
|
272 | 278 | def setRadarFrequency(self, frequency=None): |
|
273 | 279 | |
|
274 | 280 | if frequency != None: |
@@ -571,13 +577,15 class Decoder(Operation): | |||
|
571 | 577 | |
|
572 | 578 | def convolutionInFreqOpt(self, data): |
|
573 | 579 | |
|
574 | fft_code = self.fft_code[self.__profIndex].reshape(1,-1) | |
|
575 |
|
|
|
576 | data = cfunctions.decoder(fft_code, data) | |
|
577 | ||
|
578 | datadec = data#[:,:] | |
|
579 | ||
|
580 |
|
|
|
580 | raise NotImplementedError | |
|
581 | ||
|
582 | # fft_code = self.fft_code[self.__profIndex].reshape(1,-1) | |
|
583 | # | |
|
584 | # data = cfunctions.decoder(fft_code, data) | |
|
585 | # | |
|
586 | # datadec = data#[:,:] | |
|
587 | # | |
|
588 | # return datadec | |
|
581 | 589 | |
|
582 | 590 | def convolutionInTime(self, data): |
|
583 | 591 | |
@@ -887,4 +895,140 class Reshaper(Operation): | |||
|
887 | 895 | |
|
888 | 896 | dataOut.nProfiles = dataOut.data.shape[1] |
|
889 | 897 | |
|
890 | dataOut.ippSeconds *= factor No newline at end of file | |
|
898 | dataOut.ippSeconds *= factor | |
|
899 | ||
|
900 | import collections | |
|
901 | from scipy.stats import mode | |
|
902 | ||
|
903 | class Synchronize(Operation): | |
|
904 | ||
|
905 | isConfig = False | |
|
906 | __profIndex = 0 | |
|
907 | ||
|
908 | def __init__(self): | |
|
909 | ||
|
910 | Operation.__init__(self) | |
|
911 | # self.isConfig = False | |
|
912 | self.__powBuffer = None | |
|
913 | self.__startIndex = 0 | |
|
914 | self.__pulseFound = False | |
|
915 | ||
|
916 | def __findTxPulse(self, dataOut, channel=0, pulse_with = None): | |
|
917 | ||
|
918 | #Read data | |
|
919 | ||
|
920 | powerdB = dataOut.getPower(channel = channel) | |
|
921 | noisedB = dataOut.getNoise(channel = channel)[0] | |
|
922 | ||
|
923 | self.__powBuffer.extend(powerdB.flatten()) | |
|
924 | ||
|
925 | dataArray = numpy.array(self.__powBuffer) | |
|
926 | ||
|
927 | filteredPower = numpy.correlate(dataArray, dataArray[0:self.__nSamples], "same") | |
|
928 | ||
|
929 | maxValue = numpy.nanmax(filteredPower) | |
|
930 | ||
|
931 | if maxValue < noisedB + 10: | |
|
932 | #No se encuentra ningun pulso de transmision | |
|
933 | return None | |
|
934 | ||
|
935 | maxValuesIndex = numpy.where(filteredPower > maxValue - 0.1*abs(maxValue))[0] | |
|
936 | ||
|
937 | if len(maxValuesIndex) < 2: | |
|
938 | #Solo se encontro un solo pulso de transmision de un baudio, esperando por el siguiente TX | |
|
939 | return None | |
|
940 | ||
|
941 | phasedMaxValuesIndex = maxValuesIndex - self.__nSamples | |
|
942 | ||
|
943 | #Seleccionar solo valores con un espaciamiento de nSamples | |
|
944 | pulseIndex = numpy.intersect1d(maxValuesIndex, phasedMaxValuesIndex) | |
|
945 | ||
|
946 | if len(pulseIndex) < 2: | |
|
947 | #Solo se encontro un pulso de transmision con ancho mayor a 1 | |
|
948 | return None | |
|
949 | ||
|
950 | spacing = pulseIndex[1:] - pulseIndex[:-1] | |
|
951 | ||
|
952 | #remover senales que se distancien menos de 10 unidades o muestras | |
|
953 | #(No deberian existir IPP menor a 10 unidades) | |
|
954 | ||
|
955 | realIndex = numpy.where(spacing > 10 )[0] | |
|
956 | ||
|
957 | if len(realIndex) < 2: | |
|
958 | #Solo se encontro un pulso de transmision con ancho mayor a 1 | |
|
959 | return None | |
|
960 | ||
|
961 | #Eliminar pulsos anchos (deja solo la diferencia entre IPPs) | |
|
962 | realPulseIndex = pulseIndex[realIndex] | |
|
963 | ||
|
964 | period = mode(realPulseIndex[1:] - realPulseIndex[:-1])[0][0] | |
|
965 | ||
|
966 | print "IPP = %d samples" %period | |
|
967 | ||
|
968 | self.__newNSamples = dataOut.nHeights #int(period) | |
|
969 | self.__startIndex = int(realPulseIndex[0]) | |
|
970 | ||
|
971 | return 1 | |
|
972 | ||
|
973 | ||
|
974 | def setup(self, nSamples, nChannels, buffer_size = 4): | |
|
975 | ||
|
976 | self.__powBuffer = collections.deque(numpy.zeros( buffer_size*nSamples,dtype=numpy.float), | |
|
977 | maxlen = buffer_size*nSamples) | |
|
978 | ||
|
979 | bufferList = [] | |
|
980 | ||
|
981 | for i in range(nChannels): | |
|
982 | bufferByChannel = collections.deque(numpy.zeros( buffer_size*nSamples, dtype=numpy.complex) + numpy.NAN, | |
|
983 | maxlen = buffer_size*nSamples) | |
|
984 | ||
|
985 | bufferList.append(bufferByChannel) | |
|
986 | ||
|
987 | self.__nSamples = nSamples | |
|
988 | self.__nChannels = nChannels | |
|
989 | self.__bufferList = bufferList | |
|
990 | ||
|
991 | def run(self, dataOut, channel = 0): | |
|
992 | ||
|
993 | if not self.isConfig: | |
|
994 | nSamples = dataOut.nHeights | |
|
995 | nChannels = dataOut.nChannels | |
|
996 | self.setup(nSamples, nChannels) | |
|
997 | self.isConfig = True | |
|
998 | ||
|
999 | #Append new data to internal buffer | |
|
1000 | for thisChannel in range(self.__nChannels): | |
|
1001 | bufferByChannel = self.__bufferList[thisChannel] | |
|
1002 | bufferByChannel.extend(dataOut.data[thisChannel]) | |
|
1003 | ||
|
1004 | if self.__pulseFound: | |
|
1005 | self.__startIndex -= self.__nSamples | |
|
1006 | ||
|
1007 | #Finding Tx Pulse | |
|
1008 | if not self.__pulseFound: | |
|
1009 | indexFound = self.__findTxPulse(dataOut, channel) | |
|
1010 | ||
|
1011 | if indexFound == None: | |
|
1012 | dataOut.flagNoData = True | |
|
1013 | return | |
|
1014 | ||
|
1015 | self.__arrayBuffer = numpy.zeros((self.__nChannels, self.__newNSamples), dtype = numpy.complex) | |
|
1016 | self.__pulseFound = True | |
|
1017 | self.__startIndex = indexFound | |
|
1018 | ||
|
1019 | #If pulse was found ... | |
|
1020 | for thisChannel in range(self.__nChannels): | |
|
1021 | bufferByChannel = self.__bufferList[thisChannel] | |
|
1022 | #print self.__startIndex | |
|
1023 | x = numpy.array(bufferByChannel) | |
|
1024 | self.__arrayBuffer[thisChannel] = x[self.__startIndex:self.__startIndex+self.__newNSamples] | |
|
1025 | ||
|
1026 | deltaHeight = dataOut.heightList[1] - dataOut.heightList[0] | |
|
1027 | dataOut.heightList = numpy.arange(self.__newNSamples)*deltaHeight | |
|
1028 | # dataOut.ippSeconds = (self.__newNSamples / deltaHeight)/1e6 | |
|
1029 | ||
|
1030 | dataOut.data = self.__arrayBuffer | |
|
1031 | ||
|
1032 | self.__startIndex += self.__newNSamples | |
|
1033 | ||
|
1034 | return No newline at end of file |
@@ -0,0 +1,7 | |||
|
1 | ''' | |
|
2 | ||
|
3 | $Author: murco $ | |
|
4 | $Id: Processor.py 1 2012-11-12 18:56:07Z murco $ | |
|
5 | ''' | |
|
6 | ||
|
7 | from jroutils_ftp import * No newline at end of file |
@@ -5,7 +5,7 import os | |||
|
5 | 5 | import glob |
|
6 | 6 | import ftplib |
|
7 | 7 | import multiprocessing |
|
8 | from model.proc.jroproc_base import ProcessingUnit, Operation | |
|
8 | from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation | |
|
9 | 9 | |
|
10 | 10 | class FTP(object): |
|
11 | 11 | """ |
@@ -1,3 +1,8 | |||
|
1 | import os, sys | |
|
2 | ||
|
3 | path = os.path.split(os.getcwd())[0] | |
|
4 | sys.path.append(path) | |
|
5 | ||
|
1 | 6 | from controller import * |
|
2 | 7 | |
|
3 | 8 | if __name__ == '__main__': |
@@ -9,148 +14,86 if __name__ == '__main__': | |||
|
9 | 14 | |
|
10 | 15 | controllerObj.setup(id = '191', name='test01', description=desc) |
|
11 | 16 | |
|
12 | readUnitConfObj = controllerObj.addReadUnit(datatype='Voltage', | |
|
13 |
path='/ |
|
|
14 |
startDate='201 |
|
|
15 |
endDate='201 |
|
|
16 |
startTime=' |
|
|
17 |
endTime=' |
|
|
17 | readUnitConfObj = controllerObj.addReadUnit(datatype='VoltageReader', | |
|
18 | path='/Volumes/SOUSY/', | |
|
19 | startDate='2014/10/28', | |
|
20 | endDate='2014/10/28', | |
|
21 | startTime='15:40:00', | |
|
22 | endTime='16:20:00', | |
|
18 | 23 | online=0, |
|
19 | 24 | walk=1) |
|
20 | 25 | |
|
21 |
|
|
|
22 | ||
|
23 | procUnitConfObj0 = controllerObj.addProcUnit(datatype='Voltage', inputId=readUnitConfObj.getId()) | |
|
26 | opObj00 = readUnitConfObj.addOperation(name='printNumberOfBlock') | |
|
24 | 27 | |
|
25 | opObj10 = procUnitConfObj0.addOperation(name='selectChannels') | |
|
26 | opObj10.addParameter(name='channelList', value='0,1,2', format='intlist') | |
|
27 | # | |
|
28 | opObj10 = procUnitConfObj0.addOperation(name='selectHeights') | |
|
29 | opObj10.addParameter(name='minHei', value='140', format='float') | |
|
30 | opObj10.addParameter(name='maxHei', value='180', format='float') | |
|
28 | procUnitConfObj0 = controllerObj.addProcUnit(datatype='VoltageProc', | |
|
29 | inputId=readUnitConfObj.getId()) | |
|
31 | 30 | |
|
32 |
opObj1 |
|
|
33 | ||
|
34 | opObj12 = procUnitConfObj0.addOperation(name='CohInt', optype='other') | |
|
35 | opObj12.addParameter(name='n', value='4', format='int') | |
|
31 | # opObj10 = procUnitConfObj0.addOperation(name='Synchronize', optype='external') | |
|
36 | 32 | |
|
37 |
|
|
|
38 |
|
|
|
39 |
|
|
|
40 | # opObj11.addParameter(name='zmin', value='40', format='int') | |
|
41 | # opObj11.addParameter(name='zmax', value='90', format='int') | |
|
42 | ||
|
43 | procUnitConfObj1 = controllerObj.addProcUnit(datatype='Spectra', inputId=procUnitConfObj0.getId()) | |
|
44 | procUnitConfObj1.addParameter(name='nFFTPoints', value='16', format='int') | |
|
45 | procUnitConfObj1.addParameter(name='pairsList', value='(0,1),(0,2),(1,2)', format='pairslist') | |
|
46 | ||
|
33 | opObj10 = procUnitConfObj0.addOperation(name='selectHeights') | |
|
34 | opObj10.addParameter(name='minHei', value='0', format='float') | |
|
35 | opObj10.addParameter(name='maxHei', value='8', format='float') | |
|
47 | 36 | |
|
48 |
opObj1 |
|
|
49 |
opObj1 |
|
|
50 | ||
|
51 |
opObj1 |
|
|
52 |
opObj1 |
|
|
53 |
opObj1 |
|
|
54 |
opObj1 |
|
|
55 | opObj11.addParameter(name='zmax', value='90', format='int') | |
|
56 | opObj11.addParameter(name='showprofile', value='1', format='int') | |
|
57 | opObj11.addParameter(name='figpath', value='/home/roj-idl71/Data/RAWDATA/BIESTATIC/RAWDATA_8CH/graphs') | |
|
58 | opObj11.addParameter(name='save', value='1', format='int') | |
|
37 | opObj10 = procUnitConfObj0.addOperation(name='filterByHeights') | |
|
38 | opObj10.addParameter(name='window', value='2', format='float') | |
|
39 | ||
|
40 | opObj10 = procUnitConfObj0.addOperation(name='Decoder', optype='external') | |
|
41 | opObj10.addParameter(name='code', value='1,-1', format='intlist') | |
|
42 | opObj10.addParameter(name='nCode', value='2', format='float') | |
|
43 | opObj10.addParameter(name='nBaud', value='1', format='float') | |
|
44 | ||
|
59 | 45 | |
|
60 |
opObj1 |
|
|
61 |
opObj1 |
|
|
62 | opObj11.addParameter(name='wintitle', value='CrossSpectraPlot', format='str') | |
|
63 | opObj11.addParameter(name='zmin', value='35', format='int') | |
|
64 | opObj11.addParameter(name='zmax', value='90', format='int') | |
|
65 | opObj11.addParameter(name='figpath', value='/home/roj-idl71/Data/RAWDATA/BIESTATIC/RAWDATA_8CH/graphs') | |
|
66 | opObj11.addParameter(name='save', value='1', format='int') | |
|
67 | ||
|
46 | opObj10 = procUnitConfObj0.addOperation(name='CohInt', optype='external') | |
|
47 | opObj10.addParameter(name='n', value='1296', format='float') | |
|
68 | 48 | |
|
69 | opObj11 = procUnitConfObj1.addOperation(name='CoherenceMap', optype='other') | |
|
70 | opObj11.addParameter(name='idfigure', value='3', format='int') | |
|
71 | opObj11.addParameter(name='wintitle', value='CoherenciaMap', format='str') | |
|
72 | # opObj11.addParameter(name='timerange', value=str(60), format='int') | |
|
73 | opObj11.addParameter(name='figpath', value='/home/roj-idl71/Data/RAWDATA/BIESTATIC/RAWDATA_8CH/graphs') | |
|
49 | procUnitConfObj1 = controllerObj.addProcUnit(datatype='SpectraProc', | |
|
50 | inputId=procUnitConfObj0.getId()) | |
|
51 | ||
|
52 | #Creating a processing object with its parameters | |
|
53 | #schainpy.model.proc.jroproc_spectra.SpectraProc.run() | |
|
54 | #If you need to add more parameters can use the "addParameter method" | |
|
55 | procUnitConfObj1.addParameter(name='nFFTPoints', value='128', format='int') | |
|
56 | ||
|
57 | opObj10 = procUnitConfObj1.addOperation(name='IncohInt', optype='external') | |
|
58 | opObj10.addParameter(name='n', value='2', format='float') | |
|
59 | ||
|
60 | #Using internal methods | |
|
61 | #schainpy.model.proc.jroproc_spectra.SpectraProc.selectChannels() | |
|
62 | # opObj10 = procUnitConfObj1.addOperation(name='selectChannels') | |
|
63 | # opObj10.addParameter(name='channelList', value='0,1', format='intlist') | |
|
64 | ||
|
65 | #Using internal methods | |
|
66 | #schainpy.model.proc.jroproc_spectra.SpectraProc.selectHeights() | |
|
67 | # opObj10 = procUnitConfObj1.addOperation(name='selectHeights') | |
|
68 | # opObj10.addParameter(name='minHei', value='90', format='float') | |
|
69 | # opObj10.addParameter(name='maxHei', value='180', format='float') | |
|
70 | ||
|
71 | #Using external methods (new modules) | |
|
72 | # #schainpy.model.proc.jroproc_spectra.IncohInt.setup() | |
|
73 | # opObj12 = procUnitConfObj1.addOperation(name='IncohInt', optype='other') | |
|
74 | # opObj12.addParameter(name='n', value='1', format='int') | |
|
75 | ||
|
76 | #Using external methods (new modules) | |
|
77 | #schainpy.model.graphics.jroplot_spectra.SpectraPlot.setup() | |
|
78 | opObj11 = procUnitConfObj1.addOperation(name='SpectraPlot', optype='external') | |
|
79 | opObj11.addParameter(name='id', value='11', format='int') | |
|
80 | opObj11.addParameter(name='wintitle', value='SpectraPlot', format='str') | |
|
81 | opObj11.addParameter(name='zmin', value='-60', format='int') | |
|
82 | opObj11.addParameter(name='zmax', value='10', format='int') | |
|
74 | 83 | opObj11.addParameter(name='save', value='1', format='int') |
|
75 | 84 | |
|
85 | #Using external methods (new modules) | |
|
86 | #schainpy.model.graphics.jroplot_spectra.RTIPlot.setup() | |
|
76 | 87 | opObj11 = procUnitConfObj1.addOperation(name='RTIPlot', optype='other') |
|
77 |
opObj11.addParameter(name='id |
|
|
88 | opObj11.addParameter(name='id', value='30', format='int') | |
|
78 | 89 | opObj11.addParameter(name='wintitle', value='RTI', format='str') |
|
79 |
|
|
|
80 |
opObj11.addParameter(name='zm |
|
|
81 |
opObj11.addParameter(name=' |
|
|
82 | opObj11.addParameter(name='figpath', value='/home/roj-idl71/Data/RAWDATA/BIESTATIC/RAWDATA_8CH/graphs') | |
|
90 | opObj11.addParameter(name='zmin', value='-60', format='int') | |
|
91 | opObj11.addParameter(name='zmax', value='-10', format='int') | |
|
92 | opObj11.addParameter(name='showprofile', value='1', format='int') | |
|
93 | # opObj11.addParameter(name='timerange', value=str(5*60*60*60), format='int') | |
|
94 | opObj11.addParameter(name='xmin', value='14', format='float') | |
|
95 | opObj11.addParameter(name='xmax', value='23.9', format='float') | |
|
83 | 96 | opObj11.addParameter(name='save', value='1', format='int') |
|
84 | ||
|
85 | # procUnitConfObj2 = controllerObj.addProcUnit(datatype='Voltage', inputId=procUnitConfObj0.getId()) | |
|
86 | # | |
|
87 | # opObj12 = procUnitConfObj2.addOperation(name='CohInt', optype='other') | |
|
88 | # opObj12.addParameter(name='n', value='2', format='int') | |
|
89 | # opObj12.addParameter(name='overlapping', value='1', format='int') | |
|
90 | # | |
|
91 | # procUnitConfObj3 = controllerObj.addProcUnit(datatype='Spectra', inputId=procUnitConfObj2.getId()) | |
|
92 | # procUnitConfObj3.addParameter(name='nFFTPoints', value='32', format='int') | |
|
93 | # | |
|
94 | # opObj11 = procUnitConfObj3.addOperation(name='SpectraPlot', optype='other') | |
|
95 | # opObj11.addParameter(name='idfigure', value='2', format='int') | |
|
96 | # opObj11.addParameter(name='wintitle', value='SpectraPlot1', format='str') | |
|
97 | # opObj11.addParameter(name='zmin', value='40', format='int') | |
|
98 | # opObj11.addParameter(name='zmax', value='90', format='int') | |
|
99 | # opObj11.addParameter(name='showprofile', value='1', format='int') | |
|
100 | ||
|
101 | # opObj11 = procUnitConfObj1.addOperation(name='RTIPlot', optype='other') | |
|
102 | # opObj11.addParameter(name='idfigure', value='10', format='int') | |
|
103 | # opObj11.addParameter(name='wintitle', value='RTI', format='str') | |
|
104 | ## opObj11.addParameter(name='xmin', value='21', format='float') | |
|
105 | ## opObj11.addParameter(name='xmax', value='22', format='float') | |
|
106 | # opObj11.addParameter(name='zmin', value='40', format='int') | |
|
107 | # opObj11.addParameter(name='zmax', value='90', format='int') | |
|
108 | # opObj11.addParameter(name='showprofile', value='1', format='int') | |
|
109 | # opObj11.addParameter(name='timerange', value=str(60), format='int') | |
|
110 | ||
|
111 | # opObj10 = procUnitConfObj1.addOperation(name='selectChannels') | |
|
112 | # opObj10.addParameter(name='channelList', value='0,2,4,6', format='intlist') | |
|
113 | # | |
|
114 | # opObj12 = procUnitConfObj1.addOperation(name='IncohInt', optype='other') | |
|
115 | # opObj12.addParameter(name='n', value='2', format='int') | |
|
116 | # | |
|
117 | # opObj11 = procUnitConfObj1.addOperation(name='SpectraPlot', optype='other') | |
|
118 | # opObj11.addParameter(name='idfigure', value='2', format='int') | |
|
119 | # opObj11.addParameter(name='wintitle', value='SpectraPlot10', format='str') | |
|
120 | # opObj11.addParameter(name='zmin', value='70', format='int') | |
|
121 | # opObj11.addParameter(name='zmax', value='90', format='int') | |
|
122 | # | |
|
123 | # opObj10 = procUnitConfObj1.addOperation(name='selectChannels') | |
|
124 | # opObj10.addParameter(name='channelList', value='2,6', format='intlist') | |
|
125 | # | |
|
126 | # opObj12 = procUnitConfObj1.addOperation(name='IncohInt', optype='other') | |
|
127 | # opObj12.addParameter(name='n', value='2', format='int') | |
|
128 | # | |
|
129 | # opObj11 = procUnitConfObj1.addOperation(name='SpectraPlot', optype='other') | |
|
130 | # opObj11.addParameter(name='idfigure', value='3', format='int') | |
|
131 | # opObj11.addParameter(name='wintitle', value='SpectraPlot10', format='str') | |
|
132 | # opObj11.addParameter(name='zmin', value='70', format='int') | |
|
133 | # opObj11.addParameter(name='zmax', value='90', format='int') | |
|
134 | ||
|
135 | ||
|
136 | # opObj12 = procUnitConfObj1.addOperation(name='decoder') | |
|
137 | # opObj12.addParameter(name='ncode', value='2', format='int') | |
|
138 | # opObj12.addParameter(name='nbauds', value='8', format='int') | |
|
139 | # opObj12.addParameter(name='code0', value='001110011', format='int') | |
|
140 | # opObj12.addParameter(name='code1', value='001110011', format='int') | |
|
141 | ||
|
142 | ||
|
143 | ||
|
144 | # procUnitConfObj2 = controllerObj.addProcUnit(datatype='Spectra', inputId=procUnitConfObj1.getId()) | |
|
145 | # | |
|
146 | # opObj21 = procUnitConfObj2.addOperation(name='IncohInt', optype='other') | |
|
147 | # opObj21.addParameter(name='n', value='2', format='int') | |
|
148 | # | |
|
149 | # opObj11 = procUnitConfObj2.addOperation(name='SpectraPlot', optype='other') | |
|
150 | # opObj11.addParameter(name='idfigure', value='4', format='int') | |
|
151 | # opObj11.addParameter(name='wintitle', value='SpectraPlot OBJ 2', format='str') | |
|
152 | # opObj11.addParameter(name='zmin', value='70', format='int') | |
|
153 | # opObj11.addParameter(name='zmax', value='90', format='int') | |
|
154 | 97 | |
|
155 | 98 | print "Escribiendo el archivo XML" |
|
156 | 99 |
|
1 | NO CONTENT: file was removed |
|
1 | NO CONTENT: file was removed |
|
1 | NO CONTENT: file was removed |
|
1 | NO CONTENT: file was removed |
|
1 | NO CONTENT: file was removed |
|
1 | NO CONTENT: file was removed |
|
1 | NO CONTENT: file was removed |
|
1 | NO CONTENT: file was removed |
|
1 | NO CONTENT: file was removed |
|
1 | NO CONTENT: file was removed |
|
1 | NO CONTENT: file was removed |
|
1 | NO CONTENT: file was removed |
|
1 | NO CONTENT: file was removed |
|
1 | NO CONTENT: file was removed |
|
1 | NO CONTENT: file was removed |
|
1 | NO CONTENT: file was removed |
|
1 | NO CONTENT: file was removed |
|
1 | NO CONTENT: file was removed |
|
1 | NO CONTENT: file was removed |
|
1 | NO CONTENT: file was removed |
|
1 | NO CONTENT: file was removed |
|
1 | NO CONTENT: file was removed |
|
1 | NO CONTENT: file was removed |
|
1 | NO CONTENT: file was removed |
|
1 | NO CONTENT: file was removed |
General Comments 0
You need to be logged in to leave comments.
Login now