fixing merge conflicts
jespinoza - r1090:36f30f86830d (merge)
@@ -0,0 +1,751
1
2 '''
3 Created on Jul 3, 2014
4
5 @author: roj-idl71
6 '''
7 # SUBCHANNELS INSTEAD OF CHANNELS
8 # BENCHMARKS -> PROBLEMS WITH LARGE FILES -> INCONSISTENT OVER TIME
9 # VERSION UPDATE
10 # HEADERS
11 # WRITER MODULE
12 # METADATA
13
14 import os
15 import datetime
16 import numpy
17 import timeit
18 from profilehooks import coverage, profile
19 from fractions import Fraction
20
21 try:
22 from gevent import sleep
23 except:
24 from time import sleep
25
26 from schainpy.model.data.jroheaderIO import RadarControllerHeader, SystemHeader
27 from schainpy.model.data.jrodata import Voltage
28 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
29 from time import time
30
31 import cPickle
32 try:
33 import digital_rf
34 except:
35 print 'You should install "digital_rf" module if you want to read Digital RF data'
36
37 class DigitalRFReader(ProcessingUnit):
38 '''
39 classdocs
40 '''
41
42 def __init__(self, **kwargs):
43 '''
44 Constructor
45 '''
46
47 ProcessingUnit.__init__(self, **kwargs)
48
49 self.dataOut = Voltage()
50 self.__printInfo = True
51 self.__flagDiscontinuousBlock = False
52 self.__bufferIndex = 9999999
53 self.__ippKm = None
54 self.__codeType = 0
55 self.__nCode = None
56 self.__nBaud = None
57 self.__code = None
58 self.dtype = None
59
60 def close(self):
61 print 'Average of reading from digital rf format is ', self.oldAverage * 1000
62 return
63
64 def __getCurrentSecond(self):
65
66 return self.__thisUnixSample/self.__sample_rate
67
68 thisSecond = property(__getCurrentSecond, "I'm the 'thisSecond' property.")
69
70 def __setFileHeader(self):
71 '''
72 Every parameter of the dataOut object (header, no data) is initialized in this method
73 '''
74 ippSeconds = 1.0*self.__nSamples/self.__sample_rate
75
76 nProfiles = 1.0/ippSeconds # Number of profiles in one second
77
78 try:
79 self.dataOut.radarControllerHeaderObj = RadarControllerHeader(self.__radarControllerHeader)
80 except:
81 self.dataOut.radarControllerHeaderObj = RadarControllerHeader(
82 txA=0,
83 txB=0,
84 nWindows=1,
85 nHeights=self.__nSamples,
86 firstHeight=self.__firstHeigth,
87 deltaHeight=self.__deltaHeigth,
88 codeType=self.__codeType,
89 nCode=self.__nCode, nBaud=self.__nBaud,
90 code = self.__code)
91
92 try:
93 self.dataOut.systemHeaderObj = SystemHeader(self.__systemHeader)
94 except:
95 self.dataOut.systemHeaderObj = SystemHeader(nSamples=self.__nSamples,
96 nProfiles=nProfiles,
97 nChannels=len(self.__channelList),
98 adcResolution=14)
99 self.dataOut.type = "Voltage"
100
101 self.dataOut.data = None
102
103 self.dataOut.dtype = self.dtype
104
105 # self.dataOut.nChannels = 0
106
107 # self.dataOut.nHeights = 0
108
109 self.dataOut.nProfiles = int(nProfiles)
110
111 self.dataOut.heightList = self.__firstHeigth + numpy.arange(self.__nSamples, dtype = numpy.float)*self.__deltaHeigth
112
113 self.dataOut.channelList = range(self.__num_subchannels)
114
115 self.dataOut.blocksize = self.dataOut.getNChannels() * self.dataOut.getNHeights()
116
117 # self.dataOut.channelIndexList = None
118
119 self.dataOut.flagNoData = True
120
121 self.dataOut.flagDataAsBlock = False
122 # Set to TRUE if the data is discontinuous
123 self.dataOut.flagDiscontinuousBlock = False
124
125 self.dataOut.utctime = None
126
127 self.dataOut.timeZone = self.__timezone/60 # timezone like jroheader, difference in minutes between UTC and localtime
128
129 self.dataOut.dstFlag = 0
130
131 self.dataOut.errorCount = 0
132
133 try:
134 self.dataOut.nCohInt = self.fixed_metadata_dict.get('nCohInt', 1)
135
136 self.dataOut.flagDecodeData = self.fixed_metadata_dict['flagDecodeData'] # assume the data is already decoded
137
138 self.dataOut.flagDeflipData = self.fixed_metadata_dict['flagDeflipData'] # assume the data has not been flipped
139
140 self.dataOut.flagShiftFFT = self.fixed_metadata_dict['flagShiftFFT']
141
142 self.dataOut.useLocalTime = self.fixed_metadata_dict['useLocalTime']
143 except:
144 pass
145
146
147 self.dataOut.ippSeconds = ippSeconds
148
149 # Time interval between profiles
150 # self.dataOut.timeInterval = self.dataOut.ippSeconds * self.dataOut.nCohInt
151
152 self.dataOut.frequency = self.__frequency
153
154 self.dataOut.realtime = self.__online
155
156 def findDatafiles(self, path, startDate=None, endDate=None):
157
158 if not os.path.isdir(path):
159 return []
160
161 try:
162 digitalReadObj = digital_rf.DigitalRFReader(path, load_all_metadata=True)
163 except:
164 digitalReadObj = digital_rf.DigitalRFReader(path)
165
166 channelNameList = digitalReadObj.get_channels()
167
168 if not channelNameList:
169 return []
170
171 metadata_dict = digitalReadObj.get_rf_file_metadata(channelNameList[0])
172
173 sample_rate = metadata_dict['sample_rate'][0]
174
175 this_metadata_file = digitalReadObj.get_metadata(channelNameList[0])
176
177 try:
178 timezone = this_metadata_file['timezone'].value
179 except:
180 timezone = 0
181
182 startUTCSecond, endUTCSecond = digitalReadObj.get_bounds(channelNameList[0])/sample_rate - timezone
183
184 startDatetime = datetime.datetime.utcfromtimestamp(startUTCSecond)
185 endDatatime = datetime.datetime.utcfromtimestamp(endUTCSecond)
186
187 if not startDate:
188 startDate = startDatetime.date()
189
190 if not endDate:
191 endDate = endDatatime.date()
192
193 dateList = []
194
195 thisDatetime = startDatetime
196
197 while(thisDatetime<=endDatatime):
198
199 thisDate = thisDatetime.date()
200
201 if thisDate < startDate:
202 continue
203
204 if thisDate > endDate:
205 break
206
207 dateList.append(thisDate)
208 thisDatetime += datetime.timedelta(1)
209
210 return dateList
211
212 def setup(self, path = None,
213 startDate = None,
214 endDate = None,
215 startTime = datetime.time(0,0,0),
216 endTime = datetime.time(23,59,59),
217 channelList = None,
218 nSamples = None,
219 online = False,
220 delay = 60,
221 buffer_size = 1024,
222 ippKm=None,
223 **kwargs):
224 '''
225 In this method we should set all initial parameters.
226
227 Inputs:
228 path
229 startDate
230 endDate
231 startTime
232 endTime
233 set
234 expLabel
235 ext
236 online
237 delay
238 '''
239 self.i = 0
240 if not os.path.isdir(path):
241 raise ValueError, "[Reading] Directory %s does not exist" %path
242
243 try:
244 self.digitalReadObj = digital_rf.DigitalRFReader(path, load_all_metadata=True)
245 except:
246 self.digitalReadObj = digital_rf.DigitalRFReader(path)
247
248 channelNameList = self.digitalReadObj.get_channels()
249
250 if not channelNameList:
251 raise ValueError, "[Reading] Directory %s does not have any files" %path
252
253 if not channelList:
254 channelList = range(len(channelNameList))
255
256
257 ########## Reading metadata ######################
258
259 top_properties = self.digitalReadObj.get_properties(channelNameList[channelList[0]])
260
261
262 self.__num_subchannels = top_properties['num_subchannels']
263 self.__sample_rate = 1.0 * top_properties['sample_rate_numerator'] / top_properties['sample_rate_denominator']
264 # self.__samples_per_file = top_properties['samples_per_file'][0]
265 self.__deltaHeigth = 1e6*0.15/self.__sample_rate ## 0.15 km/us = c/2: sample period converted to range resolution in km
266
267 this_metadata_file = self.digitalReadObj.get_digital_metadata(channelNameList[channelList[0]])
268 metadata_bounds = this_metadata_file.get_bounds()
269 self.fixed_metadata_dict = this_metadata_file.read(metadata_bounds[0])[metadata_bounds[0]] ## GET FIRST HEADER
270
271 try:
272 self.__processingHeader = self.fixed_metadata_dict['processingHeader']
273 self.__radarControllerHeader = self.fixed_metadata_dict['radarControllerHeader']
274 self.__systemHeader = self.fixed_metadata_dict['systemHeader']
275 self.dtype = cPickle.loads(self.fixed_metadata_dict['dtype'])
276 except:
277 pass
278
279
280 self.__frequency = None
281
282 self.__frequency = self.fixed_metadata_dict.get('frequency', 1)
283
284 self.__timezone = self.fixed_metadata_dict.get('timezone', 300)
285
286
287 try:
288 nSamples = self.fixed_metadata_dict['nSamples']
289 except:
290 nSamples = None
291
292 self.__firstHeigth = 0
293
294 try:
295 codeType = self.__radarControllerHeader['codeType']
296 except:
297 codeType = 0
298
299 nCode = 1
300 nBaud = 1
301 code = numpy.ones((nCode, nBaud), dtype=numpy.int)
302
303 try:
304 if codeType:
305 nCode = self.__radarControllerHeader['nCode']
306 nBaud = self.__radarControllerHeader['nBaud']
307 code = self.__radarControllerHeader['code']
308 except:
309 pass
310
311
312 if not ippKm:
313 try:
314 # seconds to km
315 ippKm = self.__radarControllerHeader['ipp']
316 except:
317 ippKm = None
318 ####################################################
319 self.__ippKm = ippKm
320 startUTCSecond = None
321 endUTCSecond = None
322
323 if startDate:
324 startDatetime = datetime.datetime.combine(startDate, startTime)
325 startUTCSecond = (startDatetime-datetime.datetime(1970,1,1)).total_seconds() + self.__timezone
326
327 if endDate:
328 endDatetime = datetime.datetime.combine(endDate, endTime)
329 endUTCSecond = (endDatetime-datetime.datetime(1970,1,1)).total_seconds() + self.__timezone
330
331 start_index, end_index = self.digitalReadObj.get_bounds(channelNameList[channelList[0]])
332
333 if not startUTCSecond:
334 startUTCSecond = start_index/self.__sample_rate
335
336 if start_index > startUTCSecond*self.__sample_rate:
337 startUTCSecond = start_index/self.__sample_rate
338
339 if not endUTCSecond:
340 endUTCSecond = end_index/self.__sample_rate
341
342 if end_index < endUTCSecond*self.__sample_rate:
343 endUTCSecond = end_index/self.__sample_rate
344 if not nSamples:
345 if not ippKm:
346 raise ValueError, "[Reading] nSamples or ippKm should be defined"
347 nSamples = int(ippKm / (1e6*0.15/self.__sample_rate))
348 channelBoundList = []
349 channelNameListFiltered = []
350
351 for thisIndexChannel in channelList:
352 thisChannelName = channelNameList[thisIndexChannel]
353 start_index, end_index = self.digitalReadObj.get_bounds(thisChannelName)
354 channelBoundList.append((start_index, end_index))
355 channelNameListFiltered.append(thisChannelName)
356
357 self.profileIndex = 0
358 self.i= 0
359 self.__delay = delay
360
361 self.__codeType = codeType
362 self.__nCode = nCode
363 self.__nBaud = nBaud
364 self.__code = code
365
366 self.__datapath = path
367 self.__online = online
368 self.__channelList = channelList
369 self.__channelNameList = channelNameListFiltered
370 self.__channelBoundList = channelBoundList
371 self.__nSamples = nSamples
372 self.__samples_to_read = long(nSamples) # FIXED: NOW 40
373 self.__nChannels = len(self.__channelList)
374
375 self.__startUTCSecond = startUTCSecond
376 self.__endUTCSecond = endUTCSecond
377
378 self.__timeInterval = 1.0 * self.__samples_to_read/self.__sample_rate # Time interval
379
380 if online:
381 # self.__thisUnixSample = int(endUTCSecond*self.__sample_rate - 4*self.__samples_to_read)
382 startUTCSecond = numpy.floor(endUTCSecond)
383
384 self.__thisUnixSample = long(startUTCSecond*self.__sample_rate) - self.__samples_to_read ## why? the other method starts by adding samples_to_read
385
386 self.__data_buffer = numpy.zeros((self.__num_subchannels, self.__samples_to_read), dtype = numpy.complex)
387
388 self.__setFileHeader()
389 self.isConfig = True
390
391 print "[Reading] Digital RF Data was found from %s to %s " %(
392 datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone),
393 datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone)
394 )
395
396 print "[Reading] Starting process from %s to %s" %(datetime.datetime.utcfromtimestamp(startUTCSecond - self.__timezone),
397 datetime.datetime.utcfromtimestamp(endUTCSecond - self.__timezone)
398 )
399 self.oldAverage = None
400 self.count = 0
401 self.executionTime = 0
402 def __reload(self):
403 # print
404 # print "%s not in range [%s, %s]" %(
405 # datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
406 # datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone),
407 # datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone)
408 # )
409 print "[Reading] reloading metadata ..."
410
411 try:
412 self.digitalReadObj.reload(complete_update=True)
413 except:
414 self.digitalReadObj.reload()
415
416 start_index, end_index = self.digitalReadObj.get_bounds(self.__channelNameList[self.__channelList[0]])
417
418 if start_index > self.__startUTCSecond*self.__sample_rate:
419 self.__startUTCSecond = 1.0*start_index/self.__sample_rate
420
421 if end_index > self.__endUTCSecond*self.__sample_rate:
422 self.__endUTCSecond = 1.0*end_index/self.__sample_rate
423 print
424 print "[Reading] New timerange found [%s, %s] " %(
425 datetime.datetime.utcfromtimestamp(self.__startUTCSecond - self.__timezone),
426 datetime.datetime.utcfromtimestamp(self.__endUTCSecond - self.__timezone)
427 )
428
429 return True
430
431 return False
432
433 def timeit(self, toExecute):
434 t0 = time()
435 toExecute()
436 self.executionTime = time() - t0
437 if self.oldAverage is None: self.oldAverage = self.executionTime
438 self.oldAverage = (self.executionTime + self.count*self.oldAverage) / (self.count + 1.0)
439 self.count = self.count + 1.0
440 return
441
442 def __readNextBlock(self, seconds=30, volt_scale = 1):
443 '''
444 '''
445
446 # Set the next data
447 self.__flagDiscontinuousBlock = False
448 self.__thisUnixSample += self.__samples_to_read
449
450 if self.__thisUnixSample + 2*self.__samples_to_read > self.__endUTCSecond*self.__sample_rate:
451 print "[Reading] There are no more data into selected time-range"
452 if self.__online:
453 self.__reload()
454 else:
455 return False
456
457 if self.__thisUnixSample + 2*self.__samples_to_read > self.__endUTCSecond*self.__sample_rate:
458 return False
459 self.__thisUnixSample -= self.__samples_to_read
460
461 indexChannel = 0
462
463 dataOk = False
464 for thisChannelName in self.__channelNameList: ##TODO MULTIPLE CHANNELS?
465 for indexSubchannel in range(self.__num_subchannels):
466 try:
467 t0 = time()
468 result = self.digitalReadObj.read_vector_c81d(self.__thisUnixSample,
469 self.__samples_to_read,
470 thisChannelName, sub_channel=indexSubchannel)
471 self.executionTime = time() - t0
472 if self.oldAverage is None: self.oldAverage = self.executionTime
473 self.oldAverage = (self.executionTime + self.count*self.oldAverage) / (self.count + 1.0)
474 self.count = self.count + 1.0
475
476 except IOError, e:
477 #read next profile
478 self.__flagDiscontinuousBlock = True
479 print "[Reading] %s" %datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone), e
480 break
481
482 if result.shape[0] != self.__samples_to_read:
483 self.__flagDiscontinuousBlock = True
484 print "[Reading] %s: Too few samples were found, just %d/%d samples" %(datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
485 result.shape[0],
486 self.__samples_to_read)
487 break
488
489 self.__data_buffer[indexSubchannel,:] = result*volt_scale
490
491 indexChannel += 1
492
493 dataOk = True
494
495 self.__utctime = self.__thisUnixSample/self.__sample_rate
496
497 if not dataOk:
498 return False
499
500 print "[Reading] %s: %d samples <> %f sec" %(datetime.datetime.utcfromtimestamp(self.thisSecond - self.__timezone),
501 self.__samples_to_read,
502 self.__timeInterval)
503
504 self.__bufferIndex = 0
505
506 return True
507
508 def __isBufferEmpty(self):
509 return self.__bufferIndex > self.__samples_to_read - self.__nSamples #40960 - 40
510
511 def getData(self, seconds=30, nTries=5):
512
513 '''
514 This method gets the data from the files and puts it into the dataOut object
515
516 In addition, the buffer counter is increased by one.
517
518 Return:
519 data : returns one voltage profile (heights x channels) copied from the
520 buffer. Returns None if there are no more files to read.
521
522 Affected:
523 self.dataOut
524 self.profileIndex
525 self.flagDiscontinuousBlock
526 self.flagIsNewBlock
527 '''
528
529 err_counter = 0
530 self.dataOut.flagNoData = True
531
532 if self.__isBufferEmpty():
533 self.__flagDiscontinuousBlock = False
534
535 while True:
536 if self.__readNextBlock():
537 break
538 if self.__thisUnixSample > self.__endUTCSecond*self.__sample_rate:
539 return False
540
541 if self.__flagDiscontinuousBlock:
542 print '[Reading] discontinuous block found ... continue with the next block'
543 continue
544
545 if not self.__online:
546 return False
547
548 err_counter += 1
549 if err_counter > nTries:
550 return False
551
552 print '[Reading] waiting %d seconds to read a new block' %seconds
553 sleep(seconds)
554
555 self.dataOut.data = self.__data_buffer[:,self.__bufferIndex:self.__bufferIndex+self.__nSamples]
556 self.dataOut.utctime = (self.__thisUnixSample + self.__bufferIndex)/self.__sample_rate
557 self.dataOut.flagNoData = False
558 self.dataOut.flagDiscontinuousBlock = self.__flagDiscontinuousBlock
559 self.dataOut.profileIndex = self.profileIndex
560
561 self.__bufferIndex += self.__nSamples
562 self.profileIndex += 1
563
564 if self.profileIndex == self.dataOut.nProfiles:
565 self.profileIndex = 0
566
567 return True
568
569 def printInfo(self):
570 '''
571 '''
572 if self.__printInfo == False:
573 return
574
575 # self.systemHeaderObj.printInfo()
576 # self.radarControllerHeaderObj.printInfo()
577
578 self.__printInfo = False
579
580 def printNumberOfBlock(self):
581 '''
582 '''
583 return
584 # print self.profileIndex
585
586
587 def run(self, **kwargs):
588 '''
589 This method will be called many times so here you should put all your code
590 '''
591
592 if not self.isConfig:
593 self.setup(**kwargs)
594 #self.i = self.i+1
595 self.getData(seconds=self.__delay)
596
597 return
598
599 class DigitalRFWriter(Operation):
600 '''
601 classdocs
602 '''
603
604 def __init__(self, **kwargs):
605 '''
606 Constructor
607 '''
608 Operation.__init__(self, **kwargs)
609 self.metadata_dict = {}
610 self.dataOut = None
611 self.dtype = None
612
613 def setHeader(self):
614
615 self.metadata_dict['frequency'] = self.dataOut.frequency
616 self.metadata_dict['timezone'] = self.dataOut.timeZone
617 self.metadata_dict['dtype'] = cPickle.dumps(self.dataOut.dtype)
618 self.metadata_dict['nProfiles'] = self.dataOut.nProfiles
619 self.metadata_dict['heightList'] = self.dataOut.heightList
620 self.metadata_dict['channelList'] = self.dataOut.channelList
621 self.metadata_dict['flagDecodeData'] = self.dataOut.flagDecodeData
622 self.metadata_dict['flagDeflipData'] = self.dataOut.flagDeflipData
623 self.metadata_dict['flagShiftFFT'] = self.dataOut.flagShiftFFT
624 self.metadata_dict['flagDataAsBlock'] = self.dataOut.flagDataAsBlock
625 self.metadata_dict['useLocalTime'] = self.dataOut.useLocalTime
626 self.metadata_dict['nCohInt'] = self.dataOut.nCohInt
627
628 return
629
630 def setup(self, dataOut, path, frequency, fileCadence, dirCadence, metadataCadence, set=0, metadataFile='metadata', ext='.h5'):
631 '''
632 In this method we should set all initial parameters.
633 Input:
634 dataOut: Input data will also be the output data
635 '''
636 self.setHeader()
637 self.__ippSeconds = dataOut.ippSeconds
638 self.__deltaH = dataOut.getDeltaH()
639 self.__sample_rate = 1e6*0.15/self.__deltaH
640 self.__dtype = dataOut.dtype
641 if len(dataOut.dtype) == 2:
642 self.__dtype = dataOut.dtype[0]
643 self.__nSamples = dataOut.systemHeaderObj.nSamples
644 self.__nProfiles = dataOut.nProfiles
645 self.__blocks_per_file = dataOut.processingHeaderObj.dataBlocksPerFile
646
647 self.arr_data = arr_data = numpy.ones((self.__nSamples, len(self.dataOut.channelList)), dtype=[('r', self.__dtype), ('i', self.__dtype)])
648
649 file_cadence_millisecs = long(1.0 * self.__blocks_per_file * self.__nProfiles * self.__nSamples / self.__sample_rate) * 1000
650 sub_cadence_secs = file_cadence_millisecs / 500
651
652 sample_rate_fraction = Fraction(self.__sample_rate).limit_denominator()
653 sample_rate_numerator = long(sample_rate_fraction.numerator)
654 sample_rate_denominator = long(sample_rate_fraction.denominator)
655 start_global_index = dataOut.utctime * self.__sample_rate
656
657 uuid = 'prueba'
658 compression_level = 1
659 checksum = False
660 is_complex = True
661 num_subchannels = len(dataOut.channelList)
662 is_continuous = True
663 marching_periods = False
664
665 self.digitalWriteObj = digital_rf.DigitalRFWriter(path, self.__dtype, dirCadence,
666 fileCadence, start_global_index,
667 sample_rate_numerator, sample_rate_denominator, uuid, compression_level, checksum,
668 is_complex, num_subchannels, is_continuous, marching_periods)
669
670 metadata_dir = os.path.join(path, 'metadata')
671 os.system('mkdir %s' % (metadata_dir))
672
673 self.digitalMetadataWriteObj = digital_rf.DigitalMetadataWriter(metadata_dir, dirCadence, 1, ##236, file_cadence_millisecs / 1000
674 sample_rate_numerator, sample_rate_denominator,
675 metadataFile)
676
677
678 self.isConfig = True
679 self.currentSample = 0
680 self.oldAverage = 0
681 self.count = 0
682 return
683
684 def writeMetadata(self):
685 print '[Writing] - Writing metadata'
686 start_idx = self.__sample_rate * self.dataOut.utctime
687
688 self.metadata_dict['processingHeader'] = self.dataOut.processingHeaderObj.getAsDict()
689 self.metadata_dict['radarControllerHeader'] = self.dataOut.radarControllerHeaderObj.getAsDict()
690 self.metadata_dict['systemHeader'] = self.dataOut.systemHeaderObj.getAsDict()
691 self.digitalMetadataWriteObj.write(start_idx, self.metadata_dict)
692 return
693
694
695 def timeit(self, toExecute):
696 t0 = time()
697 toExecute()
698 self.executionTime = time() - t0
699 if self.oldAverage is None: self.oldAverage = self.executionTime
700 self.oldAverage = (self.executionTime + self.count*self.oldAverage) / (self.count + 1.0)
701 self.count = self.count + 1.0
702 return
703
704
705 def writeData(self):
706 for i in range(self.dataOut.systemHeaderObj.nSamples):
707 for channel in self.dataOut.channelList:
708 self.arr_data[i][channel]['r'] = self.dataOut.data[channel][i].real
709 self.arr_data[i][channel]['i'] = self.dataOut.data[channel][i].imag
710
711 def f(): return self.digitalWriteObj.rf_write(self.arr_data)
712 self.timeit(f)
713
714 return
715
716 def run(self, dataOut, frequency=49.92e6, path=None, fileCadence=100, dirCadence=25, metadataCadence=1, **kwargs):
717 '''
718 This method will be called many times so here you should put all your code
719 Inputs:
720 dataOut: object with the data
721 '''
722 # print dataOut.__dict__
723 self.dataOut = dataOut
724 if not self.isConfig:
725 self.setup(dataOut, path, frequency, fileCadence, dirCadence, metadataCadence, **kwargs)
726 self.writeMetadata()
727
728 self.writeData()
729
730 ## self.currentSample += 1
731 ## if self.dataOut.flagDataAsBlock or self.currentSample == 1:
732 ## self.writeMetadata()
733 ## if self.currentSample == self.__nProfiles: self.currentSample = 0
734
735 def close(self):
736 print '[Writing] - Closing files '
737 print 'Average of writing to digital rf format is ', self.oldAverage * 1000
738 try:
739 self.digitalWriteObj.close()
740 except:
741 pass
742
743 # raise
744 if __name__ == '__main__':
745
746 readObj = DigitalRFReader()
747
748 while True:
749 readObj.run(path='/home/jchavez/jicamarca/mocked_data/')
750 # readObj.printInfo()
751 # readObj.printNumberOfBlock()
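The reader above is essentially a thin wrapper over the digital_rf package: it resolves a channel name, queries the channel's sample bounds and properties, then pulls fixed-size complex vectors per subchannel into its buffer. A minimal sketch of that underlying call sequence, using only the digital_rf calls that appear in the class (the data path and the 200-sample chunk size are placeholders):

    import digital_rf

    path = '/path/to/drf/data'  # placeholder top-level Digital RF directory
    reader = digital_rf.DigitalRFReader(path)

    channel = reader.get_channels()[0]  # e.g. 'ch0'
    start_sample, end_sample = reader.get_bounds(channel)
    num_subchannels = reader.get_properties(channel)['num_subchannels']

    samples_to_read = 200  # chunk size, matching the benchmark notes below
    for sub in range(num_subchannels):
        # same call used in __readNextBlock(); returns a 1-D complex vector
        data = reader.read_vector_c81d(start_sample, samples_to_read, channel, sub_channel=sub)
        print(channel, sub, data.shape)

On top of this loop the class adds buffering, header reconstruction from the Digital Metadata, and the online reload/retry logic.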
@@ -0,0 +1,15
1 Reading
2 200samples -> 0.47m HDD 1000''
3 200samples -> 6 HDD 100ms file 100s folder
4 200samples -> 0.48ms HDD 100ms file 1000s folder
5 200samples -> 116ms HDD 5000ms file 100s folder
6 200samples -> 182 HDD 10000ms file 100s folder
7 200samples -> 143 HDD 10000ms file 100s folder SSD
8
9 Writing
10 200samples -> 0.78m HDD 100ms file 1000s folder
11 200samples -> 0.066m HDD 100ms file 1000s folder
12 200samples -> 0.30 HDD 100ms file 100s folder
13 200samples -> 0.23 HDD 5000ms file 100s folder
14 200samples -> 0.176 HDD 10000ms file 100s folder
15
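The timings above come from the running average kept by the timeit() helpers in DigitalRFReader and DigitalRFWriter: each call's elapsed time is folded into an incremental mean rather than stored in a list. A standalone sketch of that bookkeeping (the class name is illustrative; the *1000 factor mirrors the millisecond conversion printed in close()):

    from time import time

    class AverageTimer(object):
        '''Incremental mean of execution times, as in the timeit() methods above.'''

        def __init__(self):
            self.oldAverage = None
            self.count = 0

        def timeit(self, toExecute):
            t0 = time()
            toExecute()
            executionTime = time() - t0
            if self.oldAverage is None:
                self.oldAverage = executionTime
            # running mean: new_avg = (x + n*old_avg) / (n + 1)
            self.oldAverage = (executionTime + self.count * self.oldAverage) / (self.count + 1.0)
            self.count += 1.0

    timer = AverageTimer()
    for _ in range(5):
        timer.timeit(lambda: sum(range(100000)))
    print('average per call:', timer.oldAverage * 1000, 'ms')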
@@ -0,0 +1,117
1 #!/usr/bin/env python
2 '''
3 Created on Jul 7, 2014
4
5 @author: roj-idl71
6 '''
7 import os, sys
8
9 from schainpy.controller import Project
10
11 def main():
12
13 desc = "Testing USRP data reader"
14 filename = "schain.xml"
15 figpath = "./"
16 remotefolder = "/home/wmaster/graficos"
17
18 #this controller object save all user configuration and then execute each module
19 #with their parameters.
20 controllerObj = Project()
21
22 controllerObj.setup(id = '191', name='test01', description=desc)
23
24 #Creating a reader object with its parameters
25 #schainpy.model.io.jroIO_usrp.USRPReader.setup()
26 readUnitConfObj = controllerObj.addReadUnit(datatype='DigitalRF',
27 path='/home/nanosat/data/',
28 startDate='2000/07/03',
29 endDate='2017/07/03',
30 startTime='00:00:00',
31 endTime='23:59:59',
32 online=0)
33
34 # procUnitConfObj0 = controllerObj.addProcUnit(datatype='Voltage',
35 # inputId=readUnitConfObj.getId())
36
37 # opObj10 = procUnitConfObj0.addOperation(name='selectHeights')
38 # opObj10.addParameter(name='minHei', value='0', format='float')
39 # opObj10.addParameter(name='maxHei', value='8', format='float')
40
41 # opObj10 = procUnitConfObj0.addOperation(name='setH0')
42 # opObj10.addParameter(name='h0', value='5.4', format='float')
43
44 # opObj10 = procUnitConfObj0.addOperation(name='Decoder', optype='external')
45 # opObj10.addParameter(name='code', value='1,-1', format='intlist')
46 # opObj10.addParameter(name='nCode', value='2', format='float')
47 # opObj10.addParameter(name='nBaud', value='1', format='float')
48
49 # opObj10 = procUnitConfObj0.addOperation(name='CohInt', optype='external')
50 # opObj10.addParameter(name='n', value='128', format='float')
51
52 # opObj11 = procUnitConfObj0.addOperation(name='Scope', optype='external')
53 # opObj11.addParameter(name='id', value='121', format='int')
54 # opObj11.addParameter(name='wintitle', value='Scope', format='str')
55
56 # procUnitConfObj1 = controllerObj.addProcUnit(datatype='Spectra',
57 # inputId=procUnitConfObj0.getId())
58
59 # #Creating a processing object with its parameters
60 # #schainpy.model.proc.jroproc_spectra.SpectraProc.run()
61 # #If you need to add more parameters can use the "addParameter method"
62 # procUnitConfObj1.addParameter(name='nFFTPoints', value='8', format='int')
63 # procUnitConfObj1.addParameter(name='pairsList', value='(0,1)', format='pairslist')
64
65 # opObj10 = procUnitConfObj1.addOperation(name='IncohInt', optype='external')
66 # opObj10.addParameter(name='n', value='2', format='float')
67 #
68 #Using internal methods
69 #schainpy.model.proc.jroproc_spectra.SpectraProc.selectChannels()
70 # opObj10 = procUnitConfObj1.addOperation(name='selectChannels')
71 # opObj10.addParameter(name='channelList', value='0,1', format='intlist')
72
73 #Using internal methods
74 #schainpy.model.proc.jroproc_spectra.SpectraProc.selectHeights()
75 # opObj10 = procUnitConfObj1.addOperation(name='selectHeights')
76 # opObj10.addParameter(name='minHei', value='90', format='float')
77 # opObj10.addParameter(name='maxHei', value='180', format='float')
78
79 #Using external methods (new modules)
80 # #schainpy.model.proc.jroproc_spectra.IncohInt.setup()
81 # opObj12 = procUnitConfObj1.addOperation(name='IncohInt', optype='other')
82 # opObj12.addParameter(name='n', value='1', format='int')
83
84 #Using external methods (new modules)
85 #schainpy.model.graphics.jroplot_spectra.SpectraPlot.setup()
86 # opObj11 = procUnitConfObj1.addOperation(name='SpectraPlot', optype='external')
87 # opObj11.addParameter(name='id', value='11', format='int')
88 # opObj11.addParameter(name='wintitle', value='SpectraPlot', format='str')
89 # opObj11.addParameter(name='zmin', value='0', format='int')
90 # opObj11.addParameter(name='zmax', value='90', format='int')
91 # opObj11.addParameter(name='save', value='1', format='int')
92 # opObj11.addParameter(name='xmin', value='-20', format='float')
93 # opObj11.addParameter(name='xmax', value='20', format='float')
94
95 #Using external methods (new modules)
96 #schainpy.model.graphics.jroplot_spectra.RTIPlot.setup()
97 # opObj11 = procUnitConfObj1.addOperation(name='RTIPlot', optype='other')
98 # opObj11.addParameter(name='id', value='30', format='int')
99 # opObj11.addParameter(name='wintitle', value='RTI', format='str')
100 # # opObj11.addParameter(name='zmin', value='0', format='int')
101 # # opObj11.addParameter(name='zmax', value='90', format='int')
102 # opObj11.addParameter(name='showprofile', value='1', format='int')
103 # opObj11.addParameter(name='timerange', value=str(2*60*60), format='int')
104 # opObj11.addParameter(name='xmin', value='19.5', format='float')
105 # opObj11.addParameter(name='xmax', value='20', format='float')
106
107 # opObj11 = procUnitConfObj1.addOperation(name='CrossSpectraPlot', optype='other')
108 # opObj11.addParameter(name='id', value='3', format='int')
109 # opObj11.addParameter(name='wintitle', value='CrossSpectraPlot', format='str')
110 # opObj11.addParameter(name='zmin', value='30', format='int')
111 # opObj11.addParameter(name='zmax', value='120', format='int')
112 # opObj11.addParameter(name='pairsList', value='(0,1)', format='pairslist')
113
114 controllerObj.start()
115
116 if __name__ == '__main__':
117 main()
@@ -0,0 +1,98
1 import os, sys
2
3 from schainpy.controller import Project
4
5 if __name__ == '__main__':
6
7 desc = "Segundo Test"
8 filename = "schain.xml"
9
10 controllerObj = Project()
11
12 controllerObj.setup(id = '191', name='test01', description=desc)
13
14 readUnitConfObj = controllerObj.addReadUnit(datatype='VoltageReader',
15 path='/home/nanosat/data/John',
16 startDate='2010/10/28',
17 endDate='2017/10/28',
18 startTime='00:00:00',
19 endTime='23:59:59',
20 online=0,
21 walk=0)
22
23 opObj00 = readUnitConfObj.addOperation(name='printNumberOfBlock')
24
25 procUnitConfObj0 = controllerObj.addProcUnit(datatype='VoltageProc',
26 inputId=readUnitConfObj.getId())
27
28 # opObj11 = procUnitConfObj0.addOperation(name='Scope', optype='external')
29 # opObj11.addParameter(name='id', value='121', format='int')
30 # opObj11.addParameter(name='wintitle', value='Scope', format='str')
31
32 opObj10 = procUnitConfObj0.addOperation(name='DigitalRFWriter', optype='other')
33 opObj10.addParameter(name='path', value='/home/nanosat/data/digitalrf', format='str')
34 # opObj10.addParameter(name='minHei', value='0', format='float')
35 # opObj10.addParameter(name='maxHei', value='8', format='float')
36
37 # opObj10 = procUnitConfObj0.addOperation(name='filterByHeights')
38 # opObj10.addParameter(name='window', value='2', format='float')
39
40 # opObj10 = procUnitConfObj0.addOperation(name='Decoder', optype='external')
41 # opObj10.addParameter(name='code', value='1,-1', format='intlist')
42 # opObj10.addParameter(name='nCode', value='2', format='float')
43 # opObj10.addParameter(name='nBaud', value='1', format='float')
44
45
46 # opObj10 = procUnitConfObj0.addOperation(name='CohInt', optype='external')
47 # opObj10.addParameter(name='n', value='1296', format='float')
48
49 # procUnitConfObj1 = controllerObj.addProcUnit(datatype='SpectraProc',
50 # inputId=procUnitConfObj0.getId())
51
52 #Creating a processing object with its parameters
53 #schainpy.model.proc.jroproc_spectra.SpectraProc.run()
54 #If you need to add more parameters can use the "addParameter method"
55 # procUnitConfObj1.addParameter(name='nFFTPoints', value='128', format='int')
56
57 # opObj10 = procUnitConfObj1.addOperation(name='IncohInt', optype='external')
58 # opObj10.addParameter(name='n', value='2', format='float')
59
60 #Using internal methods
61 #schainpy.model.proc.jroproc_spectra.SpectraProc.selectChannels()
62 # opObj10 = procUnitConfObj1.addOperation(name='selectChannels')
63 # opObj10.addParameter(name='channelList', value='0,1', format='intlist')
64
65 #Using internal methods
66 #schainpy.model.proc.jroproc_spectra.SpectraProc.selectHeights()
67 # opObj10 = procUnitConfObj1.addOperation(name='selectHeights')
68 # opObj10.addParameter(name='minHei', value='90', format='float')
69 # opObj10.addParameter(name='maxHei', value='180', format='float')
70
71 #Using external methods (new modules)
72 # #schainpy.model.proc.jroproc_spectra.IncohInt.setup()
73 # opObj12 = procUnitConfObj1.addOperation(name='IncohInt', optype='other')
74 # opObj12.addParameter(name='n', value='1', format='int')
75
76 #Using external methods (new modules)
77 #schainpy.model.graphics.jroplot_spectra.SpectraPlot.setup()
78 # opObj11 = procUnitConfObj1.addOperation(name='SpectraPlot', optype='external')
79 # opObj11.addParameter(name='id', value='11', format='int')
80 # opObj11.addParameter(name='wintitle', value='SpectraPlot', format='str')
81 # opObj11.addParameter(name='zmin', value='-60', format='int')
82 # opObj11.addParameter(name='zmax', value='10', format='int')
83 # opObj11.addParameter(name='save', value='1', format='int')
84
85 # #Using external methods (new modules)
86 # #schainpy.model.graphics.jroplot_spectra.RTIPlot.setup()
87 # opObj11 = procUnitConfObj1.addOperation(name='RTIPlot', optype='other')
88 # opObj11.addParameter(name='id', value='30', format='int')
89 # opObj11.addParameter(name='wintitle', value='RTI', format='str')
90 # opObj11.addParameter(name='zmin', value='-60', format='int')
91 # opObj11.addParameter(name='zmax', value='-10', format='int')
92 # opObj11.addParameter(name='showprofile', value='1', format='int')
93 # # opObj11.addParameter(name='timerange', value=str(5*60*60*60), format='int')
94 # opObj11.addParameter(name='xmin', value='14', format='float')
95 # opObj11.addParameter(name='xmax', value='23.9', format='float')
96 # opObj11.addParameter(name='save', value='1', format='int')
97
98 controllerObj.start()
@@ -0,0 +1,1
1 You should install "digital_rf_hdf5" module if you want to read USRP data
@@ -100,16 +100,16 ENV/
100 100 # eclipse
101 101 .project
102 102 .pydevproject
103
104 103 # vscode
105 104
106 105 .vscode
107 106
108 schainpy/scripts/
109 107 schaingui/node_modules/
108 schainpy/scripts/
110 109 .svn/
111 110 *.png
112 111 *.pyc
113 *.xml
114 *.log
112 schainpy/scripts
113 .vscode
115 114 trash
115 *.log
@@ -1,4 +1,4
1 # schaing
1 # schain
2 2
3 3 Command Line Interface for SIGNAL CHAIN - jro
4 4
@@ -31,18 +31,17 PREFIX = 'experiment'
31 31
32 32 @click.command()
33 33 @click.option('--version', '-v', is_flag=True, callback=print_version, help='SChain version', type=str)
34 @click.option('--xml', '-x', default=None, help='run an XML file', type=click.Path(exists=True, resolve_path=True))
35 34 @click.argument('command', default='run', required=True)
36 35 @click.argument('nextcommand', default=None, required=False, type=str)
37 def main(command, nextcommand, version, xml):
36 def main(command, nextcommand, version):
38 37 """COMMAND LINE INTERFACE FOR SIGNAL CHAIN - JICAMARCA RADIO OBSERVATORY \n
39 38 Available commands.\n
40 39 --xml: runs a schain XML generated file\n
41 40 run: runs any python script starting 'experiment_'\n
42 41 generate: generates a template schain script\n
43 42 search: return available operations, procs or arguments of the given operation/proc\n"""
44 if xml is not None:
45 runFromXML(xml)
43 if command == 'xml':
44 runFromXML(nextcommand)
46 45 elif command == 'generate':
47 46 generate()
48 47 elif command == 'test':
@@ -54,6 +53,7 def main(command, nextcommand, version, xml):
54 53 else:
55 54 log.error('Command {} is not defined'.format(command))
56 55
56
57 57 def check_module(possible, instance):
58 58 def check(x):
59 59 try:
@@ -77,19 +77,23 def search(nextcommand):
77 77 log.error('There is no Operation/ProcessingUnit to search')
78 78 elif nextcommand == 'procs':
79 79 procs = paramsFinder.getProcs()
80 log.success('Current ProcessingUnits are:\n\033[1m{}\033[0m'.format('\n'.join(procs)))
80 log.success(
81 'Current ProcessingUnits are:\n\033[1m{}\033[0m'.format('\n'.join(procs)))
81 82
82 83 elif nextcommand == 'operations':
83 84 operations = paramsFinder.getOperations()
84 log.success('Current Operations are:\n\033[1m{}\033[0m'.format('\n'.join(operations)))
85 log.success('Current Operations are:\n\033[1m{}\033[0m'.format(
86 '\n'.join(operations)))
85 87 else:
86 88 try:
87 89 args = paramsFinder.getArgs(nextcommand)
88 log.warning('Use this feature with caution. It may not return all the allowed arguments')
90 log.warning(
91 'Use this feature with caution. It may not return all the allowed arguments')
89 92 if len(args) == 0:
90 93 log.success('{} has no arguments'.format(nextcommand))
91 94 else:
92 log.success('Showing arguments of {} are:\n\033[1m{}\033[0m'.format(nextcommand, '\n'.join(args)))
95 log.success('Showing {} arguments:\n\033[1m{}\033[0m'.format(
96 nextcommand, '\n'.join(args)))
93 97 except Exception as e:
94 98 log.error('Module {} does not exists'.format(nextcommand))
95 99 allModules = paramsFinder.getAll()
@@ -117,12 +121,18 def runschain(nextcommand):
117 121
118 122 def basicInputs():
119 123 inputs = {}
120 inputs['desc'] = click.prompt('Enter a description', default="A schain project", type=str)
121 inputs['name'] = click.prompt('Name of the project', default="project", type=str)
122 inputs['path'] = click.prompt('Data path', default=os.getcwd(), type=click.Path(exists=True, resolve_path=True))
123 inputs['startDate'] = click.prompt('Start date', default='1970/01/01', type=str)
124 inputs['endDate'] = click.prompt('End date', default='2017/12/31', type=str)
125 inputs['startHour'] = click.prompt('Start hour', default='00:00:00', type=str)
124 inputs['desc'] = click.prompt(
125 'Enter a description', default="A schain project", type=str)
126 inputs['name'] = click.prompt(
127 'Name of the project', default="project", type=str)
128 inputs['path'] = click.prompt('Data path', default=os.getcwd(
129 ), type=click.Path(exists=True, resolve_path=True))
130 inputs['startDate'] = click.prompt(
131 'Start date', default='1970/01/01', type=str)
132 inputs['endDate'] = click.prompt(
133 'End date', default='2017/12/31', type=str)
134 inputs['startHour'] = click.prompt(
135 'Start hour', default='00:00:00', type=str)
126 136 inputs['endHour'] = click.prompt('End hour', default='23:59:59', type=str)
127 137 inputs['figpath'] = inputs['path'] + '/figs'
128 138 return inputs
@@ -132,7 +142,8 def generate():
132 142 inputs = basicInputs()
133 143 inputs['multiprocess'] = click.confirm('Is this a multiprocess script?')
134 144 if inputs['multiprocess']:
135 inputs['nProcess'] = click.prompt('How many process?', default=cpu_count(), type=int)
145 inputs['nProcess'] = click.prompt(
146 'How many process?', default=cpu_count(), type=int)
136 147 current = templates.multiprocess.format(**inputs)
137 148 else:
138 149 current = templates.basic.format(**inputs)
@@ -1,11 +1,10
1 1 basic = '''from schainpy.controller import Project
2 2
3 3 desc = "{desc}"
4 project = Project()
5 project.setup(id='200', name="{name}", description=desc)
4 6
5 controller = Project()
6 controller.setup(id='191', name="{name}", description=desc)
7
8 readUnitConf = controller.addReadUnit(datatype='VoltageReader',
7 voltage_reader = project.addReadUnit(datatype='VoltageReader',
9 8 path="{path}",
10 9 startDate="{startDate}",
11 10 endDate="{endDate}",
@@ -16,60 +15,76 readUnitConf = controller.addReadUnit(datatype='VoltageReader',
16 15 walk=1,
17 16 )
18 17
19 procUnitConf1 = controller.addProcUnit(datatype='VoltageProc', inputId=readUnitConf.getId())
18 voltage_proc = project.addProcUnit(datatype='VoltageProc', inputId=voltage_reader.getId())
20 19
21 opObj11 = procUnitConf1.addOperation(name='ProfileSelector', optype='other')
22 opObj11.addParameter(name='profileRangeList', value='120,183', format='intlist')
20 profile = voltage_proc.addOperation(name='ProfileSelector', optype='other')
21 profile.addParameter(name='profileRangeList', value='120,183', format='intlist')
23 22
24 opObj11 = procUnitConf1.addOperation(name='RTIPlot', optype='other')
25 opObj11.addParameter(name='wintitle', value='Jicamarca Radio Observatory', format='str')
26 opObj11.addParameter(name='showprofile', value='0', format='int')
27 opObj11.addParameter(name='xmin', value='0', format='int')
28 opObj11.addParameter(name='xmax', value='24', format='int')
29 opObj11.addParameter(name='figpath', value="{figpath}", format='str')
30 opObj11.addParameter(name='wr_period', value='5', format='int')
31 opObj11.addParameter(name='exp_code', value='22', format='int')
23 rti = voltage_proc.addOperation(name='RTIPlot', optype='other')
24 rti.addParameter(name='wintitle', value='Jicamarca Radio Observatory', format='str')
25 rti.addParameter(name='showprofile', value='0', format='int')
26 rti.addParameter(name='xmin', value='0', format='int')
27 rti.addParameter(name='xmax', value='24', format='int')
28 rti.addParameter(name='figpath', value="{figpath}", format='str')
29 rti.addParameter(name='wr_period', value='5', format='int')
30 rti.addParameter(name='exp_code', value='22', format='int')
32 31
33 32
34 33 controller.start()
35 34 '''
36 35
37 multiprocess = '''from schainpy.controller import Project, multiSchain
38
36 multiprocess = '''from schainpy.controller import Project, MPProject
37 from time import sleep
39 38 desc = "{desc}"
40 39
41 def fiber(cursor, skip, q, day):
42 controller = Project()
43 controller.setup(id='191', name="{name}", description=desc)
40 ####################
41 # PLOTTER RECEIVER #
42 ####################
43 plotter = Project()
44 plotter.setup(id='100', name='receiver', description=desc)
45
46 receiver_plot = plotter.addProcUnit(name='PlotterReceiver')
47 receiver_plot.addParameter(name='throttle', value=20, format='int')
48 receiver_plot.addParameter(name='plottypes', value='rti', format='str')
49
50 rti = receiver_plot.addOperation(name='PlotRTIData', optype='other')
51 rti.addParameter(name='zmin', value='-40.0', format='float')
52 rti.addParameter(name='zmax', value='100.0', format='float')
53 rti.addParameter(name='decimation', value='200', format='int')
54 rti.addParameter(name='xmin', value='0.0', format='int')
55 rti.addParameter(name='colormap', value='jet', format='str')
56
57 plotter.start()
44 58
45 readUnitConf = controller.addReadUnit(datatype='SpectraReader',
59 sleep(2)
60
61 ################
62 # DATA EMITTER #
63 ################
64 project = Project()
65 project.setup(id='200', name="{name}", description=desc)
66
67 spectra_reader = project.addReadUnit(datatype='SpectraReader',
46 68 path="{path}",
47 startDate=day,
48 endDate=day,
69 startDate={startDate},
70 endDate={endDate},
49 71 startTime="{startHour}",
50 72 endTime="{endHour}",
51 73 online=0,
52 queue=q,
53 cursor=cursor,
54 skip=skip,
55 74 verbose=1,
56 75 walk=1,
57 76 )
58 77
59 procUnitConf1 = controller.addProcUnit(datatype='Spectra', inputId=readUnitConf.getId())
60
61 procUnitConf2 = controller.addProcUnit(datatype='ParametersProc', inputId=readUnitConf.getId())
62 opObj11 = procUnitConf2.addOperation(name='SpectralMoments', optype='other')
78 spectra_proc = project.addProcUnit(datatype='Spectra', inputId=spectra_reader.getId())
63 79
64 opObj12 = procUnitConf2.addOperation(name='PublishData', optype='other')
65 opObj12.addParameter(name='zeromq', value=1, format='int')
66 opObj12.addParameter(name='verbose', value=0, format='bool')
67
68 controller.start()
80 parameters_proc = project.addProcUnit(datatype='ParametersProc', inputId=spectra_proc.getId())
81 moments = parameters_proc.addOperation(name='SpectralMoments', optype='other')
69 82
83 publish = parameters_proc.addOperation(name='PublishData', optype='other')
84 publish.addParameter(name='zeromq', value=1, format='int')
85 publish.addParameter(name='verbose', value=0, format='bool')
70 86
71 if __name__ == '__main__':
72 multiSchain(fiber, nProcess={nProcess}, startDate="{startDate}", endDate="{endDate}")
87 MPProject(project, 16)
73 88
74 89
75 90 '''
@@ -1,7 +1,7
1 1 ## CHANGELOG:
2 2
3 3 ### 2.3
4 * Added high order function `multiSchain` for multiprocessing scripts.
4 * Added high order function `MPProject` for multiprocessing scripts.
5 5 * Added two new Processing Units `PublishData` and `ReceiverData` for receiving and sending dataOut through multiple ways (tcp, ipc, inproc).
6 6 * Added a new graphics Processing Unit `PlotterReceiver`. It is decoupled from normal processing sequence with support for data generated by multiprocessing scripts.
7 7 * Added support for sending realtime graphic to web server.
@@ -21,7 +21,7
21 21 ### 2.2.6
22 22 * Graphics generated by the GUI are now the same as generated by scripts. Issue #1074.
23 23 * Added support for C extensions.
24 * function `hildebrand_sehkon` optimized with a C wrapper.
24 * Function `hildebrand_sehkon` optimized with a C wrapper.
25 25 * Numpy version updated.
26 26 * Migration to GIT.
27 27
@@ -24,6 +24,7 DTYPES = {
24 24 'Spectra': '.pdata'
25 25 }
26 26
27
27 28 def MPProject(project, n=cpu_count()):
28 29 '''
29 30 Project wrapper to run schain in n processes
@@ -72,6 +73,7 def MPProject(project, n=cpu_count()):
72 73
73 74 time.sleep(3)
74 75
76
75 77 class ParameterConf():
76 78
77 79 id = None
@@ -261,6 +263,7 class ParameterConf():
261 263
262 264 print 'Parameter[%s]: name = %s, value = %s, format = %s' %(self.id, self.name, self.value, self.format)
263 265
266
264 267 class OperationConf():
265 268
266 269 id = None
@@ -279,7 +282,6 class OperationConf():
279 282 self.priority = None
280 283 self.type = 'self'
281 284
282
283 285 def __getNewId(self):
284 286
285 287 return int(self.id)*10 + len(self.parmConfObjList) + 1
@@ -337,7 +339,6 class OperationConf():
337 339
338 340 return value
339 341
340
341 342 def getKwargs(self):
342 343
343 344 kwargs = {}
@@ -440,7 +441,6 class OperationConf():
440 441
441 442 def createObject(self, plotter_queue=None):
442 443
443
444 444 if self.type == 'self':
445 445 raise ValueError, 'This operation type cannot be created'
446 446
@@ -505,7 +505,6 class ProcUnitConf():
505 505
506 506 def updateId(self, new_id, parentId=parentId):
507 507
508
509 508 new_id = int(parentId)*10 + (int(self.id) % 10)
510 509 new_inputId = int(parentId)*10 + (int(self.inputId) % 10)
511 510
@@ -525,7 +524,6 class ProcUnitConf():
525 524 self.id = str(new_id)
526 525 self.inputId = str(new_inputId)
527 526
528
529 527 def getInputId(self):
530 528
531 529 return self.inputId
@@ -659,7 +657,6 class ProcUnitConf():
659 657 for opConfObj in self.opConfObjList:
660 658 opConfObj.printattr()
661 659
662
663 660 def getKwargs(self):
664 661
665 662 opObj = self.opConfObjList[0]
@@ -678,7 +675,8 class ProcUnitConf():
678 675 if opConfObj.type=='self' and self.name=='run':
679 676 continue
680 677 elif opConfObj.type=='self':
681 procUnitObj.addOperationKwargs(opConfObj.id, **opConfObj.getKwargs())
678 procUnitObj.addOperationKwargs(
679 opConfObj.id, **opConfObj.getKwargs())
682 680 continue
683 681
684 682 opObj = opConfObj.createObject(plotter_queue)
@@ -725,6 +723,7 class ProcUnitConf():
725 723
726 724 return
727 725
726
728 727 class ReadUnitConf(ProcUnitConf):
729 728
730 729 path = None
@@ -757,7 +756,6 class ReadUnitConf(ProcUnitConf):
757 756 #Compatible with old signal chain version
758 757 if datatype==None and name==None:
759 758 raise ValueError, 'datatype or name should be defined'
760
761 759 if name == None:
762 760 if 'Reader' in datatype:
763 761 name = datatype
@@ -795,7 +793,8 class ReadUnitConf(ProcUnitConf):
795 793 self.name = '%sReader' %(datatype)
796 794 self.datatype = self.name.replace('Reader', '')
797 795
798 attrs = ('path', 'startDate', 'endDate', 'startTime', 'endTime', 'parentId')
796 attrs = ('path', 'startDate', 'endDate',
797 'startTime', 'endTime', 'parentId')
799 798
800 799 for attr in attrs:
801 800 if attr in kwargs:
@@ -816,19 +815,24 class ReadUnitConf(ProcUnitConf):
816 815 opObj = self.addOperation(name = 'run', optype = 'self')
817 816
818 817 if self.server is None:
819 opObj.addParameter(name='datatype', value=self.datatype, format='str')
818 opObj.addParameter(
819 name='datatype', value=self.datatype, format='str')
820 820 opObj.addParameter(name='path', value=self.path, format='str')
821 opObj.addParameter(name='startDate', value=self.startDate, format='date')
822 opObj.addParameter(name='endDate', value=self.endDate, format='date')
823 opObj.addParameter(name='startTime', value=self.startTime, format='time')
824 opObj.addParameter(name='endTime', value=self.endTime, format='time')
821 opObj.addParameter(
822 name='startDate', value=self.startDate, format='date')
823 opObj.addParameter(
824 name='endDate', value=self.endDate, format='date')
825 opObj.addParameter(
826 name='startTime', value=self.startTime, format='time')
827 opObj.addParameter(
828 name='endTime', value=self.endTime, format='time')
825 829
826 830 for key, value in kwargs.items():
827 opObj.addParameter(name=key, value=value, format=type(value).__name__)
831 opObj.addParameter(name=key, value=value,
832 format=type(value).__name__)
828 833 else:
829 834 opObj.addParameter(name='server' , value=self.server, format='str')
830 835
831
832 836 return opObj
833 837
834 838 def updateRunOperation(self, **kwargs):
@@ -838,13 +842,16 class ReadUnitConf(ProcUnitConf):
838 842
839 843 opObj.addParameter(name='datatype', value=self.datatype, format='str')
840 844 opObj.addParameter(name='path', value=self.path, format='str')
841 opObj.addParameter(name='startDate', value=self.startDate, format='date')
845 opObj.addParameter(
846 name='startDate', value=self.startDate, format='date')
842 847 opObj.addParameter(name='endDate', value=self.endDate, format='date')
843 opObj.addParameter(name='startTime', value=self.startTime, format='time')
848 opObj.addParameter(
849 name='startTime', value=self.startTime, format='time')
844 850 opObj.addParameter(name='endTime', value=self.endTime, format='time')
845 851
846 852 for key, value in kwargs.items():
847 opObj.addParameter(name=key, value=value, format=type(value).__name__)
853 opObj.addParameter(name=key, value=value,
854 format=type(value).__name__)
848 855
849 856 return opObj
850 857
@@ -877,6 +884,7 class ReadUnitConf(ProcUnitConf):
877 884 self.startTime = opConfObj.getParameterValue('startTime')
878 885 self.endTime = opConfObj.getParameterValue('endTime')
879 886
887
880 888 class Project(Process):
881 889
882 890 id = None
@@ -974,7 +982,8 class Project(Process):
974 982 idReadUnit = str(id)
975 983
976 984 readUnitConfObj = ReadUnitConf()
977 readUnitConfObj.setup(idReadUnit, name, datatype, parentId=self.id, **kwargs)
985 readUnitConfObj.setup(idReadUnit, name, datatype,
986 parentId=self.id, **kwargs)
978 987
979 988 self.procUnitConfObjDict[readUnitConfObj.getId()] = readUnitConfObj
980 989
@@ -985,7 +994,8 class Project(Process):
985 994 idProcUnit = self.__getNewId()
986 995
987 996 procUnitConfObj = ProcUnitConf()
988 procUnitConfObj.setup(idProcUnit, name, datatype, inputId, parentId=self.id)
997 procUnitConfObj.setup(idProcUnit, name, datatype,
998 inputId, parentId=self.id)
989 999
990 1000 self.procUnitConfObjDict[procUnitConfObj.getId()] = procUnitConfObj
991 1001
@@ -1101,7 +1111,8 class Project(Process):
1101 1111 self.name = self.projectElement.get('name')
1102 1112 self.description = self.projectElement.get('description')
1103 1113
1104 readUnitElementList = self.projectElement.iter(ReadUnitConf().getElementName())
1114 readUnitElementList = self.projectElement.iter(
1115 ReadUnitConf().getElementName())
1105 1116
1106 1117 for readUnitElement in readUnitElementList:
1107 1118 readUnitConfObj = ReadUnitConf()
@@ -1112,7 +1123,8 class Project(Process):
1112 1123
1113 1124 self.procUnitConfObjDict[readUnitConfObj.getId()] = readUnitConfObj
1114 1125
1115 procUnitElementList = self.projectElement.iter(ProcUnitConf().getElementName())
1126 procUnitElementList = self.projectElement.iter(
1127 ProcUnitConf().getElementName())
1116 1128
1117 1129 for procUnitElement in procUnitElementList:
1118 1130 procUnitConfObj = ProcUnitConf()
@@ -1181,10 +1193,13 class Project(Process):
1181 1193 if not send_email:
1182 1194 return
1183 1195
1184 subject = 'SChain v%s: Error running %s\n' %(schainpy.__version__, procUnitConfObj.name)
1196 subject = 'SChain v%s: Error running %s\n' % (
1197 schainpy.__version__, procUnitConfObj.name)
1185 1198
1186 subtitle = '%s: %s\n' %(procUnitConfObj.getElementName() ,procUnitConfObj.name)
1187 subtitle += 'Hostname: %s\n' %socket.gethostbyname(socket.gethostname())
1199 subtitle = '%s: %s\n' % (
1200 procUnitConfObj.getElementName(), procUnitConfObj.name)
1201 subtitle += 'Hostname: %s\n' % socket.gethostbyname(
1202 socket.gethostname())
1188 1203 subtitle += 'Working directory: %s\n' %os.path.abspath('./')
1189 1204 subtitle += 'Configuration file: %s\n' %self.filename
1190 1205 subtitle += 'Time: %s\n' %str(datetime.datetime.now())
@@ -50,7 +50,6 class ProjectParms():
50 50 indexDatatype = 2
51 51 if 'usrp' in self.datatype.lower():
52 52 indexDatatype = 3
53
54 53 return indexDatatype
55 54
56 55 def getExt(self):
@@ -65,7 +64,6 class ProjectParms():
65 64 ext = '.fits'
66 65 if self.datatype.lower() == 'usrp':
67 66 ext = '.hdf5'
68
69 67 return ext
70 68
71 69 def set(self, project_name, datatype, ext, dpath, online,
@@ -5,8 +5,8
5 5 # from schainpy.model.utils.jroutils import *
6 6 # from schainpy.serializer import *
7 7
8 from graphics import *
8 9 from data import *
9 10 from io import *
10 11 from proc import *
11 from graphics import *
12 12 from utils import *
@@ -292,11 +292,9 class JROData(GenericData):
292 292 return fmax
293 293
294 294 def getFmax(self):
295
296 295 PRF = 1./(self.ippSeconds * self.nCohInt)
297 296
298 297 fmax = PRF
299
300 298 return fmax
301 299
302 300 def getVmax(self):
@@ -7,6 +7,7 import sys
7 7 import numpy
8 8 import copy
9 9 import datetime
10 import inspect
10 11
11 12 SPEED_OF_LIGHT = 299792458
12 13 SPEED_OF_LIGHT = 3e8
@@ -83,6 +84,24 class Header(object):
83 84
84 85 raise NotImplementedError
85 86
87 def getAllowedArgs(self):
88 args = inspect.getargspec(self.__init__).args
89 try:
90 args.remove('self')
91 except:
92 pass
93 return args
94
95 def getAsDict(self):
96 args = self.getAllowedArgs()
97 asDict = {}
98 for x in args:
99 asDict[x] = self[x]
100 return asDict
101
102 def __getitem__(self, name):
103 return getattr(self, name)
104
86 105 def printInfo(self):
87 106
88 107 message = "#"*50 + "\n"
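The getAllowedArgs/getAsDict pair added to Header lets a header object serialize itself as the keyword arguments of its own constructor, which is what DigitalRFWriter.writeMetadata() stores through getAsDict(). A standalone sketch of the same introspection pattern (Demo is a hypothetical stand-in for the real header classes; inspect.getargspec matches the Python 2 code above, while newer Python 3 would use getfullargspec):

    import inspect

    class Demo(object):
        def __init__(self, nSamples=0, nProfiles=0, adcResolution=14):
            self.nSamples = nSamples
            self.nProfiles = nProfiles
            self.adcResolution = adcResolution

        def getAllowedArgs(self):
            # constructor argument names, minus 'self'
            args = inspect.getargspec(self.__init__).args
            return [a for a in args if a != 'self']

        def getAsDict(self):
            # map each constructor argument to the attribute of the same name
            return dict((name, getattr(self, name)) for name in self.getAllowedArgs())

    h = Demo(nSamples=100, nProfiles=400)
    print(h.getAsDict())                   # constructor kwargs as a plain dict
    print(Demo(**h.getAsDict()).nSamples)  # round-trips back into an object: 100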
@@ -115,6 +134,7 class BasicHeader(Header):
115 134 dstFlag = None
116 135 errorCount = None
117 136 datatime = None
137 structure = BASIC_STRUCTURE
118 138 __LOCALTIME = None
119 139
120 140 def __init__(self, useLocalTime=True):
@@ -189,15 +209,16 class SystemHeader(Header):
189 209 nChannels = None
190 210 adcResolution = None
191 211 pciDioBusWidth = None
212 structure = SYSTEM_STRUCTURE
192 213
193 def __init__(self, nSamples=0, nProfiles=0, nChannels=0, adcResolution=14, pciDioBusWith=0):
214 def __init__(self, nSamples=0, nProfiles=0, nChannels=0, adcResolution=14, pciDioBusWidth=0):
194 215
195 216 self.size = 24
196 217 self.nSamples = nSamples
197 218 self.nProfiles = nProfiles
198 219 self.nChannels = nChannels
199 220 self.adcResolution = adcResolution
200 self.pciDioBusWidth = pciDioBusWith
221 self.pciDioBusWidth = pciDioBusWidth
201 222
202 223 def read(self, fp):
203 224 self.length = 0
@@ -260,15 +281,15 class RadarControllerHeader(Header):
260 281 line5Function = None
261 282 fClock = None
262 283 prePulseBefore = None
263 prePulserAfter = None
284 prePulseAfter = None
264 285 rangeIpp = None
265 286 rangeTxA = None
266 287 rangeTxB = None
267
288 structure = RADAR_STRUCTURE
268 289 __size = None
269 290
270 291 def __init__(self, expType=2, nTx=1,
271 ippKm=None, txA=0, txB=0,
292 ipp=None, txA=0, txB=0,
272 293 nWindows=None, nHeights=None, firstHeight=None, deltaHeight=None,
273 294 numTaus=0, line6Function=0, line5Function=0, fClock=None,
274 295 prePulseBefore=0, prePulseAfter=0,
@@ -278,10 +299,10 class RadarControllerHeader(Header):
278 299 # self.size = 116
279 300 self.expType = expType
280 301 self.nTx = nTx
281 self.ipp = ippKm
302 self.ipp = ipp
282 303 self.txA = txA
283 304 self.txB = txB
284 self.rangeIpp = ippKm
305 self.rangeIpp = ipp
285 306 self.rangeTxA = txA
286 307 self.rangeTxB = txB
287 308
@@ -292,7 +313,7 class RadarControllerHeader(Header):
292 313 self.line5Function = line5Function
293 314 self.fClock = fClock
294 315 self.prePulseBefore = prePulseBefore
295 self.prePulserAfter = prePulseAfter
316 self.prePulseAfter = prePulseAfter
296 317
297 318 self.nHeights = nHeights
298 319 self.firstHeight = firstHeight
@@ -342,7 +363,7 class RadarControllerHeader(Header):
342 363 self.line5Function = int(header['nLine5Function'][0])
343 364 self.fClock = float(header['fClock'][0])
344 365 self.prePulseBefore = int(header['nPrePulseBefore'][0])
345 self.prePulserAfter = int(header['nPrePulseAfter'][0])
366 self.prePulseAfter = int(header['nPrePulseAfter'][0])
346 367 self.rangeIpp = header['sRangeIPP'][0]
347 368 self.rangeTxA = header['sRangeTxA'][0]
348 369 self.rangeTxB = header['sRangeTxB'][0]
@@ -450,7 +471,7 class RadarControllerHeader(Header):
450 471 self.line5Function,
451 472 self.fClock,
452 473 self.prePulseBefore,
453 self.prePulserAfter,
474 self.prePulseAfter,
454 475 self.rangeIpp,
455 476 self.rangeTxA,
456 477 self.rangeTxB)
@@ -540,15 +561,18 class ProcessingHeader(Header):
540 561 nCohInt = None
541 562 nIncohInt = None
542 563 totalSpectra = None
543
564 structure = PROCESSING_STRUCTURE
544 565 flag_dc = None
545 566 flag_cspc = None
546 567
547 def __init__(self):
568 def __init__(self, dtype=0, blockSize=0, profilesPerBlock=0, dataBlocksPerFile=0, nWindows=0, processFlags=0, nCohInt=0,
569 nIncohInt=0, totalSpectra=0, nHeights=0, firstHeight=0, deltaHeight=0, samplesWin=0, spectraComb=0, nCode=0,
570 code=0, nBaud=None, shif_fft=False, flag_dc=False, flag_cspc=False, flag_decode=False, flag_deflip=False
571 ):
548 572
549 573 # self.size = 0
550 self.dtype = 0
551 self.blockSize = 0
574 self.dtype = dtype
575 self.blockSize = blockSize
552 576 self.profilesPerBlock = 0
553 577 self.dataBlocksPerFile = 0
554 578 self.nWindows = 0
@@ -572,6 +596,7 class ProcessingHeader(Header):
572 596 self.flag_decode = False
573 597 self.flag_deflip = False
574 598 self.length = 0
599
575 600 def read(self, fp):
576 601 self.length = 0
577 602 try:
@@ -13,8 +13,8 class CorrelationPlot(Figure):
13 13 HEIGHTPROF = None
14 14 PREFIX = 'corr'
15 15
16 def __init__(self):
17
16 def __init__(self, **kwargs):
17 Figure.__init__(self, **kwargs)
18 18 self.isConfig = False
19 19 self.__nsubplots = 1
20 20
@@ -16,8 +16,10 from schainpy.model.proc.jroproc_base import Operation
16 16 from schainpy.utils import log
17 17
18 18 jet_values = matplotlib.pyplot.get_cmap("jet", 100)(numpy.arange(100))[10:90]
19 blu_values = matplotlib.pyplot.get_cmap("seismic_r", 20)(numpy.arange(20))[10:15]
20 ncmap = matplotlib.colors.LinearSegmentedColormap.from_list("jro", numpy.vstack((blu_values, jet_values)))
19 blu_values = matplotlib.pyplot.get_cmap(
20 "seismic_r", 20)(numpy.arange(20))[10:15]
21 ncmap = matplotlib.colors.LinearSegmentedColormap.from_list(
22 "jro", numpy.vstack((blu_values, jet_values)))
21 23 matplotlib.pyplot.register_cmap(cmap=ncmap)
22 24
23 25 CMAPS = [plt.get_cmap(s) for s in ('jro', 'jet', 'RdBu_r', 'seismic')]
@@ -354,6 +356,8 class PlotData(Operation, Process):
354 356 i = 1 if numpy.where(ymax < Y)[0][0] < 0 else numpy.where(ymax < Y)[0][0]
355 357 ystep = Y[i-1]/5
356 358
359 ystep = 200 if ymax >= 800 else 100 if ymax >= 400 else 50 if ymax >= 200 else 20
360
357 361 for n, ax in enumerate(self.axes):
358 362 if ax.firsttime:
359 363 ax.set_facecolor(self.bgcolor)
@@ -370,7 +374,8 class PlotData(Operation, Process):
370 374 self.pf_axes[n].set_xlim(self.zmin, self.zmax)
371 375 self.pf_axes[n].set_xlabel('dB')
372 376 self.pf_axes[n].grid(b=True, axis='x')
373 [tick.set_visible(False) for tick in self.pf_axes[n].get_yticklabels()]
377 [tick.set_visible(False)
378 for tick in self.pf_axes[n].get_yticklabels()]
374 379 if self.colorbar:
375 380 ax.cbar = plt.colorbar(ax.plt, ax=ax, pad=0.02, aspect=10)
376 381 ax.cbar.ax.tick_params(labelsize=8)
@@ -438,7 +443,8 class PlotData(Operation, Process):
438 443 receiver.setsockopt(zmq.CONFLATE, self.CONFLATE)
439 444
440 445 if 'server' in self.kwargs['parent']:
441 receiver.connect('ipc:///tmp/{}.plots'.format(self.kwargs['parent']['server']))
446 receiver.connect(
447 'ipc:///tmp/{}.plots'.format(self.kwargs['parent']['server']))
442 448 else:
443 449 receiver.connect("ipc:///tmp/zmq.plots")
444 450
@@ -474,6 +480,7 class PlotData(Operation, Process):
474 480 if self.data:
475 481 self.__plot()
476 482
483
477 484 class PlotSpectraData(PlotData):
478 485 '''
479 486 Plot for Spectra data
@@ -531,7 +538,8 class PlotSpectraData(PlotData):
531 538 )
532 539
533 540 if self.showprofile:
534 ax.plt_profile= self.pf_axes[n].plot(self.data['rti'][n][-1], y)[0]
541 ax.plt_profile = self.pf_axes[n].plot(
542 self.data['rti'][n][-1], y)[0]
535 543 ax.plt_noise = self.pf_axes[n].plot(numpy.repeat(noise, len(y)), y,
536 544 color="k", linestyle="dashed", lw=1)[0]
537 545 if self.CODE == 'spc_mean':
@@ -628,7 +636,8 class PlotCrossSpectraData(PlotData):
628 636 )
629 637 else:
630 638 ax.plt.set_array(coh.T.ravel())
631 self.titles.append('Coherence Ch{} * Ch{}'.format(pair[0], pair[1]))
639 self.titles.append(
640 'Coherence Ch{} * Ch{}'.format(pair[0], pair[1]))
632 641
633 642 ax = self.axes[4*n+3]
634 643 if ax.firsttime:
@@ -667,7 +676,8 class PlotRTIData(PlotData):
667 676 self.nplots = len(self.data.channels)
668 677 self.ylabel = 'Range [Km]'
669 678 self.cb_label = 'dB'
670 self.titles = ['{} Channel {}'.format(self.CODE.upper(), x) for x in range(self.nrows)]
679 self.titles = ['{} Channel {}'.format(
680 self.CODE.upper(), x) for x in range(self.nrows)]
671 681
672 682 def plot(self):
673 683 self.x = self.times
@@ -686,7 +696,8 class PlotRTIData(PlotData):
686 696 cmap=plt.get_cmap(self.colormap)
687 697 )
688 698 if self.showprofile:
689 ax.plot_profile= self.pf_axes[n].plot(self.data['rti'][n][-1], self.y)[0]
699 ax.plot_profile = self.pf_axes[n].plot(
700 self.data['rti'][n][-1], self.y)[0]
690 701 ax.plot_noise = self.pf_axes[n].plot(numpy.repeat(self.data['noise'][n][-1], len(self.y)), self.y,
691 702 color="k", linestyle="dashed", lw=1)[0]
692 703 else:
@@ -698,7 +709,8 class PlotRTIData(PlotData):
698 709 )
699 710 if self.showprofile:
700 711 ax.plot_profile.set_data(self.data['rti'][n][-1], self.y)
701 ax.plot_noise.set_data(numpy.repeat(self.data['noise'][n][-1], len(self.y)), self.y)
712 ax.plot_noise.set_data(numpy.repeat(
713 self.data['noise'][n][-1], len(self.y)), self.y)
702 714
703 715 self.saveTime = self.min_time
704 716
@@ -718,10 +730,12 class PlotCOHData(PlotRTIData):
718 730 self.ylabel = 'Range [Km]'
719 731 if self.CODE == 'coh':
720 732 self.cb_label = ''
721 self.titles = ['Coherence Map Ch{} * Ch{}'.format(x[0], x[1]) for x in self.data.pairs]
733 self.titles = [
734 'Coherence Map Ch{} * Ch{}'.format(x[0], x[1]) for x in self.data.pairs]
722 735 else:
723 736 self.cb_label = 'Degrees'
724 self.titles = ['Phase Map Ch{} * Ch{}'.format(x[0], x[1]) for x in self.data.pairs]
737 self.titles = [
738 'Phase Map Ch{} * Ch{}'.format(x[0], x[1]) for x in self.data.pairs]
725 739
726 740
727 741 class PlotPHASEData(PlotCOHData):
@@ -813,13 +827,14 class PlotSkyMapData(PlotData):
813 827 else:
814 828 self.figure.clf()
815 829
816 self.ax = plt.subplot2grid((self.nrows, self.ncols), (0, 0), 1, 1, polar=True)
830 self.ax = plt.subplot2grid(
831 (self.nrows, self.ncols), (0, 0), 1, 1, polar=True)
817 832 self.ax.firsttime = True
818 833
819
820 834 def plot(self):
821 835
822 arrayParameters = numpy.concatenate([self.data['param'][t] for t in self.times])
836 arrayParameters = numpy.concatenate(
837 [self.data['param'][t] for t in self.times])
823 838 error = arrayParameters[:,-1]
824 839 indValid = numpy.where(error == 0)[0]
825 840 finalMeteor = arrayParameters[indValid,:]
@@ -847,9 +862,9 class PlotSkyMapData(PlotData):
847 862 dt2,
848 863 len(x))
849 864 self.ax.set_title(title, size=8)
850
851 865 self.saveTime = self.max_time
852 866
867
853 868 class PlotParamData(PlotRTIData):
854 869 '''
855 870 Plot for data_param object
@@ -20,8 +20,9 class SpectraHeisScope(Figure):
20 20 HEIGHTPROF = None
21 21 PREFIX = 'spc'
22 22
23 def __init__(self):
23 def __init__(self, **kwargs):
24 24
25 Figure.__init__(self, **kwargs)
25 26 self.isConfig = False
26 27 self.__nsubplots = 1
27 28
@@ -179,8 +180,8 class RTIfromSpectraHeis(Figure):
179 180
180 181 PREFIX = 'rtinoise'
181 182
182 def __init__(self):
183
183 def __init__(self, **kwargs):
184 Figure.__init__(self, **kwargs)
184 185 self.timerange = 24*60*60
185 186 self.isConfig = False
186 187 self.__nsubplots = 1
@@ -25,8 +25,8 class SpectraPlot(Figure):
25 25 self.isConfig = False
26 26 self.__nsubplots = 1
27 27
28 self.WIDTH = 250
29 self.HEIGHT = 250
28 self.WIDTH = 300
29 self.HEIGHT = 300
30 30 self.WIDTHPROF = 120
31 31 self.HEIGHTPROF = 0
32 32 self.counter_imagwr = 0
@@ -113,7 +113,7 class Scope(Figure):
113 113 def run(self, dataOut, id, wintitle="", channelList=None,
114 114 xmin=None, xmax=None, ymin=None, ymax=None, save=False,
115 115 figpath='./', figfile=None, show=True, wr_period=1,
116 ftp=False, server=None, folder=None, username=None, password=None, type='power'):
116 ftp=False, server=None, folder=None, username=None, password=None, type='power', **kwargs):
117 117
118 118 """
119 119
@@ -4,7 +4,7 import sys
4 4 import matplotlib
5 5
6 6 if 'linux' in sys.platform:
7 matplotlib.use("GTK3Agg")
7 matplotlib.use("TkAgg")
8 8
9 9 if 'darwin' in sys.platform:
10 10 matplotlib.use('TKAgg')
@@ -21,15 +21,19 from matplotlib.ticker import FuncFormatter, LinearLocator
21 21 # create jro colormap
22 22
23 23 jet_values = matplotlib.pyplot.get_cmap("jet", 100)(numpy.arange(100))[10:90]
24 blu_values = matplotlib.pyplot.get_cmap("seismic_r", 20)(numpy.arange(20))[10:15]
25 ncmap = matplotlib.colors.LinearSegmentedColormap.from_list("jro", numpy.vstack((blu_values, jet_values)))
24 blu_values = matplotlib.pyplot.get_cmap(
25 "seismic_r", 20)(numpy.arange(20))[10:15]
26 ncmap = matplotlib.colors.LinearSegmentedColormap.from_list(
27 "jro", numpy.vstack((blu_values, jet_values)))
26 28 matplotlib.pyplot.register_cmap(cmap=ncmap)
27 29
30
28 31 def createFigure(id, wintitle, width, height, facecolor="w", show=True, dpi = 80):
29 32
30 33 matplotlib.pyplot.ioff()
31 34
32 fig = matplotlib.pyplot.figure(num=id, facecolor=facecolor, figsize=(1.0*width/dpi, 1.0*height/dpi))
35 fig = matplotlib.pyplot.figure(num=id, facecolor=facecolor, figsize=(
36 1.0 * width / dpi, 1.0 * height / dpi))
33 37 fig.canvas.manager.set_window_title(wintitle)
34 38 # fig.canvas.manager.resize(width, height)
35 39 matplotlib.pyplot.ion()
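createFigure treats width and height as pixels and divides them by the dpi to get the size in inches that matplotlib.pyplot.figure expects. With the new 300x300 SpectraPlot defaults and the default dpi of 80 used here:

    width, height, dpi = 300, 300, 80
    figsize = (1.0 * width / dpi, 1.0 * height / dpi)   # (3.75, 3.75) inches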
@@ -39,6 +43,7 def createFigure(id, wintitle, width, height, facecolor="w", show=True, dpi = 80
39 43
40 44 return fig
41 45
46
42 47 def closeFigure(show=False, fig=None):
43 48
44 49 # matplotlib.pyplot.ioff()
@@ -60,24 +65,29 def closeFigure(show=False, fig=None):
60 65
61 66 return
62 67
68
63 69 def saveFigure(fig, filename):
64 70
65 71 # matplotlib.pyplot.ioff()
66 72 fig.savefig(filename, dpi=matplotlib.pyplot.gcf().dpi)
67 73 # matplotlib.pyplot.ion()
68 74
75
69 76 def clearFigure(fig):
70 77
71 78 fig.clf()
72 79
80
73 81 def setWinTitle(fig, title):
74 82
75 83 fig.canvas.manager.set_window_title(title)
76 84
85
77 86 def setTitle(fig, title):
78 87
79 88 fig.suptitle(title)
80 89
90
81 91 def createAxes(fig, nrow, ncol, xpos, ypos, colspan, rowspan, polar=False):
82 92
83 93 matplotlib.pyplot.ioff()
@@ -88,11 +98,10 def createAxes(fig, nrow, ncol, xpos, ypos, colspan, rowspan, polar=False):
88 98 rowspan=rowspan,
89 99 polar=polar)
90 100
91 axes.grid(True)
92
93 101 matplotlib.pyplot.ion()
94 102 return axes
95 103
104
96 105 def setAxesText(ax, text):
97 106
98 107 ax.annotate(text,
@@ -102,17 +111,18 def setAxesText(ax, text):
102 111 verticalalignment = 'top',
103 112 fontsize = 10)
104 113
114
105 115 def printLabels(ax, xlabel, ylabel, title):
106 116
107 117 ax.set_xlabel(xlabel, size=11)
108 118 ax.set_ylabel(ylabel, size=11)
109 119 ax.set_title(title, size=8)
110 120
121
111 122 def createPline(ax, x, y, xmin, xmax, ymin, ymax, xlabel='', ylabel='', title='',
112 123 ticksize=9, xtick_visible=True, ytick_visible=True,
113 124 nxticks=4, nyticks=10,
114 125 grid=None,color='blue'):
115
116 126 """
117 127
118 128 Input:
@@ -132,7 +142,8 def createPline(ax, x, y, xmin, xmax, ymin, ymax, xlabel='', ylabel='', title=''
132 142 xtickspos = numpy.array([float("%.1f"%i) for i in xtickspos])
133 143 ax.set_xticks(xtickspos)
134 144 else:
135 xtickspos = numpy.arange(nxticks)*int((xmax-xmin)/(nxticks)) + int(xmin)
145 xtickspos = numpy.arange(nxticks) * \
146 int((xmax - xmin) / (nxticks)) + int(xmin)
136 147 # xtickspos = numpy.arange(nxticks)*float(xmax-xmin)/float(nxticks) + int(xmin)
137 148 ax.set_xticks(xtickspos)
138 149
@@ -170,18 +181,21 def createPline(ax, x, y, xmin, xmax, ymin, ymax, xlabel='', ylabel='', title=''
170 181
171 182 return iplot
172 183
184
173 185 def set_linedata(ax, x, y, idline):
174 186
175 187 ax.lines[idline].set_data(x,y)
176 188
189
177 190 def pline(iplot, x, y, xlabel='', ylabel='', title=''):
178 191
179 ax = iplot.get_axes()
192 ax = iplot.axes
180 193
181 194 printLabels(ax, xlabel, ylabel, title)
182 195
183 196 set_linedata(ax, x, y, idline=0)
184 197
198
185 199 def addpline(ax, x, y, color, linestyle, lw):
186 200
187 201 ax.plot(x,y,color=color,linestyle=linestyle,lw=lw)
@@ -206,7 +220,7 def createPcolor(ax, x, y, z, xmin, xmax, ymin, ymax, zmin, zmax,
206 220
207 221 z = numpy.ma.masked_invalid(z)
208 222 cmap=matplotlib.pyplot.get_cmap(colormap)
209 cmap.set_bad('white',1.)
223 cmap.set_bad('black', 1.)
210 224 imesh = ax.pcolormesh(x,y,z.T, vmin=zmin, vmax=zmax, cmap=cmap)
211 225 cb = matplotlib.pyplot.colorbar(imesh, cax=ax_cb)
212 226 cb.set_label(cblabel)
@@ -237,36 +251,30 def createPcolor(ax, x, y, z, xmin, xmax, ymin, ymax, zmin, zmax,
237 251
238 252 if XAxisAsTime:
239 253
240 func = lambda x, pos: ('%s') %(datetime.datetime.utcfromtimestamp(x).strftime("%H:%M:%S"))
254 def func(x, pos): return ('%s') % (
255 datetime.datetime.utcfromtimestamp(x).strftime("%H:%M:%S"))
241 256 ax.xaxis.set_major_formatter(FuncFormatter(func))
242 257 ax.xaxis.set_major_locator(LinearLocator(7))
243 258
244 ax.grid(True)
245 259 matplotlib.pyplot.ion()
246 260 return imesh
247 261
248 def pcolor(imesh, z, xlabel='', ylabel='', title=''):
249 262
250 z = numpy.ma.masked_invalid(z)
251
252 cmap=matplotlib.pyplot.get_cmap('jet')
253 cmap.set_bad('white',1.)
263 def pcolor(imesh, z, xlabel='', ylabel='', title=''):
254 264
255 265 z = z.T
256 ax = imesh.get_axes()
266 ax = imesh.axes
257 267 printLabels(ax, xlabel, ylabel, title)
258 268 imesh.set_array(z.ravel())
259 ax.grid(True)
260 269
261 270
262 271 def addpcolor(ax, x, y, z, zmin, zmax, xlabel='', ylabel='', title='', colormap='jet'):
263 272
264 273 printLabels(ax, xlabel, ylabel, title)
265 z = numpy.ma.masked_invalid(z)
266 cmap=matplotlib.pyplot.get_cmap(colormap)
267 cmap.set_bad('white',1.)
268 ax.pcolormesh(x,y,z.T,vmin=zmin,vmax=zmax, cmap=matplotlib.pyplot.get_cmap(colormap))
269 ax.grid(True)
274
275 ax.pcolormesh(x, y, z.T, vmin=zmin, vmax=zmax,
276 cmap=matplotlib.pyplot.get_cmap(colormap))
277
270 278
271 279 def addpcolorbuffer(ax, x, y, z, zmin, zmax, xlabel='', ylabel='', title='', colormap='jet'):
272 280
@@ -277,17 +285,15 def addpcolorbuffer(ax, x, y, z, zmin, zmax, xlabel='', ylabel='', title='', col
277 285 z = numpy.ma.masked_invalid(z)
278 286
279 287 cmap=matplotlib.pyplot.get_cmap(colormap)
280 cmap.set_bad('white',1.)
288 cmap.set_bad('black', 1.)
281 289
282 290 ax.pcolormesh(x,y,z.T,vmin=zmin,vmax=zmax, cmap=cmap)
283 ax.grid(True)
284 291
285 292
286 293 def createPmultiline(ax, x, y, xmin, xmax, ymin, ymax, xlabel='', ylabel='', title='', legendlabels=None,
287 294 ticksize=9, xtick_visible=True, ytick_visible=True,
288 295 nxticks=4, nyticks=10,
289 296 grid=None):
290
291 297 """
292 298
293 299 Input:
@@ -303,7 +309,8 def createPmultiline(ax, x, y, xmin, xmax, ymin, ymax, xlabel='', ylabel='', tit
303 309 ax.set_ylim([ymin,ymax])
304 310 printLabels(ax, xlabel, ylabel, title)
305 311
306 xtickspos = numpy.arange(nxticks)*int((xmax-xmin)/(nxticks)) + int(xmin)
312 xtickspos = numpy.arange(nxticks) * \
313 int((xmax - xmin) / (nxticks)) + int(xmin)
307 314 ax.set_xticks(xtickspos)
308 315
309 316 for tick in ax.get_xticklabels():
@@ -340,7 +347,7 def createPmultiline(ax, x, y, xmin, xmax, ymin, ymax, xlabel='', ylabel='', tit
340 347
341 348 def pmultiline(iplot, x, y, xlabel='', ylabel='', title=''):
342 349
343 ax = iplot.get_axes()
350 ax = iplot.axes
344 351
345 352 printLabels(ax, xlabel, ylabel, title)
346 353
@@ -348,11 +355,11 def pmultiline(iplot, x, y, xlabel='', ylabel='', title=''):
348 355 line = ax.lines[i]
349 356 line.set_data(x[i,:],y)
350 357
358
351 359 def createPmultilineYAxis(ax, x, y, xmin, xmax, ymin, ymax, xlabel='', ylabel='', title='', legendlabels=None,
352 360 ticksize=9, xtick_visible=True, ytick_visible=True,
353 361 nxticks=4, nyticks=10, marker='.', markersize=10, linestyle="None",
354 362 grid=None, XAxisAsTime=False):
355
356 363 """
357 364
358 365 Input:
@@ -369,7 +376,8 def createPmultilineYAxis(ax, x, y, xmin, xmax, ymin, ymax, xlabel='', ylabel=''
369 376 leg = ax.legend(lines, legendlabels,
370 377 loc='upper right', bbox_to_anchor=(1.16, 1), borderaxespad=0)
371 378
372 for label in leg.get_texts(): label.set_fontsize(9)
379 for label in leg.get_texts():
380 label.set_fontsize(9)
373 381
374 382 ax.set_xlim([xmin,xmax])
375 383 ax.set_ylim([ymin,ymax])
@@ -407,7 +415,8 def createPmultilineYAxis(ax, x, y, xmin, xmax, ymin, ymax, xlabel='', ylabel=''
407 415
408 416 if XAxisAsTime:
409 417
410 func = lambda x, pos: ('%s') %(datetime.datetime.utcfromtimestamp(x).strftime("%H:%M:%S"))
418 def func(x, pos): return ('%s') % (
419 datetime.datetime.utcfromtimestamp(x).strftime("%H:%M:%S"))
411 420 ax.xaxis.set_major_formatter(FuncFormatter(func))
412 421 ax.xaxis.set_major_locator(LinearLocator(7))
413 422
@@ -415,15 +424,18 def createPmultilineYAxis(ax, x, y, xmin, xmax, ymin, ymax, xlabel='', ylabel=''
415 424
416 425 return iplot
417 426
427
418 428 def pmultilineyaxis(iplot, x, y, xlabel='', ylabel='', title=''):
419 429
420 ax = iplot.get_axes()
430 ax = iplot.axes
431
421 432 printLabels(ax, xlabel, ylabel, title)
422 433
423 434 for i in range(len(ax.lines)):
424 435 line = ax.lines[i]
425 436 line.set_data(x,y[i,:])
426 437
438
427 439 def createPolar(ax, x, y,
428 440 xlabel='', ylabel='', title='', ticksize = 9,
429 441 colormap='jet',cblabel='', cbsize="5%",
@@ -438,7 +450,7 def createPolar(ax, x, y,
438 450 # ax.text(0, -110, ylabel, rotation='vertical', va ='center', ha = 'center' ,size='11')
439 451 # ax.text(0, 50, ylabel, rotation='vertical', va ='center', ha = 'left' ,size='11')
440 452 # ax.text(100, 100, 'example', ha='left', va='center', rotation='vertical')
441 ax.yaxis.labelpad = 230
453 ax.yaxis.labelpad = 40
442 454 printLabels(ax, xlabel, ylabel, title)
443 455 iplot = ax.lines[-1]
444 456
@@ -457,18 +469,19 def createPolar(ax, x, y,
457 469
458 470 matplotlib.pyplot.ion()
459 471
460
461 472 return iplot
462 473
474
463 475 def polar(iplot, x, y, xlabel='', ylabel='', title=''):
464 476
465 ax = iplot.get_axes()
477 ax = iplot.axes
466 478
467 479 # ax.text(0, -110, ylabel, rotation='vertical', va ='center', ha = 'center',size='11')
468 480 printLabels(ax, xlabel, ylabel, title)
469 481
470 482 set_linedata(ax, x, y, idline=0)
471 483
484
472 485 def draw(fig):
473 486
474 487 if type(fig) == 'int':
@@ -476,6 +489,7 def draw(fig):
476 489
477 490 fig.canvas.draw()
478 491
492
479 493 def pause(interval=0.000001):
480 494
481 495 matplotlib.pyplot.pause(interval)
@@ -1,4 +1,5
1 import os, sys
1 import os
2 import sys
2 3 import glob
3 4 import fnmatch
4 5 import datetime
@@ -6,9 +7,7 import time
6 7 import re
7 8 import h5py
8 9 import numpy
9 import matplotlib.pyplot as plt
10 10
11 import pylab as plb
12 11 from scipy.optimize import curve_fit
13 12 from scipy import asarray as ar,exp
14 13 from scipy import stats
@@ -31,15 +30,20 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
31 30 from numpy import imag, shape, NaN
32 31
33 32
34 startFp = open('/home/erick/Documents/MIRA35C/20160117/20160117_0000.zspc',"rb")
33 startFp = open(
34 '/home/erick/Documents/MIRA35C/20160117/20160117_0000.zspc', "rb")
35 35
36 36
37 37 FILE_HEADER = numpy.dtype([ #HEADER 1024bytes
38 38 ('Hname',numpy.str_,32), #Original file name
39 ('Htime',numpy.str_,32), #Date and time when the file was created
40 ('Hoper',numpy.str_,64), #Name of operator who created the file
41 ('Hplace',numpy.str_,128), #Place where the measurements was carried out
42 ('Hdescr',numpy.str_,256), #Description of measurements
39 # Date and time when the file was created
40 ('Htime', numpy.str_, 32),
41 # Name of operator who created the file
42 ('Hoper', numpy.str_, 64),
43 # Place where the measurements were carried out
44 ('Hplace', numpy.str_, 128),
45 # Description of measurements
46 ('Hdescr', numpy.str_, 256),
43 47 ('Hdummy',numpy.str_,512), #Reserved space
44 48 #Main chunk
45 49 ('Msign','<i4'), #Main chunk signature FZKF or NUIG
@@ -50,19 +54,28 FILE_HEADER = numpy.dtype([ #HEADER 1024bytes
50 54 ('PPARprf','<i4'), #Pulse repetition frequency
51 55 ('PPARpdr','<i4'), #Pulse duration
52 56 ('PPARsft','<i4'), #FFT length
53 ('PPARavc','<i4'), #Number of spectral (in-coherent) averages
54 ('PPARihp','<i4'), #Number of lowest range gate for moment estimation
55 ('PPARchg','<i4'), #Count for gates for moment estimation
56 ('PPARpol','<i4'), #switch on/off polarimetric measurements. Should be 1.
57 # Number of spectral (in-coherent) averages
58 ('PPARavc', '<i4'),
59 # Number of lowest range gate for moment estimation
60 ('PPARihp', '<i4'),
61 # Count for gates for moment estimation
62 ('PPARchg', '<i4'),
63 # switch on/off polarimetric measurements. Should be 1.
64 ('PPARpol', '<i4'),
57 65 #Service DSP parameters
58 ('SPARatt','<i4'), #STC attenuation on the lowest ranges on/off
66 # STC attenuation on the lowest ranges on/off
67 ('SPARatt', '<i4'),
59 68 ('SPARtx','<i4'), #OBSOLETE
60 69 ('SPARaddGain0','<f4'), #OBSOLETE
61 70 ('SPARaddGain1','<f4'), #OBSOLETE
62 ('SPARwnd','<i4'), #Debug only. It normal mode it is 0.
63 ('SPARpos','<i4'), #Delay between sync pulse and tx pulse for phase corr, ns
64 ('SPARadd','<i4'), #"add to pulse" to compensate for delay between the leading edge of driver pulse and envelope of the RF signal.
65 ('SPARlen','<i4'), #Time for measuring txn pulse phase. OBSOLETE
71 # Debug only. In normal mode it is 0.
72 ('SPARwnd', '<i4'),
73 # Delay between sync pulse and tx pulse for phase corr, ns
74 ('SPARpos', '<i4'),
75 # "add to pulse" to compensate for delay between the leading edge of driver pulse and envelope of the RF signal.
76 ('SPARadd', '<i4'),
77 # Time for measuring txn pulse phase. OBSOLETE
78 ('SPARlen', '<i4'),
66 79 ('SPARcal','<i4'), #OBSOLETE
67 80 ('SPARnos','<i4'), #OBSOLETE
68 81 ('SPARof0','<i4'), #detection threshold
@@ -73,19 +86,23 FILE_HEADER = numpy.dtype([ #HEADER 1024bytes
73 86 ('SPARtst','<i4'), #OBSOLETE
74 87 ('SPARcor','<i4'), #OBSOLETE
75 88 ('SPARofs','<i4'), #OBSOLETE
76 ('SPARhsn','<i4'), #Hildebrand div noise detection on noise gate
77 ('SPARhsa','<f4'), #Hildebrand div noise detection on all gates
89 # Hildebrand div noise detection on noise gate
90 ('SPARhsn', '<i4'),
91 # Hildebrand div noise detection on all gates
92 ('SPARhsa', '<f4'),
78 93 ('SPARcalibPow_M','<f4'), #OBSOLETE
79 94 ('SPARcalibSNR_M','<f4'), #OBSOLETE
80 95 ('SPARcalibPow_S','<f4'), #OBSOLETE
81 96 ('SPARcalibSNR_S','<f4'), #OBSOLETE
82 ('SPARrawGate1','<i4'), #Lowest range gate for spectra saving Raw_Gate1 >=5
83 ('SPARrawGate2','<i4'), #Number of range gates with atmospheric signal
84 ('SPARraw','<i4'), #flag - IQ or spectra saving on/off
97 # Lowest range gate for spectra saving Raw_Gate1 >=5
98 ('SPARrawGate1', '<i4'),
99 # Number of range gates with atmospheric signal
100 ('SPARrawGate2', '<i4'),
101 # flag - IQ or spectra saving on/off
102 ('SPARraw', '<i4'),
85 103 ('SPARprc','<i4'),]) #flag - Moment estimation switched on/off
86 104
87 105
88
89 106 self.Hname= None
90 107 self.Htime= None
91 108 self.Hoper= None
@@ -136,7 +153,6 self.SPARraw=None
136 153 self.SPARprc=None
137 154
138 155
139
140 156 header = numpy.fromfile(fp, FILE_HEADER,1)
141 157 ''' numpy.fromfile(file, dtype, count, sep='')
142 158 file : file or str
@@ -207,9 +223,8 SPARraw=header['SPARraw'][0]
207 223 SPARprc=header['SPARprc'][0]
208 224
209 225
210
211 226 SRVI_STRUCTURE = numpy.dtype([
212 ('frame_cnt','<u4'),#
227 ('frame_cnt', '<u4'),
213 228 ('time_t','<u4'), #
214 229 ('tpow','<f4'), #
215 230 ('npw1','<f4'), #
@@ -225,19 +240,18 SRVI_STRUCTURE = numpy.dtype([
225 240 ('azivel','<f4'), #
226 241 ('elvpos','<f4'), #
227 242 ('elvvel','<f4'), #
228 ('northAngle','<f4'), #
243 ('northAngle', '<f4'),
229 244 ('microsec','<u4'), #
230 245 ('azisetvel','<f4'), #
231 246 ('elvsetpos','<f4'), #
232 247 ('RadarConst','<f4'),]) #
233 248
234 249 JUMP_STRUCTURE = numpy.dtype([
235 ('jump','<u140'),#
236 ('SizeOfDataBlock1',numpy.str_,32),#
237 ('jump','<i4'),#
238 ('DataBlockTitleSRVI1',numpy.str_,32),#
239 ('SizeOfSRVI1','<i4'),])#
240
250 ('jump', '<u140'),
251 ('SizeOfDataBlock1', numpy.str_, 32),
252 ('jump', '<i4'),
253 ('DataBlockTitleSRVI1', numpy.str_, 32),
254 ('SizeOfSRVI1', '<i4'), ])
241 255
242 256
243 257 #frame_cnt=0, time_t= 0, tpow=0, npw1=0, npw2=0,
@@ -269,7 +283,6 elvsetpos = elvsetpos
269 283 RadarConst5 = RadarConst
270 284
271 285
272
273 286 #print fp
274 287 #startFp = open('/home/erick/Documents/Data/huancayo.20161019.22.fdt',"rb") #The method tell() returns the current position of the file read/write pointer within the file.
275 288 #startFp = open(fp,"rb") #The method tell() returns the current position of the file read/write pointer within the file.
@@ -283,7 +296,7 print 'Posicion del bloque: ',OffRHeader
283 296
284 297 header = numpy.fromfile(startFp,SRVI_STRUCTURE,1)
285 298
286 self.frame_cnt = header['frame_cnt'][0]#
299 self.frame_cnt = header['frame_cnt'][0]
287 300 self.time_t = header['time_t'][0] #
288 301 self.tpow = header['tpow'][0] #
289 302 self.npw1 = header['npw1'][0] #
@@ -316,6 +329,3 endFp = self.OffsetStartHeader + self.RecCounter*self.Off2StartNxtRec
316 329 print '=============================================='
317 330
318 331 print '=============================================='
319
320
321 No newline at end of file
@@ -8,7 +8,7 from jroIO_voltage import *
8 8 from jroIO_spectra import *
9 9 from jroIO_heispectra import *
10 10 from jroIO_usrp import *
11
11 from jroIO_digitalRF import *
12 12 from jroIO_kamisr import *
13 13 from jroIO_param import *
14 14 from jroIO_hf import *
@@ -83,6 +83,7 DATA_STRUCTURE = numpy.dtype([
83 83 ('sea_algorithm', '<u4')
84 84 ])
85 85
86
86 87 class BLTRParamReader(JRODataReader, ProcessingUnit):
87 88 '''
88 89 Boundary Layer and Tropospheric Radar (BLTR) reader, Wind velocities and SNR from *.sswma files
@@ -130,7 +131,8 class BLTRParamReader(JRODataReader, ProcessingUnit):
130 131 self.fileIndex = 0
131 132
132 133 if not self.fileList:
133 raise Warning, "There is no files matching these date in the folder: %s. \n Check 'startDate' and 'endDate' "%(path)
134 raise Warning, "There are no files matching these dates in the folder: %s. \n Check 'startDate' and 'endDate' " % (
135 path)
134 136
135 137 self.setNextFile()
136 138
@@ -144,7 +146,6 class BLTRParamReader(JRODataReader, ProcessingUnit):
144 146 startDate - Select file from this date
145 147 endDate - Select file until this date
146 148 ext - Extension of the file to read
147
148 149 '''
149 150
150 151 log.success('Searching files in {} '.format(path), 'BLTRParamReader')
@@ -192,7 +193,8 class BLTRParamReader(JRODataReader, ProcessingUnit):
192 193 filename = os.path.join(self.path, self.fileList[file_id])
193 194
194 195 dirname, name = os.path.split(filename)
195 self.siteFile = name.split('.')[0] # 'peru2' ---> Piura - 'peru1' ---> Huancayo or Porcuya
196 # 'peru2' ---> Piura - 'peru1' ---> Huancayo or Porcuya
197 self.siteFile = name.split('.')[0]
196 198 if self.filename is not None:
197 199 self.fp.close()
198 200 self.filename = filename
@@ -308,7 +310,8 class BLTRParamReader(JRODataReader, ProcessingUnit):
308 310 data = numpy.fromfile(self.fp, data_structure, self.nranges)
309 311
310 312 height = data['range']
311 winds = numpy.array((data['zonal'], data['meridional'], data['vertical']))
313 winds = numpy.array(
314 (data['zonal'], data['meridional'], data['vertical']))
312 315 snr = data['rx_snr'].T
313 316
314 317 winds[numpy.where(winds == -9999.)] = numpy.nan
@@ -10,7 +10,8 import time
10 10 import numpy
11 11 import fnmatch
12 12 import inspect
13 import time, datetime
13 import time
14 import datetime
14 15 import traceback
15 16 import zmq
16 17
@@ -24,6 +25,7 from schainpy.model.data.jroheaderIO import get_dtype_index, get_numpy_dtype, ge
24 25
25 26 LOCALTIME = True
26 27
28
27 29 def isNumber(cad):
28 30 """
29 31 Checks whether the set of characters that make up a string can be converted to a number.
@@ -43,6 +45,7 def isNumber(cad):
43 45 except:
44 46 return False
45 47
48
46 49 def isFileInEpoch(filename, startUTSeconds, endUTSeconds):
47 50 """
48 51 This function determines whether or not a data file falls within the specified date range.
@@ -84,19 +87,18 def isFileInEpoch(filename, startUTSeconds, endUTSeconds):
84 87
85 88 return 1
86 89
87 def isTimeInRange(thisTime, startTime, endTime):
88 90
91 def isTimeInRange(thisTime, startTime, endTime):
89 92 if endTime >= startTime:
90 93 if (thisTime < startTime) or (thisTime > endTime):
91 94 return 0
92
93 95 return 1
94 96 else:
95 97 if (thisTime < startTime) and (thisTime > endTime):
96 98 return 0
97
98 99 return 1
99 100
101
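isTimeInRange handles ranges that wrap past midnight: when endTime < startTime a sample is rejected only if it falls strictly between endTime and startTime. A small check, assuming isTimeInRange is imported from this module:

    import datetime

    # isTimeInRange as defined above (assumed importable)
    start = datetime.time(22, 0)   # 22:00
    end = datetime.time(2, 0)      # 02:00 of the next day
    print(isTimeInRange(datetime.time(23, 30), start, end))   # 1, inside the wrapped range
    print(isTimeInRange(datetime.time(12, 0), start, end))    # 0, outside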
100 102 def isFileInTimeRange(filename, startDate, endDate, startTime, endTime):
101 103 """
102 104 Returns 1 if the data file falls within the specified time range.
@@ -122,7 +124,6 def isFileInTimeRange(filename, startDate, endDate, startTime, endTime):
122 124
123 125 """
124 126
125
126 127 try:
127 128 fp = open(filename,'rb')
128 129 except IOError:
@@ -185,7 +186,6 def isFileInTimeRange(filename, startDate, endDate, startTime, endTime):
185 186
186 187 #If endTime < startTime then endTime belongs to the next day
187 188
188
189 189 #<<<<<<<<<<<o o>>>>>>>>>>>
190 190 #-----------o----------------------------o-----------
191 191 # endTime startTime
@@ -201,6 +201,7 def isFileInTimeRange(filename, startDate, endDate, startTime, endTime):
201 201
202 202 return thisDatetime
203 203
204
204 205 def isFolderInDateRange(folder, startDate=None, endDate=None):
205 206 """
206 207 Returns 1 if the data folder falls within the specified date range.
@@ -241,6 +242,7 def isFolderInDateRange(folder, startDate=None, endDate=None):
241 242
242 243 return 1
243 244
245
244 246 def isFileInDateRange(filename, startDate=None, endDate=None):
245 247 """
246 248 Returns 1 if the data file falls within the specified date range.
@@ -283,6 +285,7 def isFileInDateRange(filename, startDate=None, endDate=None):
283 285
284 286 return 1
285 287
288
286 289 def getFileFromSet(path, ext, set):
287 290 validFilelist = []
288 291 fileList = os.listdir(path)
@@ -302,7 +305,8 def getFileFromSet(path, ext, set):
302 305
303 306 validFilelist.append(thisFile)
304 307
305 myfile = fnmatch.filter(validFilelist,'*%4.4d%3.3d%3.3d*'%(year,doy,set))
308 myfile = fnmatch.filter(
309 validFilelist, '*%4.4d%3.3d%3.3d*' % (year, doy, set))
306 310
307 311 if len(myfile)!= 0:
308 312 return myfile[0]
@@ -317,6 +321,7 def getFileFromSet(path, ext, set):
317 321
318 322 return None
319 323
324
320 325 def getlastFileFromPath(path, ext):
321 326 """
322 327 Cleans up the fileList, keeping only the entries that match the "PYYYYDDDSSS.ext" format
@@ -359,6 +364,7 def getlastFileFromPath(path, ext):
359 364
360 365 return None
361 366
367
362 368 def checkForRealPath(path, foldercounter, year, doy, set, ext):
363 369 """
364 370 Since Linux is case sensitive, checkForRealPath finds the correct name of a path,
@@ -400,12 +406,16 def checkForRealPath(path, foldercounter, year, doy, set, ext):
400 406 if prefixDir != None:
401 407 #build the directory name xYYYYDDD (x=d or x=D)
402 408 if foldercounter == 0:
403 thispath = os.path.join(path, "%s%04d%03d" % ( prefixDir, year, doy ))
409 thispath = os.path.join(path, "%s%04d%03d" %
410 (prefixDir, year, doy))
404 411 else:
405 thispath = os.path.join(path, "%s%04d%03d_%02d" % ( prefixDir, year, doy , foldercounter))
412 thispath = os.path.join(path, "%s%04d%03d_%02d" % (
413 prefixDir, year, doy, foldercounter))
406 414 for prefixFile in prefixFileList: #sweep over the two possible combinations of "D"
407 filename = "%s%04d%03d%03d%s" % ( prefixFile, year, doy, set, ext ) #formo el nombre del file xYYYYDDDSSS.ext
408 fullfilename = os.path.join( thispath, filename ) #formo el path completo
415 # build the file name xYYYYDDDSSS.ext
416 filename = "%s%04d%03d%03d%s" % (prefixFile, year, doy, set, ext)
417 fullfilename = os.path.join(
418 thispath, filename) # build the full path
409 419
410 420 if os.path.exists( fullfilename ): #check that it exists
411 421 find_flag = True
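The names assembled above follow the xYYYYDDD[_NN] / xYYYYDDDSSS.ext convention used throughout this reader: x is the 'd'/'D' prefix, DDD the day of year and SSS the set number. A sketch of what checkForRealPath ends up building, with illustrative values (the base path and '.r' extension are assumptions):

    import os

    prefixDir, prefixFile = 'd', 'D'
    year, doy, set_, ext = 2014, 184, 3, '.r'   # assumed example values
    thispath = os.path.join('/data', "%s%04d%03d" % (prefixDir, year, doy))   # /data/d2014184
    filename = "%s%04d%03d%03d%s" % (prefixFile, year, doy, set_, ext)        # D2014184003.r
    fullfilename = os.path.join(thispath, filename)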
@@ -418,6 +428,7 def checkForRealPath(path, foldercounter, year, doy, set, ext):
418 428
419 429 return fullfilename, filename
420 430
431
421 432 def isRadarFolder(folder):
422 433 try:
423 434 year = int(folder[1:5])
@@ -427,6 +438,7 def isRadarFolder(folder):
427 438
428 439 return 1
429 440
441
430 442 def isRadarFile(file):
431 443 try:
432 444 year = int(file[1:5])
@@ -437,6 +449,7 def isRadarFile(file):
437 449
438 450 return 1
439 451
452
440 453 def getDateFromRadarFile(file):
441 454 try:
442 455 year = int(file[1:5])
@@ -448,6 +461,7 def getDateFromRadarFile(file):
448 461 thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy-1)
449 462 return thisDate
450 463
464
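getDateFromRadarFile converts the year/day-of-year pair embedded in a file name into a calendar date with a timedelta from January 1st, for example:

    import datetime

    year, doy = 2014, 184
    thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy - 1)   # datetime.date(2014, 7, 3)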
451 465 def getDateFromRadarFolder(folder):
452 466 try:
453 467 year = int(folder[1:5])
@@ -458,6 +472,7 def getDateFromRadarFolder(folder):
458 472 thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy-1)
459 473 return thisDate
460 474
475
461 476 class JRODataIO:
462 477
463 478 c = 3E8
@@ -540,6 +555,7 class JRODataIO:
540 555 def getAllowedArgs(self):
541 556 return inspect.getargspec(self.run).args
542 557
558
543 559 class JRODataReader(JRODataIO):
544 560
545 561 online = 0
@@ -579,7 +595,6 class JRODataReader(JRODataIO):
579 595 selBlocktime = None
580 596
581 597 def __init__(self):
582
583 598 """
584 599 This class is used to find data files
585 600
@@ -590,7 +605,6 class JRODataReader(JRODataIO):
590 605 """
591 606 pass
592 607
593
594 608 def createObjByDefault(self):
595 609 """
596 610
@@ -619,7 +633,8 class JRODataReader(JRODataIO):
619 633
620 634 pathList = []
621 635
622 dateList, pathList = self.findDatafiles(path, startDate, endDate, expLabel, ext, walk, include_path=True)
636 dateList, pathList = self.findDatafiles(
637 path, startDate, endDate, expLabel, ext, walk, include_path=True)
623 638
624 639 if dateList == []:
625 640 return [], []
@@ -644,7 +659,8 class JRODataReader(JRODataIO):
644 659 if skip == 0:
645 660 skippedFileList = []
646 661 else:
647 skippedFileList = fileList[cursor*skip: cursor*skip + skip]
662 skippedFileList = fileList[cursor *
663 skip: cursor * skip + skip]
648 664
649 665 else:
650 666 skippedFileList = fileList
@@ -656,7 +672,8 class JRODataReader(JRODataIO):
656 672 if not isFileInDateRange(filename, startDate, endDate):
657 673 continue
658 674
659 thisDatetime = isFileInTimeRange(filename, startDate, endDate, startTime, endTime)
675 thisDatetime = isFileInTimeRange(
676 filename, startDate, endDate, startTime, endTime)
660 677
661 678 if not(thisDatetime):
662 679 continue
@@ -680,7 +697,6 class JRODataReader(JRODataIO):
680 697 return pathList, filenameList
681 698
682 699 def __searchFilesOnLine(self, path, expLabel = "", ext = None, walk=True, set=None):
683
684 700 """
685 701 Searches for the latest file of the latest folder (whether or not determined by startDateTime) and
686 702 returns the file found, along with other data.
@@ -727,10 +743,10 class JRODataReader(JRODataIO):
727 743 dirList = sorted( dirList, key=str.lower )
728 744
729 745 doypath = dirList[-1]
730 foldercounter = int(doypath.split('_')[1]) if len(doypath.split('_'))>1 else 0
746 foldercounter = int(doypath.split('_')[1]) if len(
747 doypath.split('_')) > 1 else 0
731 748 fullpath = os.path.join(path, doypath, expLabel)
732 749
733
734 750 print "[Reading] %s folder was found: " %(fullpath )
735 751
736 752 if set == None:
@@ -814,14 +830,16 class JRODataReader(JRODataIO):
814 830 self.foldercounter += 1
815 831
816 832 #look for the first available file
817 fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
833 fullfilename, filename = checkForRealPath(
834 self.path, self.foldercounter, self.year, self.doy, self.set, self.ext)
818 835 if fullfilename:
819 836 if self.__verifyFile(fullfilename, False):
820 837 fileOk_flag = True
821 838
822 839 #if no file is found, wait and search again
823 840 if not(fileOk_flag):
824 for nFiles in range(self.nFiles+1): #busco en los siguientes self.nFiles+1 files posibles
841 # search the next self.nFiles+1 possible files
842 for nFiles in range(self.nFiles + 1):
825 843
826 844 if firstTime_flag: #on the first pass, run the loop self.nTries times
827 845 tries = self.nTries
@@ -835,7 +853,8 class JRODataReader(JRODataIO):
835 853 else:
836 854 print "\t[Reading] Searching the next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext)
837 855
838 fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext )
856 fullfilename, filename = checkForRealPath(
857 self.path, self.foldercounter, self.year, self.doy, self.set, self.ext)
839 858 if fullfilename:
840 859 if self.__verifyFile(fullfilename):
841 860 fileOk_flag = True
@@ -849,7 +868,8 class JRODataReader(JRODataIO):
849 868 print "\t[Reading] Skipping the file \"%s\" because this file doesn't exist" % filename
850 869 self.set += 1
851 870
852 if nFiles == (self.nFiles-1): #si no encuentro el file buscado cambio de carpeta y busco en la siguiente carpeta
871 # if the requested file is not found, move on to the next folder and keep searching
872 if nFiles == (self.nFiles - 1):
853 873 self.set = 0
854 874 self.doy += 1
855 875 self.foldercounter = 0
@@ -858,7 +878,8 class JRODataReader(JRODataIO):
858 878 self.fileSize = os.path.getsize( fullfilename )
859 879 self.filename = fullfilename
860 880 self.flagIsNewFile = 1
861 if self.fp != None: self.fp.close()
881 if self.fp != None:
882 self.fp.close()
862 883 self.fp = open(fullfilename, 'rb')
863 884 self.flagNoMoreFiles = 0
864 885 # print '[Reading] Setting the file: %s' % fullfilename
@@ -928,7 +949,6 class JRODataReader(JRODataIO):
928 949 print "[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1)
929 950 sleep( self.delay )
930 951
931
932 952 return 0
933 953
934 954 def waitDataBlock(self,pointer_location):
@@ -1022,7 +1042,7 class JRODataReader(JRODataIO):
1022 1042 if not(self.setNextFile()):
1023 1043 return 0
1024 1044
1025 deltaTime = self.basicHeaderObj.utc - self.lastUTTime #
1045 deltaTime = self.basicHeaderObj.utc - self.lastUTTime
1026 1046 self.lastUTTime = self.basicHeaderObj.utc
1027 1047
1028 1048 self.flagDiscontinuousBlock = 0
@@ -1043,9 +1063,7 class JRODataReader(JRODataIO):
1043 1063 return 0
1044 1064
1045 1065 self.getBasicHeader()
1046
1047 if not isTimeInRange(self.dataOut.datatime.time(), self.startTime, self.endTime):
1048
1066 if (self.dataOut.datatime < datetime.datetime.combine(self.startDate, self.startTime)) or (self.dataOut.datatime > datetime.datetime.combine(self.endDate, self.endTime)):
1049 1067 print "[Reading] Block No. %d/%d -> %s [Skipping]" %(self.nReadBlocks,
1050 1068 self.processingHeaderObj.dataBlocksPerFile,
1051 1069 self.dataOut.datatime.ctime())
@@ -1068,7 +1086,8 class JRODataReader(JRODataIO):
1068 1086
1069 1087 self.firstHeaderSize = self.basicHeaderObj.size
1070 1088
1071 datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
1089 datatype = int(numpy.log2((self.processingHeaderObj.processFlags &
1090 PROCFLAG.DATATYPE_MASK)) - numpy.log2(PROCFLAG.DATATYPE_CHAR))
1072 1091 if datatype == 0:
1073 1092 datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
1074 1093 elif datatype == 1:
@@ -1086,7 +1105,9 class JRODataReader(JRODataIO):
1086 1105
1087 1106 self.dtype = datatype_str
1088 1107 #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
1089 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1)
1108 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + \
1109 self.firstHeaderSize + self.basicHeaderSize * \
1110 (self.processingHeaderObj.dataBlocksPerFile - 1)
1090 1111 # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
1091 1112 # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
1092 1113 self.getBlockDimension()
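Two of the wrapped expressions above are easier to follow with numbers plugged in: the datatype index is the distance in bits between the flag that is set and DATATYPE_CHAR, and the expected file size is rebuilt from the headers. A sketch with assumed values (the flag constants and sizes below are illustrative, not read from a real header):

    import numpy

    DATATYPE_CHAR = 0x00000400          # assumed lowest datatype bit
    DATATYPE_MASK = 0x0000FC00          # assumed six-bit datatype field
    processFlags = 0x00001000           # assumed example: third datatype bit set
    datatype = int(numpy.log2(processFlags & DATATYPE_MASK) - numpy.log2(DATATYPE_CHAR))
    # -> 2, i.e. the third branch of the if/elif chain above

    dataBlocksPerFile, blockSize = 360, 1048576      # assumed example values
    firstHeaderSize, basicHeaderSize = 372, 24       # assumed example values
    fileSizeByHeader = (dataBlocksPerFile * blockSize + firstHeaderSize
                        + basicHeaderSize * (dataBlocksPerFile - 1))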
@@ -1297,7 +1318,8 class JRODataReader(JRODataIO):
1297 1318 print "[Reading] Searching files in online mode..."
1298 1319
1299 1320 for nTries in range( self.nTries ):
1300 fullpath, foldercounter, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext, walk=walk, set=set)
1321 fullpath, foldercounter, file, year, doy, set = self.__searchFilesOnLine(
1322 path=path, expLabel=expLabel, ext=ext, walk=walk, set=set)
1301 1323
1302 1324 if fullpath:
1303 1325 break
@@ -1345,7 +1367,8 class JRODataReader(JRODataIO):
1345 1367 self.nTxs = nTxs
1346 1368 self.startTime = startTime
1347 1369 self.endTime = endTime
1348
1370 self.endDate = endDate
1371 self.startDate = startDate
1349 1372 #Added-----------------
1350 1373 self.selBlocksize = blocksize
1351 1374 self.selBlocktime = blocktime
@@ -1370,12 +1393,14 class JRODataReader(JRODataIO):
1370 1393 # self.getBasicHeader()
1371 1394
1372 1395 if last_set != None:
1373 self.dataOut.last_block = last_set * self.processingHeaderObj.dataBlocksPerFile + self.basicHeaderObj.dataBlock
1396 self.dataOut.last_block = last_set * \
1397 self.processingHeaderObj.dataBlocksPerFile + self.basicHeaderObj.dataBlock
1374 1398 return
1375 1399
1376 1400 def getBasicHeader(self):
1377 1401
1378 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000. + self.profileIndex * self.radarControllerHeaderObj.ippSeconds
1402 self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond / \
1403 1000. + self.profileIndex * self.radarControllerHeaderObj.ippSeconds
1379 1404
1380 1405 self.dataOut.flagDiscontinuousBlock = self.flagDiscontinuousBlock
1381 1406
@@ -1391,7 +1416,6 class JRODataReader(JRODataIO):
1391 1416
1392 1417 # self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock*self.nTxs
1393 1418
1394
1395 1419 def getFirstHeader(self):
1396 1420
1397 1421 raise NotImplementedError
@@ -1499,6 +1523,7 class JRODataReader(JRODataIO):
1499 1523 else:
1500 1524 self.getFromServer()
1501 1525
1526
1502 1527 class JRODataWriter(JRODataIO):
1503 1528
1504 1529 """
@@ -1523,23 +1548,18 class JRODataWriter(JRODataIO):
1523 1548 def __init__(self, dataOut=None):
1524 1549 raise NotImplementedError
1525 1550
1526
1527 1551 def hasAllDataInBuffer(self):
1528 1552 raise NotImplementedError
1529 1553
1530
1531 1554 def setBlockDimension(self):
1532 1555 raise NotImplementedError
1533 1556
1534
1535 1557 def writeBlock(self):
1536 1558 raise NotImplementedError
1537 1559
1538
1539 1560 def putData(self):
1540 1561 raise NotImplementedError
1541 1562
1542
1543 1563 def getProcessFlags(self):
1544 1564
1545 1565 processFlags = 0
@@ -1618,7 +1638,8 class JRODataWriter(JRODataIO):
1618 1638
1620 1640 # COMPUTE PARAMETERS
1620 1640
1621 sizeLongHeader = self.systemHeaderObj.size + self.radarControllerHeaderObj.size + self.processingHeaderObj.size
1641 sizeLongHeader = self.systemHeaderObj.size + \
1642 self.radarControllerHeaderObj.size + self.processingHeaderObj.size
1622 1643 self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader
1623 1644
1624 1645 self.basicHeaderObj.write(self.fp)
@@ -1649,7 +1670,6 class JRODataWriter(JRODataIO):
1649 1670
1650 1671 return 1
1651 1672
1652
1653 1673 def writeNextBlock(self):
1654 1674 """
1655 1675 Selects the next block of data and writes it to a file
@@ -1707,7 +1727,8 class JRODataWriter(JRODataIO):
1707 1727 # 0 1234 567 89A BCDE (hex)
1708 1728 # x YYYY DDD SSS .ext
1709 1729 if isNumber( filen[8:11] ):
1710 setFile = int( filen[8:11] ) #inicializo mi contador de seteo al seteo del ultimo file
1730 # initialize the set counter to the set number of the last file
1731 setFile = int(filen[8:11])
1711 1732 else:
1712 1733 setFile = -1
1713 1734 else:
@@ -1720,7 +1741,8 class JRODataWriter(JRODataIO):
1720 1741 setFile = 0
1721 1742 self.nTotalBlocks = 0
1722 1743
1723 filen = '%s%4.4d%3.3d%3.3d%s' % (self.optchar, timeTuple.tm_year, timeTuple.tm_yday, setFile, ext )
1744 filen = '%s%4.4d%3.3d%3.3d%s' % (
1745 self.optchar, timeTuple.tm_year, timeTuple.tm_yday, setFile, ext)
1724 1746
1725 1747 filename = os.path.join( path, subfolder, filen )
1726 1748
@@ -1801,7 +1823,8 class JRODataWriter(JRODataIO):
1801 1823
1802 1824 if not(self.isConfig):
1803 1825
1804 self.setup(dataOut, path, blocksPerFile, profilesPerBlock=profilesPerBlock, set=set, ext=ext, datatype=datatype, **kwargs)
1826 self.setup(dataOut, path, blocksPerFile, profilesPerBlock=profilesPerBlock,
1827 set=set, ext=ext, datatype=datatype, **kwargs)
1805 1828 self.isConfig = True
1806 1829
1807 1830 self.putData()
@@ -1,4 +1,5
1 import os, sys
1 import os
2 import sys
2 3 import glob
3 4 import fnmatch
4 5 import datetime
@@ -6,7 +7,6 import time
6 7 import re
7 8 import h5py
8 9 import numpy
9 import matplotlib.pyplot as plt
10 10
11 11 import pylab as plb
12 12 from scipy.optimize import curve_fit
@@ -37,7 +37,6 class Header(object):
37 37 def __init__(self):
38 38 raise NotImplementedError
39 39
40
41 40 def read(self):
42 41
43 42 raise NotImplementedError
@@ -67,23 +66,23 class Header(object):
67 66 #print message
68 67
69 68
70
71
72
73 69 FILE_STRUCTURE = numpy.dtype([ #HEADER 48bytes
74 70 ('FileMgcNumber','<u4'), #0x23020100
75 ('nFDTdataRecors','<u4'), #No Of FDT data records in this file (0 or more)
71 # No Of FDT data records in this file (0 or more)
72 ('nFDTdataRecors', '<u4'),
76 73 ('OffsetStartHeader','<u4'),
77 74 ('RadarUnitId','<u4'),
78 75 ('SiteName',numpy.str_,32), #Null terminated
79 76 ])
80 77
78
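Because FILE_STRUCTURE describes a fixed 48-byte header, a single numpy.fromfile call is enough to pull it into named fields, which is essentially what FHread does below. A minimal sketch, with a hypothetical file path:

    import numpy

    FILE_STRUCTURE = numpy.dtype([
        ('FileMgcNumber', '<u4'),        # 0x23020100
        ('nFDTdataRecors', '<u4'),       # number of FDT data records in the file
        ('OffsetStartHeader', '<u4'),
        ('RadarUnitId', '<u4'),
        ('SiteName', numpy.str_, 32),    # null terminated
    ])

    with open('/path/to/site.fdt', 'rb') as fp:      # hypothetical path
        header = numpy.fromfile(fp, FILE_STRUCTURE, 1)
    nRecords = int(header['nFDTdataRecors'][0])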
81 79 class FileHeaderBLTR(Header):
82 80
83 81 def __init__(self):
84 82
85 83 self.FileMgcNumber= 0 #0x23020100
86 self.nFDTdataRecors=0 #No Of FDT data records in this file (0 or more)
84 # No Of FDT data records in this file (0 or more)
85 self.nFDTdataRecors = 0
87 86 self.RadarUnitId= 0
88 87 self.OffsetStartHeader=0
89 88 self.SiteName= ""
@@ -99,7 +98,6 class FileHeaderBLTR(Header):
99 98 print 'file header pointer', startFp.tell()
100 99 print ' '
101 100
102
103 101 ''' numpy.fromfile(file, dtype, count, sep='')
104 102 file : file or str
105 103 Open file object or filename.
@@ -119,23 +117,20 class FileHeaderBLTR(Header):
119 117
120 118 '''
121 119
122
123
124 120 self.FileMgcNumber= hex(header['FileMgcNumber'][0])
125 self.nFDTdataRecors=int(header['nFDTdataRecors'][0]) #No Of FDT data records in this file (0 or more)
121 # No Of FDT data records in this file (0 or more)
122 self.nFDTdataRecors = int(header['nFDTdataRecors'][0])
126 123 self.RadarUnitId= int(header['RadarUnitId'][0])
127 124 self.OffsetStartHeader= int(header['OffsetStartHeader'][0])
128 125 self.SiteName= str(header['SiteName'][0])
129 126
130 127 #print 'Numero de bloques', self.nFDTdataRecors
131 128
132
133 129 if self.size <48:
134 130 return 0
135 131
136 132 return 1
137 133
138
139 134 def write(self, fp):
140 135
141 136 headerTuple = (self.FileMgcNumber,
@@ -144,7 +139,6 class FileHeaderBLTR(Header):
144 139 self.SiteName,
145 140 self.size)
146 141
147
148 142 header = numpy.array(headerTuple, FILE_STRUCTURE)
149 143 # numpy.array(object, dtype=None, copy=True, order=None, subok=False, ndmin=0)
150 144 header.tofile(fp)
@@ -166,56 +160,92 class FileHeaderBLTR(Header):
166 160 return 1
167 161
168 162
169
170
171
172 163 RECORD_STRUCTURE = numpy.dtype([ #RECORD HEADER 180+20N bytes
173 164 ('RecMgcNumber','<u4'), #0x23030001
174 165 ('RecCounter','<u4'), #Record counter(0,1, ...)
175 ('Off2StartNxtRec','<u4'), #Offset to start of next record form start of this record
176 ('Off2StartData','<u4'), #Offset to start of data from start of this record
177 ('nUtime','<i4'), #Epoch time stamp of start of acquisition (seconds)
178 ('nMilisec','<u4'), #Millisecond component of time stamp (0,...,999)
179 ('ExpTagName',numpy.str_,32), #Experiment tag name (null terminated)
180 ('ExpComment',numpy.str_,32), #Experiment comment (null terminated)
181 ('SiteLatDegrees','<f4'), #Site latitude (from GPS) in degrees (positive implies North)
182 ('SiteLongDegrees','<f4'), #Site longitude (from GPS) in degrees (positive implies East)
183 ('RTCgpsStatus','<u4'), #RTC GPS engine status (0=SEEK, 1=LOCK, 2=NOT FITTED, 3=UNAVAILABLE)
166 # Offset to start of next record from start of this record
167 ('Off2StartNxtRec', '<u4'),
168 # Offset to start of data from start of this record
169 ('Off2StartData', '<u4'),
170 # Epoch time stamp of start of acquisition (seconds)
171 ('nUtime', '<i4'),
172 # Millisecond component of time stamp (0,...,999)
173 ('nMilisec', '<u4'),
174 # Experiment tag name (null terminated)
175 ('ExpTagName', numpy.str_, 32),
176 # Experiment comment (null terminated)
177 ('ExpComment', numpy.str_, 32),
178 # Site latitude (from GPS) in degrees (positive implies North)
179 ('SiteLatDegrees', '<f4'),
180 # Site longitude (from GPS) in degrees (positive implies East)
181 ('SiteLongDegrees', '<f4'),
182 # RTC GPS engine status (0=SEEK, 1=LOCK, 2=NOT FITTED, 3=UNAVAILABLE)
183 ('RTCgpsStatus', '<u4'),
184 184 ('TransmitFrec','<u4'), #Transmit frequency (Hz)
185 185 ('ReceiveFrec','<u4'), #Receive frequency
186 ('FirstOsciFrec','<u4'), #First local oscillator frequency (Hz)
187 ('Polarisation','<u4'), #(0="O", 1="E", 2="linear 1", 3="linear2")
188 ('ReceiverFiltSett','<u4'), #Receiver filter settings (0,1,2,3)
189 ('nModesInUse','<u4'), #Number of modes in use (1 or 2)
190 ('DualModeIndex','<u4'), #Dual Mode index number for these data (0 or 1)
191 ('DualModeRange','<u4'), #Dual Mode range correction for these data (m)
192 ('nDigChannels','<u4'), #Number of digital channels acquired (2*N)
193 ('SampResolution','<u4'), #Sampling resolution (meters)
194 ('nHeights','<u4'), #Number of range gates sampled
195 ('StartRangeSamp','<u4'), #Start range of sampling (meters)
186 # First local oscillator frequency (Hz)
187 ('FirstOsciFrec', '<u4'),
188 # (0="O", 1="E", 2="linear 1", 3="linear2")
189 ('Polarisation', '<u4'),
190 # Receiver filter settings (0,1,2,3)
191 ('ReceiverFiltSett', '<u4'),
192 # Number of modes in use (1 or 2)
193 ('nModesInUse', '<u4'),
194 # Dual Mode index number for these data (0 or 1)
195 ('DualModeIndex', '<u4'),
196 # Dual Mode range correction for these data (m)
197 ('DualModeRange', '<u4'),
198 # Number of digital channels acquired (2*N)
199 ('nDigChannels', '<u4'),
200 # Sampling resolution (meters)
201 ('SampResolution', '<u4'),
202 # Number of range gates sampled
203 ('nHeights', '<u4'),
204 # Start range of sampling (meters)
205 ('StartRangeSamp', '<u4'),
196 206 ('PRFhz','<u4'), #PRF (Hz)
197 207 ('nCohInt','<u4'), #Integrations
198 ('nProfiles','<u4'), #Number of data points transformed
199 ('nChannels','<u4'), #Number of receive beams stored in file (1 or N)
208 # Number of data points transformed
209 ('nProfiles', '<u4'),
210 # Number of receive beams stored in file (1 or N)
211 ('nChannels', '<u4'),
200 212 ('nIncohInt','<u4'), #Number of spectral averages
201 ('FFTwindowingInd','<u4'), #FFT windowing index (0 = no window)
202 ('BeamAngleAzim','<f4'), #Beam steer angle (azimuth) in degrees (clockwise from true North)
203 ('BeamAngleZen','<f4'), #Beam steer angle (zenith) in degrees (0=> vertical)
204 ('AntennaCoord0','<f4'), #Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
205 ('AntennaAngl0','<f4'), #Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
206 ('AntennaCoord1','<f4'), #Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
207 ('AntennaAngl1','<f4'), #Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
208 ('AntennaCoord2','<f4'), #Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
209 ('AntennaAngl2','<f4'), #Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
210 ('RecPhaseCalibr0','<f4'), #Receiver phase calibration (degrees) - N values
211 ('RecPhaseCalibr1','<f4'), #Receiver phase calibration (degrees) - N values
212 ('RecPhaseCalibr2','<f4'), #Receiver phase calibration (degrees) - N values
213 ('RecAmpCalibr0','<f4'), #Receiver amplitude calibration (ratio relative to receiver one) - N values
214 ('RecAmpCalibr1','<f4'), #Receiver amplitude calibration (ratio relative to receiver one) - N values
215 ('RecAmpCalibr2','<f4'), #Receiver amplitude calibration (ratio relative to receiver one) - N values
216 ('ReceiverGaindB0','<i4'), #Receiver gains in dB - N values
217 ('ReceiverGaindB1','<i4'), #Receiver gains in dB - N values
218 ('ReceiverGaindB2','<i4'), #Receiver gains in dB - N values
213 # FFT windowing index (0 = no window)
214 ('FFTwindowingInd', '<u4'),
215 # Beam steer angle (azimuth) in degrees (clockwise from true North)
216 ('BeamAngleAzim', '<f4'),
217 # Beam steer angle (zenith) in degrees (0=> vertical)
218 ('BeamAngleZen', '<f4'),
219 # Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
220 ('AntennaCoord0', '<f4'),
221 # Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
222 ('AntennaAngl0', '<f4'),
223 # Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
224 ('AntennaCoord1', '<f4'),
225 # Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
226 ('AntennaAngl1', '<f4'),
227 # Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
228 ('AntennaCoord2', '<f4'),
229 # Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs
230 ('AntennaAngl2', '<f4'),
231 # Receiver phase calibration (degrees) - N values
232 ('RecPhaseCalibr0', '<f4'),
233 # Receiver phase calibration (degrees) - N values
234 ('RecPhaseCalibr1', '<f4'),
235 # Receiver phase calibration (degrees) - N values
236 ('RecPhaseCalibr2', '<f4'),
237 # Receiver amplitude calibration (ratio relative to receiver one) - N values
238 ('RecAmpCalibr0', '<f4'),
239 # Receiver amplitude calibration (ratio relative to receiver one) - N values
240 ('RecAmpCalibr1', '<f4'),
241 # Receiver amplitude calibration (ratio relative to receiver one) - N values
242 ('RecAmpCalibr2', '<f4'),
243 # Receiver gains in dB - N values
244 ('ReceiverGaindB0', '<i4'),
245 # Receiver gains in dB - N values
246 ('ReceiverGaindB1', '<i4'),
247 # Receiver gains in dB - N values
248 ('ReceiverGaindB2', '<i4'),
219 249 ])
220 250
221 251
@@ -285,12 +315,11 class RecordHeaderBLTR(Header):
285 315 self.ReceiverGaindB2 = ReceiverGaindB2
286 316 self.OffsetStartHeader = 48
287 317
288
289
290 318 def RHread(self, fp):
291 319 #print fp
292 320 #startFp = open('/home/erick/Documents/Data/huancayo.20161019.22.fdt',"rb") #The method tell() returns the current position of the file read/write pointer within the file.
293 startFp = open(fp,"rb") #The method tell() returns the current position of the file read/write pointer within the file.
321 # The method tell() returns the current position of the file read/write pointer within the file.
322 startFp = open(fp, "rb")
294 323 #RecCounter=0
295 324 #Off2StartNxtRec=811248
296 325 OffRHeader= self.OffsetStartHeader + self.RecCounter*self.Off2StartNxtRec
@@ -298,7 +327,6 class RecordHeaderBLTR(Header):
298 327 print 'Record Header pointer', startFp.tell()
299 328 print ' '
300 329
301
302 330 startFp.seek(OffRHeader, os.SEEK_SET)
303 331
304 332 print ' '
@@ -416,11 +444,13 class RecordHeaderBLTR(Header):
416 444 print '=============================================='
417 445
418 446 if OffRHeader > endFp:
419 sys.stderr.write("Warning %s: Size value read from System Header is lower than it has to be\n" %fp)
447 sys.stderr.write(
448 "Warning %s: Size value read from System Header is lower than it has to be\n" % fp)
420 449 return 0
421 450
422 451 if OffRHeader < endFp:
423 sys.stderr.write("Warning %s: Size value read from System Header size is greater than it has to be\n" %fp)
452 sys.stderr.write(
453 "Warning %s: Size value read from System Header size is greater than it has to be\n" % fp)
424 454 return 0
425 455
426 456 return 1
@@ -436,7 +466,6 class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODa
436 466 walk = None
437 467 isConfig = False
438 468
439
440 469 fileList= None
441 470
442 471 #metadata
@@ -448,8 +477,6 class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODa
448 477 data= None
449 478 utctime= None
450 479
451
452
453 480 def __init__(self, **kwargs):
454 481
455 482 #Eliminar de la base la herencia
@@ -494,8 +521,6 class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODa
494 521 self.dataOut.velocityY=[]
495 522 self.dataOut.velocityV=[]
496 523
497
498
499 524 def Files2Read(self, fp):
500 525 '''
501 526 Function that indicates the number of .fdt files that exist in the folder to be read.
@@ -503,8 +528,10 class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODa
503 528 '''
504 529 #self.__checkPath()
505 530
506 ListaData=os.listdir(fp) #Gets the list of files within the fp address
507 ListaData=sorted(ListaData) #Sort the list of files from least to largest by names
531 # Get the list of files in the directory given by fp
532 ListaData = os.listdir(fp)
533 # Sort the file list in ascending order by name
534 ListaData = sorted(ListaData)
508 535 nFiles=0 #File Counter
509 536 FileList=[] #A list is created that will contain the .fdt files
510 537 for IndexFile in ListaData :
@@ -517,7 +544,6 class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODa
517 544
518 545 self.filenameList=FileList #List of files from least to largest by names
519 546
520
521 547 def run(self, **kwargs):
522 548 '''
523 549 This method will be the one that will initiate the data entry, will be called constantly.
@@ -531,7 +557,6 class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODa
531 557 self.getData()
532 558 #print 'running'
533 559
534
535 560 def setup(self, path=None,
536 561 startDate=None,
537 562 endDate=None,
@@ -556,7 +581,6 class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODa
556 581
557 582 pass
558 583
559
560 584 def getData(self):
561 585 '''
562 586 Before starting this function, you should check that there is still an unread file,
@@ -583,7 +607,6 class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODa
583 607 #self.removeDC()
584 608 return self.dataOut.data_spc
585 609
586
587 610 def readFile(self,fp):
588 611 '''
589 612 You must indicate if you are reading in Online or Offline mode and load the
@@ -600,7 +623,8 class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODa
600 623
601 624 if self.fileSelector < len(self.filenameList):
602 625
603 self.fpFile=str(fp)+'/'+str(self.filenameList[self.fileSelector])
626 self.fpFile = str(fp) + '/' + \
627 str(self.filenameList[self.fileSelector])
604 628 #print self.fpFile
605 629 fheader = FileHeaderBLTR()
606 630 fheader.FHread(self.fpFile) #Bltr FileHeader Reading
@@ -615,12 +639,15 class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODa
615 639
616 640 def getVelRange(self, extrapoints=0):
617 641 Lambda= SPEED_OF_LIGHT/50000000
618 PRF = self.dataOut.PRF#1./(self.dataOut.ippSeconds * self.dataOut.nCohInt)
642 # 1./(self.dataOut.ippSeconds * self.dataOut.nCohInt)
643 PRF = self.dataOut.PRF
619 644 Vmax=-Lambda/(4.*(1./PRF)*self.dataOut.nCohInt*2.)
620 645 deltafreq = PRF / (self.nProfiles)
621 646 deltavel = (Vmax*2) / (self.nProfiles)
622 freqrange = deltafreq*(numpy.arange(self.nProfiles)-self.nProfiles/2.) - deltafreq/2
623 velrange = deltavel*(numpy.arange(self.nProfiles)-self.nProfiles/2.)
647 freqrange = deltafreq * \
648 (numpy.arange(self.nProfiles) - self.nProfiles / 2.) - deltafreq / 2
649 velrange = deltavel * \
650 (numpy.arange(self.nProfiles) - self.nProfiles / 2.)
624 651 return velrange
625 652
626 653 def readBlock(self):
@@ -661,7 +688,8 class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODa
661 688
662 689 self.__firstHeigth=rheader.StartRangeSamp
663 690 self.__deltaHeigth=rheader.SampResolution
664 self.dataOut.heightList= self.__firstHeigth + numpy.array(range(self.nHeights))*self.__deltaHeigth
691 self.dataOut.heightList = self.__firstHeigth + \
692 numpy.array(range(self.nHeights)) * self.__deltaHeigth
665 693 self.dataOut.channelList = range(self.nChannels)
666 694 self.dataOut.nProfiles=rheader.nProfiles
667 695 self.dataOut.nIncohInt=rheader.nIncohInt
@@ -671,8 +699,10 class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODa
671 699 self.dataOut.nFFTPoints=rheader.nProfiles
672 700 self.dataOut.utctime=rheader.nUtime
673 701 self.dataOut.timeZone=0
674 self.dataOut.normFactor= self.dataOut.nProfiles*self.dataOut.nIncohInt*self.dataOut.nCohInt
675 self.dataOut.outputInterval= self.dataOut.ippSeconds * self.dataOut.nCohInt * self.dataOut.nIncohInt * self.nProfiles
702 self.dataOut.normFactor = self.dataOut.nProfiles * \
703 self.dataOut.nIncohInt * self.dataOut.nCohInt
704 self.dataOut.outputInterval = self.dataOut.ippSeconds * \
705 self.dataOut.nCohInt * self.dataOut.nIncohInt * self.nProfiles
676 706
677 707 self.data_output=numpy.ones([3,rheader.nHeights])*numpy.NaN
678 708 print 'self.data_output', shape(self.data_output)
@@ -686,7 +716,8 class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODa
686 716
687 717 #Procedure to move the pointer to where the data block starts
688 718 startDATA = open(self.fpFile,"rb")
689 OffDATA= self.OffsetStartHeader + self.RecCounter*self.Off2StartNxtRec+self.Off2StartData
719 OffDATA = self.OffsetStartHeader + self.RecCounter * \
720 self.Off2StartNxtRec + self.Off2StartData
690 721 startDATA.seek(OffDATA, os.SEEK_SET)
691 722
692 723 def moving_average(x, N=2):
@@ -705,27 +736,26 class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODa
705 736 y = rho * numpy.sin(phi)
706 737 return(x, y)
707 738
708
709
710
711 739 if self.DualModeIndex==self.ReadMode:
712 740
713 self.data_fft = numpy.fromfile( startDATA, [('complex','<c8')],self.nProfiles*self.nChannels*self.nHeights )
741 self.data_fft = numpy.fromfile(
742 startDATA, [('complex', '<c8')], self.nProfiles * self.nChannels * self.nHeights)
714 743
715 744 self.data_fft=self.data_fft.astype(numpy.dtype('complex'))
716 745
717 self.data_block=numpy.reshape(self.data_fft,(self.nHeights, self.nChannels, self.nProfiles ))
746 self.data_block = numpy.reshape(
747 self.data_fft, (self.nHeights, self.nChannels, self.nProfiles))
718 748
719 749 self.data_block = numpy.transpose(self.data_block, (1,2,0))
720 750
721 751 copy = self.data_block.copy()
722 752 spc = copy * numpy.conjugate(copy)
723 753
724 self.data_spc = numpy.absolute(spc) # valor absoluto o magnitud
754 self.data_spc = numpy.absolute(
755 spc) # absolute value (magnitude)
725 756
726 757 factor = self.dataOut.normFactor
727 758
728
729 759 z = self.data_spc.copy()#/factor
730 760 z = numpy.where(numpy.isfinite(z), z, numpy.NAN)
731 761 #zdB = 10*numpy.log10(z)
@@ -737,13 +767,14 class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODa
737 767
738 768 self.dataOut.data_spc=self.data_spc
739 769
740 self.noise = self.dataOut.getNoise(ymin_index=80, ymax_index=132)#/factor
770 self.noise = self.dataOut.getNoise(
771 ymin_index=80, ymax_index=132) # /factor
741 772 #noisedB = 10*numpy.log10(self.noise)
742 773
743
744 774 ySamples=numpy.ones([3,self.nProfiles])
745 775 phase=numpy.ones([3,self.nProfiles])
746 CSPCSamples=numpy.ones([3,self.nProfiles],dtype=numpy.complex_)
776 CSPCSamples = numpy.ones(
777 [3, self.nProfiles], dtype=numpy.complex_)
747 778 coherence=numpy.ones([3,self.nProfiles])
748 779 PhaseSlope=numpy.ones(3)
749 780 PhaseInter=numpy.ones(3)
@@ -766,13 +797,16 class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODa
766 797 chan_index0 = self.dataOut.pairsList[i][0]
767 798 chan_index1 = self.dataOut.pairsList[i][1]
768 799
769 self.data_cspc[i,:,:]=cspc[chan_index0,:,:] * numpy.conjugate(cspc[chan_index1,:,:])
770
800 self.data_cspc[i, :, :] = cspc[chan_index0, :,
801 :] * numpy.conjugate(cspc[chan_index1, :, :])
771 802
772 803 '''Getting Eij and Nij'''
773 (AntennaX0,AntennaY0)=pol2cart(rheader.AntennaCoord0, rheader.AntennaAngl0*numpy.pi/180)
774 (AntennaX1,AntennaY1)=pol2cart(rheader.AntennaCoord1, rheader.AntennaAngl1*numpy.pi/180)
775 (AntennaX2,AntennaY2)=pol2cart(rheader.AntennaCoord2, rheader.AntennaAngl2*numpy.pi/180)
804 (AntennaX0, AntennaY0) = pol2cart(
805 rheader.AntennaCoord0, rheader.AntennaAngl0 * numpy.pi / 180)
806 (AntennaX1, AntennaY1) = pol2cart(
807 rheader.AntennaCoord1, rheader.AntennaAngl1 * numpy.pi / 180)
808 (AntennaX2, AntennaY2) = pol2cart(
809 rheader.AntennaCoord2, rheader.AntennaAngl2 * numpy.pi / 180)
776 810
777 811 E01=AntennaX0-AntennaX1
778 812 N01=AntennaY0-AntennaY1
@@ -783,7 +817,8 class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODa
783 817 E12=AntennaX1-AntennaX2
784 818 N12=AntennaY1-AntennaY2
785 819
786 self.ChanDist= numpy.array([[E01, N01],[E02,N02],[E12,N12]])
820 self.ChanDist = numpy.array(
821 [[E01, N01], [E02, N02], [E12, N12]])
787 822
788 823 self.dataOut.ChanDist = self.ChanDist
789 824
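
A small sketch of how the ChanDist baselines above come out of pol2cart: each antenna's (range, bearing) pair is converted to east/north coordinates, and the pairwise differences give the Eij/Nij baseline vectors. The three (range, bearing) pairs below are hypothetical:

    import numpy

    def pol2cart(rho, phi):
        x = rho * numpy.cos(phi)
        y = rho * numpy.sin(phi)
        return (x, y)

    # hypothetical (range [m], bearing [deg]) pairs for the three antennas
    coords = [(35.0, 0.0), (35.0, 120.0), (35.0, 240.0)]
    (AntennaX0, AntennaY0) = pol2cart(coords[0][0], coords[0][1] * numpy.pi / 180)
    (AntennaX1, AntennaY1) = pol2cart(coords[1][0], coords[1][1] * numpy.pi / 180)
    (AntennaX2, AntennaY2) = pol2cart(coords[2][0], coords[2][1] * numpy.pi / 180)

    ChanDist = numpy.array([[AntennaX0 - AntennaX1, AntennaY0 - AntennaY1],
                            [AntennaX0 - AntennaX2, AntennaY0 - AntennaY2],
                            [AntennaX1 - AntennaX2, AntennaY1 - AntennaY2]])
    print ChanDist
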
@@ -1139,16 +1174,9 class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODa
1139 1174 #
1140 1175 # print ' '
1141 1176
1142
1143
1144 1177 self.BlockCounter+=2
1145 1178
1146 1179 else:
1147 1180 self.fileSelector+=1
1148 1181 self.BlockCounter=0
1149 1182 print "Next File"
1150
1151
1152
1153
1154
@@ -1,4 +1,5
1 import os, sys
1 import os
2 import sys
2 3 import glob
3 4 import fnmatch
4 5 import datetime
@@ -6,9 +7,7 import time
6 7 import re
7 8 import h5py
8 9 import numpy
9 import matplotlib.pyplot as plt
10 10
11 import pylab as plb
12 11 from scipy.optimize import curve_fit
13 12 from scipy import asarray as ar,exp
14 13 from scipy import stats
@@ -30,13 +29,11 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
30 29 from numpy import imag, shape, NaN, empty
31 30
32 31
33
34 32 class Header(object):
35 33
36 34 def __init__(self):
37 35 raise NotImplementedError
38 36
39
40 37 def read(self):
41 38
42 39 raise NotImplementedError
@@ -68,13 +65,18 class Header(object):
68 65
69 66 FILE_HEADER = numpy.dtype([ #HEADER 1024bytes
70 67 ('Hname','a32'), #Original file name
71 ('Htime',numpy.str_,32), #Date and time when the file was created
72 ('Hoper',numpy.str_,64), #Name of operator who created the file
73 ('Hplace',numpy.str_,128), #Place where the measurements was carried out
74 ('Hdescr',numpy.str_,256), #Description of measurements
68 # Date and time when the file was created
69 ('Htime', numpy.str_, 32),
70 # Name of operator who created the file
71 ('Hoper', numpy.str_, 64),
72 # Place where the measurements were carried out
73 ('Hplace', numpy.str_, 128),
74 # Description of measurements
75 ('Hdescr', numpy.str_, 256),
75 76 ('Hdummy',numpy.str_,512), #Reserved space
76 77 #Main chunk 8bytes
77 ('Msign',numpy.str_,4), #Main chunk signature FZKF or NUIG
78 # Main chunk signature FZKF or NUIG
79 ('Msign', numpy.str_, 4),
78 80 ('MsizeData','<i4'), #Size of data block main chunk
79 81 #Processing DSP parameters 36bytes
80 82 ('PPARsign',numpy.str_,4), #PPAR signature
@@ -82,19 +84,28 FILE_HEADER = numpy.dtype([ #HEADER 1024bytes
82 84 ('PPARprf','<i4'), #Pulse repetition frequency
83 85 ('PPARpdr','<i4'), #Pulse duration
84 86 ('PPARsft','<i4'), #FFT length
85 ('PPARavc','<i4'), #Number of spectral (in-coherent) averages
86 ('PPARihp','<i4'), #Number of lowest range gate for moment estimation
87 ('PPARchg','<i4'), #Count for gates for moment estimation
88 ('PPARpol','<i4'), #switch on/off polarimetric measurements. Should be 1.
87 # Number of spectral (in-coherent) averages
88 ('PPARavc', '<i4'),
89 # Number of lowest range gate for moment estimation
90 ('PPARihp', '<i4'),
91 # Count for gates for moment estimation
92 ('PPARchg', '<i4'),
93 # switch on/off polarimetric measurements. Should be 1.
94 ('PPARpol', '<i4'),
89 95 #Service DSP parameters 112bytes
90 ('SPARatt','<i4'), #STC attenuation on the lowest ranges on/off
96 # STC attenuation on the lowest ranges on/off
97 ('SPARatt', '<i4'),
91 98 ('SPARtx','<i4'), #OBSOLETE
92 99 ('SPARaddGain0','<f4'), #OBSOLETE
93 100 ('SPARaddGain1','<f4'), #OBSOLETE
94 ('SPARwnd','<i4'), #Debug only. It normal mode it is 0.
95 ('SPARpos','<i4'), #Delay between sync pulse and tx pulse for phase corr, ns
96 ('SPARadd','<i4'), #"add to pulse" to compensate for delay between the leading edge of driver pulse and envelope of the RF signal.
97 ('SPARlen','<i4'), #Time for measuring txn pulse phase. OBSOLETE
101 # Debug only. In normal mode it is 0.
102 ('SPARwnd', '<i4'),
103 # Delay between sync pulse and tx pulse for phase corr, ns
104 ('SPARpos', '<i4'),
105 # "add to pulse" to compensate for delay between the leading edge of driver pulse and envelope of the RF signal.
106 ('SPARadd', '<i4'),
107 # Time for measuring txn pulse phase. OBSOLETE
108 ('SPARlen', '<i4'),
98 109 ('SPARcal','<i4'), #OBSOLETE
99 110 ('SPARnos','<i4'), #OBSOLETE
100 111 ('SPARof0','<i4'), #detection threshold
@@ -105,19 +116,23 FILE_HEADER = numpy.dtype([ #HEADER 1024bytes
105 116 ('SPARtst','<i4'), #OBSOLETE
106 117 ('SPARcor','<i4'), #OBSOLETE
107 118 ('SPARofs','<i4'), #OBSOLETE
108 ('SPARhsn','<i4'), #Hildebrand div noise detection on noise gate
109 ('SPARhsa','<f4'), #Hildebrand div noise detection on all gates
119 # Hildebrand div noise detection on noise gate
120 ('SPARhsn', '<i4'),
121 # Hildebrand div noise detection on all gates
122 ('SPARhsa', '<f4'),
110 123 ('SPARcalibPow_M','<f4'), #OBSOLETE
111 124 ('SPARcalibSNR_M','<f4'), #OBSOLETE
112 125 ('SPARcalibPow_S','<f4'), #OBSOLETE
113 126 ('SPARcalibSNR_S','<f4'), #OBSOLETE
114 ('SPARrawGate1','<i4'), #Lowest range gate for spectra saving Raw_Gate1 >=5
115 ('SPARrawGate2','<i4'), #Number of range gates with atmospheric signal
116 ('SPARraw','<i4'), #flag - IQ or spectra saving on/off
127 # Lowest range gate for spectra saving Raw_Gate1 >=5
128 ('SPARrawGate1', '<i4'),
129 # Number of range gates with atmospheric signal
130 ('SPARrawGate2', '<i4'),
131 # flag - IQ or spectra saving on/off
132 ('SPARraw', '<i4'),
117 133 ('SPARprc','<i4'),]) #flag - Moment estimation switched on/off
118 134
119 135
120
121 136 class FileHeaderMIRA35c(Header):
122 137
123 138 def __init__(self):
@@ -195,7 +210,6 class FileHeaderMIRA35c(Header):
195 210
196 211 '''
197 212
198
199 213 self.Hname= str(header['Hname'][0])
200 214 self.Htime= str(header['Htime'][0])
201 215 self.Hoper= str(header['Hoper'][0])
@@ -272,7 +286,6 class FileHeaderMIRA35c(Header):
272 286 self.Hdescr,
273 287 self.Hdummy)
274 288
275
276 289 header = numpy.array(headerTuple, FILE_HEADER)
277 290 # numpy.array(object, dtype=None, copy=True, order=None, subok=False, ndmin=0)
278 291 header.tofile(fp)
@@ -293,11 +306,13 class FileHeaderMIRA35c(Header):
293 306
294 307 return 1
295 308
309
296 310 SRVI_HEADER = numpy.dtype([
297 ('SignatureSRVI1',numpy.str_,4),#
298 ('SizeOfDataBlock1','<i4'),#
299 ('DataBlockTitleSRVI1',numpy.str_,4),#
300 ('SizeOfSRVI1','<i4'),])#
311 ('SignatureSRVI1', numpy.str_, 4),
312 ('SizeOfDataBlock1', '<i4'),
313 ('DataBlockTitleSRVI1', numpy.str_, 4),
314 ('SizeOfSRVI1', '<i4'), ])
315
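
All the fixed-size headers in this module are read the same way: a structured numpy dtype describes the byte layout and numpy.fromfile pulls one record of it from the file. A minimal round-trip sketch with the SRVI_HEADER layout above, using a temporary file and made-up field values:

    import numpy
    import tempfile

    SRVI_HEADER = numpy.dtype([
        ('SignatureSRVI1', numpy.str_, 4),
        ('SizeOfDataBlock1', '<i4'),
        ('DataBlockTitleSRVI1', numpy.str_, 4),
        ('SizeOfSRVI1', '<i4'), ])

    dummy = numpy.array([('SRVI', 1024, 'SRVI', 16)], dtype=SRVI_HEADER)  # made-up values

    tmp = tempfile.TemporaryFile()
    dummy.tofile(tmp)                              # write one header record
    tmp.seek(0)
    header = numpy.fromfile(tmp, SRVI_HEADER, 1)   # read it back, as SRVIread does
    print header['SignatureSRVI1'][0], header['SizeOfSRVI1'][0]
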
301 316
302 317 class SRVIHeader(Header):
303 318 def __init__(self, SignatureSRVI1=0, SizeOfDataBlock1=0, DataBlockTitleSRVI1=0, SizeOfSRVI1=0):
@@ -322,7 +337,7 class SRVIHeader(Header):
322 337
323 338
324 339 SRVI_STRUCTURE = numpy.dtype([
325 ('frame_cnt','<u4'),#
340 ('frame_cnt', '<u4'),
326 341 ('time_t','<u4'), #
327 342 ('tpow','<f4'), #
328 343 ('npw1','<f4'), #
@@ -338,24 +353,20 SRVI_STRUCTURE = numpy.dtype([
338 353 ('azivel','<f4'), #
339 354 ('elvpos','<f4'), #
340 355 ('elvvel','<f4'), #
341 ('northAngle','<f4'), #
356 ('northAngle', '<f4'),
342 357 ('microsec','<u4'), #
343 358 ('azisetvel','<f4'), #
344 359 ('elvsetpos','<f4'), #
345 360 ('RadarConst','<f4'),]) #
346 361
347 362
348
349
350 363 class RecordHeader(Header):
351 364
352
353 365 def __init__(self, frame_cnt=0, time_t= 0, tpow=0, npw1=0, npw2=0,
354 366 cpw1=0, pcw2=0, ps_err=0, te_err=0, rc_err=0, grs1=0,
355 367 grs2=0, azipos=0, azivel=0, elvpos=0, elvvel=0, northangle=0,
356 368 microsec=0, azisetvel=0, elvsetpos=0, RadarConst=0 , RecCounter=0, Off2StartNxtRec=0):
357 369
358
359 370 self.frame_cnt = frame_cnt
360 371 self.dwell = time_t
361 372 self.tpow = tpow
@@ -392,7 +403,7 class RecordHeader(Header):
392 403
393 404 header = numpy.fromfile(fp,SRVI_STRUCTURE,1)
394 405
395 self.frame_cnt = header['frame_cnt'][0]#
406 self.frame_cnt = header['frame_cnt'][0]
396 407 self.time_t = header['time_t'][0] #
397 408 self.tpow = header['tpow'][0] #
398 409 self.npw1 = header['npw1'][0] #
@@ -428,9 +439,9 class RecordHeader(Header):
428 439
429 440 print '=============================================='
430 441
431
432 442 return 1
433 443
444
434 445 class MIRA35CReader (ProcessingUnit,FileHeaderMIRA35c,SRVIHeader,RecordHeader):
435 446
436 447 path = None
@@ -441,7 +452,6 class MIRA35CReader (ProcessingUnit,FileHeaderMIRA35c,SRVIHeader,RecordHeader):
441 452 walk = None
442 453 isConfig = False
443 454
444
445 455 fileList= None
446 456
447 457 #metadata
@@ -453,8 +463,6 class MIRA35CReader (ProcessingUnit,FileHeaderMIRA35c,SRVIHeader,RecordHeader):
453 463 data= None
454 464 utctime= None
455 465
456
457
458 466 def __init__(self, **kwargs):
459 467
460 468 #Eliminar de la base la herencia
@@ -498,7 +506,6 class MIRA35CReader (ProcessingUnit,FileHeaderMIRA35c,SRVIHeader,RecordHeader):
498 506 self.dataOut.COFA = []
499 507 self.dataOut.noise = 0
500 508
501
502 509 def Files2Read(self, fp):
503 510 '''
504 511 Function that indicates the number of .fdt files that exist in the folder to be read.
@@ -506,8 +513,10 class MIRA35CReader (ProcessingUnit,FileHeaderMIRA35c,SRVIHeader,RecordHeader):
506 513 '''
507 514 #self.__checkPath()
508 515
509 ListaData=os.listdir(fp) #Gets the list of files within the fp address
510 ListaData=sorted(ListaData) #Sort the list of files from least to largest by names
516 # Get the list of files in the directory given by fp
517 ListaData = os.listdir(fp)
518 # Sort the file list in ascending order by name
519 ListaData = sorted(ListaData)
511 520 nFiles=0 #File Counter
512 521 FileList=[] #A list is created that will contain the .fdt files
513 522 for IndexFile in ListaData :
@@ -520,7 +529,6 class MIRA35CReader (ProcessingUnit,FileHeaderMIRA35c,SRVIHeader,RecordHeader):
520 529
521 530 self.filenameList=FileList #List of files from least to largest by names
522 531
523
524 532 def run(self, **kwargs):
525 533 '''
526 534 This method will be the one that will initiate the data entry, will be called constantly.
@@ -533,7 +541,6 class MIRA35CReader (ProcessingUnit,FileHeaderMIRA35c,SRVIHeader,RecordHeader):
533 541
534 542 self.getData()
535 543
536
537 544 def setup(self, path=None,
538 545 startDate=None,
539 546 endDate=None,
@@ -557,7 +564,6 class MIRA35CReader (ProcessingUnit,FileHeaderMIRA35c,SRVIHeader,RecordHeader):
557 564
558 565 pass
559 566
560
561 567 def getData(self):
562 568 '''
563 569 Before starting this function, you should check that there is still an unread file,
@@ -584,10 +590,8 class MIRA35CReader (ProcessingUnit,FileHeaderMIRA35c,SRVIHeader,RecordHeader):
584 590 self.dataOut.data_spc = self.dataOut.data_spc+self.dataOut.noise
585 591 #print 'self.dataOut.noise',self.dataOut.noise
586 592
587
588 593 return self.dataOut.data_spc
589 594
590
591 595 def readFile(self,fp):
592 596 '''
593 597 You must indicate if you are reading in Online or Offline mode and load the
@@ -604,7 +608,8 class MIRA35CReader (ProcessingUnit,FileHeaderMIRA35c,SRVIHeader,RecordHeader):
604 608
605 609 if self.fileSelector < len(self.filenameList):
606 610
607 self.fpFile=str(fp)+'/'+str(self.filenameList[self.fileSelector])
611 self.fpFile = str(fp) + '/' + \
612 str(self.filenameList[self.fileSelector])
608 613
609 614 if self.nextfileflag==True:
610 615 self.fp = open(self.fpFile,"rb")
@@ -612,18 +617,15 class MIRA35CReader (ProcessingUnit,FileHeaderMIRA35c,SRVIHeader,RecordHeader):
612 617
613 618 '''HERE STARTING THE FILE READING'''
614 619
615
616 620 self.fheader = FileHeaderMIRA35c()
617 621 self.fheader.FHread(self.fp) #Bltr FileHeader Reading
618 622
619
620 623 self.SPARrawGate1 = self.fheader.SPARrawGate1
621 624 self.SPARrawGate2 = self.fheader.SPARrawGate2
622 625 self.Num_Hei = self.SPARrawGate2 - self.SPARrawGate1
623 626 self.Num_Bins = self.fheader.PPARsft
624 627 self.dataOut.nFFTPoints = self.fheader.PPARsft
625 628
626
627 629 self.Num_inCoh = self.fheader.PPARavc
628 630 self.dataOut.PRF = self.fheader.PPARprf
629 631 self.dataOut.frequency = 34.85*10**9
@@ -633,14 +635,12 class MIRA35CReader (ProcessingUnit,FileHeaderMIRA35c,SRVIHeader,RecordHeader):
633 635 pulse_width = self.fheader.PPARpdr * 10**-9
634 636 self.__deltaHeigth = 0.5 * SPEED_OF_LIGHT * pulse_width
635 637
636 self.data_spc = numpy.zeros((self.Num_Hei, self.Num_Bins,2))#
638 self.data_spc = numpy.zeros((self.Num_Hei, self.Num_Bins, 2))
637 639 self.dataOut.HSDV = numpy.zeros((self.Num_Hei, 2))
638 640
639 641 self.Ze = numpy.zeros(self.Num_Hei)
640 642 self.ETA = numpy.zeros(([2,self.Num_Hei]))
641 643
642
643
644 644 self.readBlock() #Block reading
645 645
646 646 else:
@@ -650,8 +650,6 class MIRA35CReader (ProcessingUnit,FileHeaderMIRA35c,SRVIHeader,RecordHeader):
650 650 self.FileHeaderFlag == True
651 651 return 0
652 652
653
654
655 653 def readBlock(self):
656 654 '''
657 655 It should be checked whether the block has data; if it does not, move on to the next file.
@@ -670,8 +668,6 class MIRA35CReader (ProcessingUnit,FileHeaderMIRA35c,SRVIHeader,RecordHeader):
670 668 else :
671 669 self.FirstPoint = 1180
672 670
673
674
675 671 self.srviHeader = SRVIHeader()
676 672
677 673 self.srviHeader.SRVIread(self.fp) # Read the SRVI header
@@ -682,7 +678,8 class MIRA35CReader (ProcessingUnit,FileHeaderMIRA35c,SRVIHeader,RecordHeader):
682 678 print 'blocksize == 148 bug'
683 679 jump = numpy.fromfile(self.fp,[('jump',numpy.str_,140)] ,1)
684 680
685 self.srviHeader.SRVIread(self.fp) #Se obtiene la cabecera del SRVI
681 # Read the SRVI header
682 self.srviHeader.SRVIread(self.fp)
686 683
687 684 if not self.srviHeader.SizeOfSRVI1:
688 685 self.fileSelector+=1
@@ -696,7 +693,6 class MIRA35CReader (ProcessingUnit,FileHeaderMIRA35c,SRVIHeader,RecordHeader):
696 693 npw1 = self.recordheader.npw1
697 694 npw2 = self.recordheader.npw2
698 695
699
700 696 self.dataOut.channelList = range(1)
701 697 self.dataOut.nIncohInt = self.Num_inCoh
702 698 self.dataOut.nProfiles = self.Num_Bins
@@ -706,53 +702,65 class MIRA35CReader (ProcessingUnit,FileHeaderMIRA35c,SRVIHeader,RecordHeader):
706 702 self.dataOut.timeZone=0
707 703
708 704 self.dataOut.outputInterval = self.dataOut.getTimeInterval()
709 self.dataOut.heightList = self.SPARrawGate1*self.__deltaHeigth + numpy.array(range(self.Num_Hei))*self.__deltaHeigth
710
711
705 self.dataOut.heightList = self.SPARrawGate1 * self.__deltaHeigth + \
706 numpy.array(range(self.Num_Hei)) * self.__deltaHeigth
712 707
713 708 self.HSDVsign = numpy.fromfile( self.fp, [('HSDV',numpy.str_,4)],1)
714 709 self.SizeHSDV = numpy.fromfile( self.fp, [('SizeHSDV','<i4')],1)
715 self.HSDV_Co = numpy.fromfile( self.fp, [('HSDV_Co','<f4')],self.Num_Hei)
716 self.HSDV_Cx = numpy.fromfile( self.fp, [('HSDV_Cx','<f4')],self.Num_Hei)
710 self.HSDV_Co = numpy.fromfile(
711 self.fp, [('HSDV_Co', '<f4')], self.Num_Hei)
712 self.HSDV_Cx = numpy.fromfile(
713 self.fp, [('HSDV_Cx', '<f4')], self.Num_Hei)
717 714
718 715 self.COFAsign = numpy.fromfile( self.fp, [('COFA',numpy.str_,4)],1)
719 716 self.SizeCOFA = numpy.fromfile( self.fp, [('SizeCOFA','<i4')],1)
720 self.COFA_Co = numpy.fromfile( self.fp, [('COFA_Co','<f4')],self.Num_Hei)
721 self.COFA_Cx = numpy.fromfile( self.fp, [('COFA_Cx','<f4')],self.Num_Hei)
717 self.COFA_Co = numpy.fromfile(
718 self.fp, [('COFA_Co', '<f4')], self.Num_Hei)
719 self.COFA_Cx = numpy.fromfile(
720 self.fp, [('COFA_Cx', '<f4')], self.Num_Hei)
722 721
723 self.ZSPCsign = numpy.fromfile(self.fp, [('ZSPCsign',numpy.str_,4)],1)
722 self.ZSPCsign = numpy.fromfile(
723 self.fp, [('ZSPCsign', numpy.str_, 4)], 1)
724 724 self.SizeZSPC = numpy.fromfile(self.fp, [('SizeZSPC','<i4')],1)
725 725
726 726 self.dataOut.HSDV[0]=self.HSDV_Co[:][0]
727 727 self.dataOut.HSDV[1]=self.HSDV_Cx[:][0]
728 728
729 729 for irg in range(self.Num_Hei):
730 nspc = numpy.fromfile(self.fp, [('nspc','int16')],1)[0][0] # Number of spectral sub pieces containing significant power
730 # Number of spectral sub pieces containing significant power
731 nspc = numpy.fromfile(self.fp, [('nspc', 'int16')], 1)[0][0]
731 732
732 733 for k in range(nspc):
733 binIndex = numpy.fromfile(self.fp, [('binIndex','int16')],1)[0][0] # Index of the spectral bin where the piece is beginning
734 nbins = numpy.fromfile(self.fp, [('nbins','int16')],1)[0][0] # Number of bins of the piece
734 # Index of the spectral bin where the piece is beginning
735 binIndex = numpy.fromfile(
736 self.fp, [('binIndex', 'int16')], 1)[0][0]
737 nbins = numpy.fromfile(self.fp, [('nbins', 'int16')], 1)[
738 0][0] # Number of bins of the piece
735 739
736 740 #Co_Channel
737 jbin = numpy.fromfile(self.fp, [('jbin','uint16')],nbins)[0][0] # Spectrum piece to be normaliced
738 jmax = numpy.fromfile(self.fp, [('jmax','float32')],1)[0][0] # Maximun piece to be normaliced
739
741 jbin = numpy.fromfile(self.fp, [('jbin', 'uint16')], nbins)[
742 0][0] # Spectrum piece to be normalized
743 jmax = numpy.fromfile(self.fp, [('jmax', 'float32')], 1)[
744 0][0] # Maximum piece to be normalized
740 745
741 self.data_spc[irg,binIndex:binIndex+nbins,0] = self.data_spc[irg,binIndex:binIndex+nbins,0]+jbin/65530.*jmax
746 self.data_spc[irg, binIndex:binIndex + nbins, 0] = self.data_spc[irg,
747 binIndex:binIndex + nbins, 0] + jbin / 65530. * jmax
742 748
743 749 #Cx_Channel
744 jbin = numpy.fromfile(self.fp, [('jbin','uint16')],nbins)[0][0]
750 jbin = numpy.fromfile(
751 self.fp, [('jbin', 'uint16')], nbins)[0][0]
745 752 jmax = numpy.fromfile(self.fp, [('jmax','float32')],1)[0][0]
746 753
747
748 self.data_spc[irg,binIndex:binIndex+nbins,1] = self.data_spc[irg,binIndex:binIndex+nbins,1]+jbin/65530.*jmax
754 self.data_spc[irg, binIndex:binIndex + nbins, 1] = self.data_spc[irg,
755 binIndex:binIndex + nbins, 1] + jbin / 65530. * jmax
749 756
750 757 for bin in range(self.Num_Bins):
751 758
752 self.data_spc[:,bin,0] = self.data_spc[:,bin,0] - self.dataOut.HSDV[:,0]
753
754 self.data_spc[:,bin,1] = self.data_spc[:,bin,1] - self.dataOut.HSDV[:,1]
759 self.data_spc[:, bin, 0] = self.data_spc[:,
760 bin, 0] - self.dataOut.HSDV[:, 0]
755 761
762 self.data_spc[:, bin, 1] = self.data_spc[:,
763 bin, 1] - self.dataOut.HSDV[:, 1]
756 764
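
A sketch of the spectral "piece" decompression performed in the loop above: each significant piece is stored as uint16 samples (jbin) normalized to a float maximum (jmax), so the reconstructed power is jbin / 65530. * jmax accumulated into the bins starting at binIndex, with the Hildebrand noise level (HSDV) subtracted afterwards. All values below are hypothetical illustration data, not file contents:

    import numpy

    Num_Bins = 16
    spectrum = numpy.zeros(Num_Bins)

    # one hypothetical piece: starts at bin 4, three bins long
    binIndex = 4
    nbins = 3
    jmax = 2.5e-12                                              # maximum of the piece
    jbin = numpy.array([65530, 32765, 6553], dtype='uint16')    # normalized samples

    spectrum[binIndex:binIndex + nbins] += jbin / 65530. * jmax  # decompress and accumulate

    HSDV = 1.0e-13                                               # hypothetical noise level
    spectrum -= HSDV
    print spectrum
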
757 765 numpy.set_printoptions(threshold='nan')
758 766
@@ -787,17 +795,8 class MIRA35CReader (ProcessingUnit,FileHeaderMIRA35c,SRVIHeader,RecordHeader):
787 795 #shift = self.Num_Bins/2 + fix
788 796 #self.data_spc = numpy.array([ self.data_spc[: , self.Num_Bins-shift+1: , :] , self.data_spc[: , 0:self.Num_Bins-shift , :]])
789 797
790
791
792 798 '''Block Reading, the Block Data is received and Reshape is used to give it
793 799 shape.
794 800 '''
795 801
796 802 self.PointerReader = self.fp.tell()
797
798
799
800
801
802
803 No newline at end of file
@@ -11,7 +11,6 from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader,
11 11 from schainpy.model.data.jrodata import Spectra
12 12
13 13 class SpectraReader(JRODataReader, ProcessingUnit):
14
15 14 """
16 15 This class allows reading spectra data from processed files (.pdata). The data is
17 16 always read in blocks. The data read (a 3-dimensional array)
@@ -21,7 +20,6 class SpectraReader(JRODataReader, ProcessingUnit):
21 20 differentChannelPairs * heights * profiles (Cross Spectra)
22 21 channels * heights (DC Channels)
23 22
24
25 23 This class contains instances (objects) of the BasicHeader, SystemHeader,
26 24 RadarControllerHeader and Spectra classes. The first three are used to store
27 25 data header information (metadata), and the fourth (Spectra) to obtain and store a block of
@@ -76,7 +74,6 class SpectraReader(JRODataReader, ProcessingUnit):
76 74 Initializer of the SpectraReader class for reading spectra data.
77 75
78 76 Inputs:
79
80 77 dataOut : Object of the Spectra class. This object will be used to
81 78 store a data profile each time a request is made
82 79 (getData). The profile will be obtained from the data buffer,
@@ -84,15 +81,12 class SpectraReader(JRODataReader, ProcessingUnit):
84 81 data block.
85 82 If this parameter is not passed, one will be created internally.
86 83
87
88 84 Affected:
89
90 85 self.dataOut
91 86
92 87 Return : None
93 88 """
94 89
95
96 90 #Eliminar de la base la herencia
97 91 ProcessingUnit.__init__(self, **kwargs)
98 92
@@ -213,7 +207,6 class SpectraReader(JRODataReader, ProcessingUnit):
213 207 for i in range(0, self.processingHeaderObj.totalSpectra*2, 2):
214 208 if self.processingHeaderObj.spectraComb[i] == self.processingHeaderObj.spectraComb[i+1]:
215 209 self.nRdChannels = self.nRdChannels + 1 #pair of equal channels
216
217 210 else:
218 211 self.nRdPairs = self.nRdPairs + 1 #pair of different channels
219 212 self.rdPairList.append((self.processingHeaderObj.spectraComb[i], self.processingHeaderObj.spectraComb[i+1]))
@@ -245,7 +238,6 class SpectraReader(JRODataReader, ProcessingUnit):
245 238
246 239 Affected variables:
247 240
248
249 241 self.flagIsNewFile
250 242 self.flagIsNewBlock
251 243 self.nTotalBlocks
@@ -256,7 +248,6 class SpectraReader(JRODataReader, ProcessingUnit):
256 248 Exceptions:
257 249 If a read block is not a valid block
258 250 """
259
260 251 blockOk_flag = False
261 252 fpointer = self.fp.tell()
262 253
@@ -286,13 +277,11 class SpectraReader(JRODataReader, ProcessingUnit):
286 277 self.data_spc = spc
287 278
288 279 if self.processingHeaderObj.flag_cspc:
289
290 280 cspc = numpy.transpose( cspc, (0,2,1) )
291 281 self.data_cspc = cspc['real'] + cspc['imag']*1j
292 282 else:
293 283 self.data_cspc = None
294 284
295
296 285 if self.processingHeaderObj.flag_dc:
297 286 self.data_dc = dc['real'] + dc['imag']*1j
298 287 else:
@@ -377,7 +366,6 class SpectraReader(JRODataReader, ProcessingUnit):
377 366 self.dataOut.flagNoData = True
378 367 return 0
379 368
380
381 369 #data is a 3-dimensional numpy array (profiles, heights and channels)
382 370
383 371 if self.data_spc is None:
@@ -425,12 +413,11 class SpectraWriter(JRODataWriter, Operation):
425 413
426 414 # dataOut = None
427 415
428 def __init__(self):
416 def __init__(self, **kwargs):
429 417 """
430 418 Initializer of the SpectraWriter class for writing spectra data.
431 419
432 420 Affected:
433
434 421 self.dataOut
435 422 self.basicHeaderObj
436 423 self.systemHeaderObj
@@ -440,7 +427,7 class SpectraWriter(JRODataWriter, Operation):
440 427 Return: None
441 428 """
442 429
443 Operation.__init__(self)
430 Operation.__init__(self, **kwargs)
444 431
445 432 self.isConfig = False
446 433
@@ -450,7 +437,6 class SpectraWriter(JRODataWriter, Operation):
450 437
451 438 self.data_cspc = None
452 439
453
454 440 self.data_dc = None
455 441
456 442 self.fp = None
@@ -484,7 +470,6 class SpectraWriter(JRODataWriter, Operation):
484 470 return 1
485 471
486 472
487
488 473 def setBlockDimension(self):
489 474 """
490 475 Gets the dimensional shapes of the data sub-blocks that make up a block
@@ -512,7 +497,6 class SpectraWriter(JRODataWriter, Operation):
512 497 """
513 498 Writes the buffer to the designated file
514 499
515
516 500 Affected:
517 501 self.data_spc
518 502 self.data_cspc
@@ -542,7 +526,6 class SpectraWriter(JRODataWriter, Operation):
542 526 data = data.reshape((-1))
543 527 data.tofile(self.fp)
544 528
545
546 529 if self.data_dc is not None:
547 530 data = numpy.zeros( self.shape_dc_Buffer, self.dtype )
548 531 dc = self.data_dc
@@ -559,7 +542,6 class SpectraWriter(JRODataWriter, Operation):
559 542 # if self.data_cspc is not None:
560 543 # self.data_cspc.fill(0)
561 544
562
563 545 self.flagIsNewFile = 0
564 546 self.flagIsNewBlock = 1
565 547 self.nTotalBlocks += 1
@@ -572,7 +554,6 class SpectraWriter(JRODataWriter, Operation):
572 554 """
573 555 Sets a data block and then writes it to a file
574 556
575
576 557 Affected:
577 558 self.data_spc
578 559 self.data_cspc
@@ -590,7 +571,9 class SpectraWriter(JRODataWriter, Operation):
590 571
591 572 if self.dataOut.flagDiscontinuousBlock:
592 573 self.data_spc.fill(0)
574 if self.dataOut.data_cspc is not None:
593 575 self.data_cspc.fill(0)
576 if self.dataOut.data_dc is not None:
594 577 self.data_dc.fill(0)
595 578 self.setNextFile()
596 579
@@ -612,7 +595,6 class SpectraWriter(JRODataWriter, Operation):
612 595
613 596 return 1
614 597
615
616 598 def __getBlockSize(self):
617 599 '''
618 600 This method determines the number of bytes for a Spectra-type data block
@@ -665,7 +647,6 class SpectraWriter(JRODataWriter, Operation):
665 647 self.processingHeaderObj.totalSpectra = self.dataOut.nPairs + self.dataOut.nChannels
666 648 self.processingHeaderObj.shif_fft = self.dataOut.flagShiftFFT
667 649
668
669 650 if self.processingHeaderObj.totalSpectra > 0:
670 651 channelList = []
671 652 for channel in range(self.dataOut.nChannels):
@@ -15,6 +15,7 import tempfile
15 15 from StringIO import StringIO
16 16 # from _sha import blocksize
17 17
18
18 19 class VoltageReader(JRODataReader, ProcessingUnit):
19 20 """
20 21 This class allows reading voltage data from files in rawdata format (.r). The reading
@@ -177,7 +178,6 class VoltageReader(JRODataReader, ProcessingUnit):
177 178
178 179 return 0
179 180
180
181 181 def getBlockDimension(self):
182 182 """
183 183 Gets the number of points to read for each data block
@@ -188,11 +188,10 class VoltageReader(JRODataReader, ProcessingUnit):
188 188 Return:
189 189 None
190 190 """
191 pts2read = self.processingHeaderObj.profilesPerBlock * self.processingHeaderObj.nHeights * self.systemHeaderObj.nChannels
191 pts2read = self.processingHeaderObj.profilesPerBlock * \
192 self.processingHeaderObj.nHeights * self.systemHeaderObj.nChannels
192 193 self.blocksize = pts2read
193 194
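
A worked example of the block size that getBlockDimension computes above, with hypothetical header values: the number of complex points per block is simply profiles x heights x channels, and the byte count follows from the sample dtype (here a 16-bit real/imag pair, one of the dtypes selected later from the processing flags):

    import numpy

    profilesPerBlock = 128    # hypothetical processing-header values
    nHeights = 100
    nChannels = 2

    pts2read = profilesPerBlock * nHeights * nChannels
    print 'complex points per block:', pts2read              # 25600

    dtype = numpy.dtype([('real', '<i2'), ('imag', '<i2')])  # 16-bit samples
    print 'bytes per block:', pts2read * dtype.itemsize      # 102400
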
194
195
196 195 def readBlock(self):
197 196 """
198 197 readBlock reads the data block starting at the current position of the file pointer
@@ -230,13 +229,15 class VoltageReader(JRODataReader, ProcessingUnit):
230 229 junk = numpy.fromfile( self.fp, self.dtype, self.blocksize )
231 230
232 231 try:
233 junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
232 junk = junk.reshape((self.processingHeaderObj.profilesPerBlock,
233 self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels))
234 234 except:
235 235 #print "The read block (%3d) has not enough data" %self.nReadBlocks
236 236
237 237 if self.waitDataBlock(pointer_location=current_pointer_location):
238 238 junk = numpy.fromfile( self.fp, self.dtype, self.blocksize )
239 junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
239 junk = junk.reshape((self.processingHeaderObj.profilesPerBlock,
240 self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels))
240 241 # return 0
241 242
242 243 #Dimensions : nChannels, nProfiles, nSamples
@@ -264,7 +265,6 class VoltageReader(JRODataReader, ProcessingUnit):
264 265
265 266 if self.nTxs > 1:
266 267 self.dataOut.radarControllerHeaderObj.ippSeconds = self.radarControllerHeaderObj.ippSeconds/self.nTxs
267
268 268 #Time interval and code are properties of dataOut. Their values depend on radarControllerHeaderObj.
269 269
270 270 # self.dataOut.timeInterval = self.radarControllerHeaderObj.ippSeconds * self.processingHeaderObj.nCohInt
@@ -281,15 +281,18 class VoltageReader(JRODataReader, ProcessingUnit):
281 281
282 282 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock
283 283
284 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.nHeights) *self.processingHeaderObj.deltaHeight + self.processingHeaderObj.firstHeight
284 self.dataOut.heightList = numpy.arange(
285 self.processingHeaderObj.nHeights) * self.processingHeaderObj.deltaHeight + self.processingHeaderObj.firstHeight
285 286
286 287 self.dataOut.channelList = range(self.systemHeaderObj.nChannels)
287 288
288 289 self.dataOut.nCohInt = self.processingHeaderObj.nCohInt
289 290
290 self.dataOut.flagDecodeData = self.processingHeaderObj.flag_decode #asumo q la data no esta decodificada
291 # assume the data is not decoded
292 self.dataOut.flagDecodeData = self.processingHeaderObj.flag_decode
291 293
292 self.dataOut.flagDeflipData = self.processingHeaderObj.flag_deflip #asumo q la data no esta sin flip
294 # assume the data has not been deflipped
295 self.dataOut.flagDeflipData = self.processingHeaderObj.flag_deflip
293 296
294 297 self.dataOut.flagShiftFFT = self.processingHeaderObj.shif_fft
295 298
@@ -302,15 +305,19 class VoltageReader(JRODataReader, ProcessingUnit):
302 305 return
303 306
304 307 if self.nTxs < 1 and self.processingHeaderObj.profilesPerBlock % (1./self.nTxs) != 0:
305 raise ValueError, "1./nTxs (=%f), should be a multiple of nProfiles (=%d)" %(1./self.nTxs, self.processingHeaderObj.profilesPerBlock)
308 raise ValueError, "1./nTxs (=%f), should be a multiple of nProfiles (=%d)" % (
309 1. / self.nTxs, self.processingHeaderObj.profilesPerBlock)
306 310
307 311 if self.nTxs > 1 and self.processingHeaderObj.nHeights % self.nTxs != 0:
308 raise ValueError, "nTxs (=%d), should be a multiple of nHeights (=%d)" %(self.nTxs, self.processingHeaderObj.nHeights)
312 raise ValueError, "nTxs (=%d), should be a multiple of nHeights (=%d)" % (
313 self.nTxs, self.processingHeaderObj.nHeights)
309 314
310 self.datablock = self.datablock.reshape((self.systemHeaderObj.nChannels, self.processingHeaderObj.profilesPerBlock*self.nTxs, self.processingHeaderObj.nHeights/self.nTxs))
315 self.datablock = self.datablock.reshape(
316 (self.systemHeaderObj.nChannels, self.processingHeaderObj.profilesPerBlock * self.nTxs, self.processingHeaderObj.nHeights / self.nTxs))
311 317
312 318 self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock*self.nTxs
313 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.nHeights/self.nTxs) *self.processingHeaderObj.deltaHeight + self.processingHeaderObj.firstHeight
319 self.dataOut.heightList = numpy.arange(self.processingHeaderObj.nHeights / self.nTxs) * \
320 self.processingHeaderObj.deltaHeight + self.processingHeaderObj.firstHeight
314 321 self.dataOut.radarControllerHeaderObj.ippSeconds = self.radarControllerHeaderObj.ippSeconds/self.nTxs
315 322
316 323 return
@@ -321,7 +328,8 class VoltageReader(JRODataReader, ProcessingUnit):
321 328
322 329 self.firstHeaderSize = self.basicHeaderObj.size
323 330
324 datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR))
331 datatype = int(numpy.log2((self.processingHeaderObj.processFlags &
332 PROCFLAG.DATATYPE_MASK)) - numpy.log2(PROCFLAG.DATATYPE_CHAR))
325 333 if datatype == 0:
326 334 datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')])
327 335 elif datatype == 1:
@@ -339,12 +347,13 class VoltageReader(JRODataReader, ProcessingUnit):
339 347
340 348 self.dtype = datatype_str
341 349 #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c
342 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1)
350 self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + \
351 self.firstHeaderSize + self.basicHeaderSize * \
352 (self.processingHeaderObj.dataBlocksPerFile - 1)
343 353 # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels)
344 354 # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels)
345 355 self.getBlockDimension()
346 356
347
348 357 def getFromServer(self):
349 358 self.flagDiscontinuousBlock = 0
350 359 self.profileIndex = 0
@@ -369,15 +378,19 class VoltageReader(JRODataReader, ProcessingUnit):
369 378 timestamp = self.basicHeaderObj.get_datatime()
370 379 print '[Reading] - Block {} - {}'.format(self.nTotalBlocks, timestamp)
371 380 current_pointer_location = self.blockPointer
372 junk = numpy.fromstring( block[self.blockPointer:], self.dtype, self.blocksize )
381 junk = numpy.fromstring(
382 block[self.blockPointer:], self.dtype, self.blocksize)
373 383
374 384 try:
375 junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
385 junk = junk.reshape((self.processingHeaderObj.profilesPerBlock,
386 self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels))
376 387 except:
377 388 #print "The read block (%3d) has not enough data" %self.nReadBlocks
378 389 if self.waitDataBlock(pointer_location=current_pointer_location):
379 junk = numpy.fromstring( block[self.blockPointer:], self.dtype, self.blocksize )
380 junk = junk.reshape( (self.processingHeaderObj.profilesPerBlock, self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels) )
390 junk = numpy.fromstring(
391 block[self.blockPointer:], self.dtype, self.blocksize)
392 junk = junk.reshape((self.processingHeaderObj.profilesPerBlock,
393 self.processingHeaderObj.nHeights, self.systemHeaderObj.nChannels))
381 394 # return 0
382 395
383 396 #Dimensions : nChannels, nProfiles, nSamples
@@ -385,17 +398,21 class VoltageReader(JRODataReader, ProcessingUnit):
385 398 junk = numpy.transpose(junk, (2,0,1))
386 399 self.datablock = junk['real'] + junk['imag'] * 1j
387 400 self.profileIndex = 0
388 if self.selBlocksize == None: self.selBlocksize = self.dataOut.nProfiles
401 if self.selBlocksize == None:
402 self.selBlocksize = self.dataOut.nProfiles
389 403 if self.selBlocktime != None:
390 404 if self.dataOut.nCohInt is not None:
391 405 nCohInt = self.dataOut.nCohInt
392 406 else:
393 407 nCohInt = 1
394 self.selBlocksize = int(self.dataOut.nProfiles*round(self.selBlocktime/(nCohInt*self.dataOut.ippSeconds*self.dataOut.nProfiles)))
395 self.dataOut.data = self.datablock[:,self.profileIndex:self.profileIndex+self.selBlocksize,:]
408 self.selBlocksize = int(self.dataOut.nProfiles * round(self.selBlocktime / (
409 nCohInt * self.dataOut.ippSeconds * self.dataOut.nProfiles)))
410 self.dataOut.data = self.datablock[:,
411 self.profileIndex:self.profileIndex + self.selBlocksize, :]
396 412 datasize = self.dataOut.data.shape[1]
397 413 if datasize < self.selBlocksize:
398 buffer = numpy.zeros((self.dataOut.data.shape[0], self.selBlocksize, self.dataOut.data.shape[2]), dtype = 'complex')
414 buffer = numpy.zeros(
415 (self.dataOut.data.shape[0], self.selBlocksize, self.dataOut.data.shape[2]), dtype='complex')
399 416 buffer[:,:datasize,:] = self.dataOut.data
400 417 self.dataOut.data = buffer
401 418 self.profileIndex = blockIndex
@@ -487,20 +504,24 class VoltageReader(JRODataReader, ProcessingUnit):
487 504 """
488 505 Return a block
489 506 """
490 if self.selBlocksize == None: self.selBlocksize = self.dataOut.nProfiles
507 if self.selBlocksize == None:
508 self.selBlocksize = self.dataOut.nProfiles
491 509 if self.selBlocktime != None:
492 510 if self.dataOut.nCohInt is not None:
493 511 nCohInt = self.dataOut.nCohInt
494 512 else:
495 513 nCohInt = 1
496 self.selBlocksize = int(self.dataOut.nProfiles*round(self.selBlocktime/(nCohInt*self.dataOut.ippSeconds*self.dataOut.nProfiles)))
514 self.selBlocksize = int(self.dataOut.nProfiles * round(self.selBlocktime / (
515 nCohInt * self.dataOut.ippSeconds * self.dataOut.nProfiles)))
497 516
498 self.dataOut.data = self.datablock[:,self.profileIndex:self.profileIndex+self.selBlocksize,:]
517 self.dataOut.data = self.datablock[:,
518 self.profileIndex:self.profileIndex + self.selBlocksize, :]
499 519 self.profileIndex += self.selBlocksize
500 520 datasize = self.dataOut.data.shape[1]
501 521
502 522 if datasize < self.selBlocksize:
503 buffer = numpy.zeros((self.dataOut.data.shape[0],self.selBlocksize,self.dataOut.data.shape[2]), dtype = 'complex')
523 buffer = numpy.zeros(
524 (self.dataOut.data.shape[0], self.selBlocksize, self.dataOut.data.shape[2]), dtype='complex')
504 525 buffer[:,:datasize,:] = self.dataOut.data
505 526
506 527 while datasize < self.selBlocksize: #Not enough profiles to fill the block
@@ -515,7 +536,8 class VoltageReader(JRODataReader, ProcessingUnit):
515 536 blockIndex = self.selBlocksize - datasize
516 537 datablock1 = self.datablock[:,:blockIndex,:]
517 538
518 buffer[:,datasize:datasize+datablock1.shape[1],:] = datablock1
539 buffer[:, datasize:datasize +
540 datablock1.shape[1], :] = datablock1
519 541 datasize += datablock1.shape[1]
520 542
521 543 self.dataOut.data = buffer
@@ -528,11 +550,6 class VoltageReader(JRODataReader, ProcessingUnit):
528 550
529 551 self.getBasicHeader()
530 552
531 #print self.basicHeaderObj.printInfo()
532 #print self.systemHeaderObj.printInfo()
533 #print self.radarControllerHeaderObj.printInfo()
534 #print self.processingHeaderObj.printInfo()
535
536 553 self.dataOut.realtime = self.online
537 554
538 555 return self.dataOut.data
@@ -550,7 +567,6 class VoltageWriter(JRODataWriter, Operation):
550 567
551 568 shapeBuffer = None
552 569
553
554 570 def __init__(self, **kwargs):
555 571 """
556 572 Initializer of the VoltageWriter class for writing voltage data.
@@ -597,7 +613,6 class VoltageWriter(JRODataWriter, Operation):
597 613 return 1
598 614 return 0
599 615
600
601 616 def setBlockDimension(self):
602 617 """
603 618 Gets the dimensional shapes of the data sub-blocks that make up a block
@@ -696,12 +711,12 class VoltageWriter(JRODataWriter, Operation):
696 711
697 712 dtype_width = self.getDtypeWidth()
698 713
699 blocksize = int(self.dataOut.nHeights * self.dataOut.nChannels * self.profilesPerBlock * dtype_width * 2)
714 blocksize = int(self.dataOut.nHeights * self.dataOut.nChannels *
715 self.profilesPerBlock * dtype_width * 2)
700 716
701 717 return blocksize
702 718
703 719 def setFirstHeader(self):
704
705 720 """
706 721 Gets a copy of the First Header
707 722
@@ -722,10 +737,13 class VoltageWriter(JRODataWriter, Operation):
722 737 self.processingHeaderObj.blockSize = self.__getBlockSize()
723 738 self.processingHeaderObj.profilesPerBlock = self.profilesPerBlock
724 739 self.processingHeaderObj.dataBlocksPerFile = self.blocksPerFile
725 self.processingHeaderObj.nWindows = 1 #podria ser 1 o self.dataOut.processingHeaderObj.nWindows
740 # could be 1 or self.dataOut.processingHeaderObj.nWindows
741 self.processingHeaderObj.nWindows = 1
726 742 self.processingHeaderObj.nCohInt = self.dataOut.nCohInt
727 self.processingHeaderObj.nIncohInt = 1 # Cuando la data de origen es de tipo Voltage
728 self.processingHeaderObj.totalSpectra = 0 # Cuando la data de origen es de tipo Voltage
743 # When the source data is of type Voltage
744 self.processingHeaderObj.nIncohInt = 1
745 # When the source data is of type Voltage
746 self.processingHeaderObj.totalSpectra = 0
729 747
730 748 if self.dataOut.code is not None:
731 749 self.processingHeaderObj.code = self.dataOut.code
@@ -734,7 +752,8 class VoltageWriter(JRODataWriter, Operation):
734 752
735 753 if self.processingHeaderObj.nWindows != 0:
736 754 self.processingHeaderObj.firstHeight = self.dataOut.heightList[0]
737 self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - self.dataOut.heightList[0]
755 self.processingHeaderObj.deltaHeight = self.dataOut.heightList[1] - \
756 self.dataOut.heightList[0]
738 757 self.processingHeaderObj.nHeights = self.dataOut.nHeights
739 758 self.processingHeaderObj.samplesWin = self.dataOut.nHeights
740 759
@@ -6,8 +6,8 from jroproc_base import ProcessingUnit, Operation
6 6 from schainpy.model.data.jroamisr import AMISR
7 7
8 8 class AMISRProc(ProcessingUnit):
9 def __init__(self):
10 ProcessingUnit.__init__(self)
9 def __init__(self, **kwargs):
10 ProcessingUnit.__init__(self, **kwargs)
11 11 self.objectDict = {}
12 12 self.dataOut = AMISR()
13 13
@@ -17,7 +17,8 class AMISRProc(ProcessingUnit):
17 17
18 18
19 19 class PrintInfo(Operation):
20 def __init__(self):
20 def __init__(self, **kwargs):
21 Operation.__init__(self, **kwargs)
21 22 self.__isPrinted = False
22 23
23 24 def run(self, dataOut):
@@ -42,8 +43,8 class BeamSelector(Operation):
42 43 profileIndex = None
43 44 nProfiles = None
44 45
45 def __init__(self):
46
46 def __init__(self, **kwargs):
47 Operation.__init__(self, **kwargs)
47 48 self.profileIndex = 0
48 49 self.__isConfig = False
49 50
@@ -98,7 +99,8 class BeamSelector(Operation):
98 99
99 100 class ProfileToChannels(Operation):
100 101
101 def __init__(self):
102 def __init__(self, **kwargs):
103 Operation.__init__(self, **kwargs)
102 104 self.__isConfig = False
103 105 self.__counter_chan = 0
104 106 self.buffer = None
@@ -17,7 +17,6 from functools import partial
17 17 import time
18 18 #from sklearn.cluster import KMeans
19 19
20 import matplotlib.pyplot as plt
21 20
22 21 from scipy.optimize import fmin_l_bfgs_b #optimize with bounds on state papameters
23 22 from jroproc_base import ProcessingUnit, Operation
@@ -1766,8 +1765,8 class WindProfiler(Operation):
1766 1765
1767 1766 n = None
1768 1767
1769 def __init__(self):
1770 Operation.__init__(self)
1768 def __init__(self, **kwargs):
1769 Operation.__init__(self, **kwargs)
1771 1770
1772 1771 def __calculateCosDir(self, elev, azim):
1773 1772 zen = (90 - elev)*numpy.pi/180
@@ -2473,8 +2472,8 class WindProfiler(Operation):
2473 2472
2474 2473 class EWDriftsEstimation(Operation):
2475 2474
2476 def __init__(self):
2477 Operation.__init__(self)
2475 def __init__(self, **kwargs):
2476 Operation.__init__(self, **kwargs)
2478 2477
2479 2478 def __correctValues(self, heiRang, phi, velRadial, SNR):
2480 2479 listPhi = phi.tolist()
@@ -197,7 +197,6 class SpectraProc(ProcessingUnit):
197 197
198 198 self.dataOut.data_cspc = self.dataOut.data_cspc[pairsIndex]
199 199 self.dataOut.pairsList = pairs
200 self.dataOut.pairsIndexList = pairsIndex
201 200
202 201 return
203 202
@@ -877,7 +876,6 class IncohInt(Operation):
877 876 return self.__initime, avgdata_spc, avgdata_cspc, avgdata_dc
878 877
879 878 def run(self, dataOut, n=None, timeInterval=None, overlapping=False):
880
881 879 if n==1:
882 880 return
883 881
@@ -901,4 +899,3 class IncohInt(Operation):
901 899 dataOut.nIncohInt *= self.n
902 900 dataOut.utctime = avgdatatime
903 901 dataOut.flagNoData = False
904
1 NO CONTENT: modified file
@@ -17,6 +17,7 SCHAINPY - LOG
17 17
18 18 import click
19 19
20
20 21 def warning(message, tag='Warning'):
21 22 click.echo(click.style('[{}] {}'.format(tag, message), fg='yellow'))
22 23 pass
@@ -39,6 +40,6 def log(message, tag='Info'):
39 40
40 41 def makelogger(tag, bg='reset', fg='reset'):
41 42 def func(message):
42 click.echo(click.style('[{}] {}'.format(tag.upper(), message),
43 bg=bg, fg=fg))
43 click.echo(click.style('[{}] {}'.format(
44 tag.upper(), message), bg=bg, fg=fg))
44 45 return func
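
A short usage sketch of makelogger above: it returns a closure that prefixes each message with the upper-cased tag and colors it with click. The tag and color below are hypothetical:

    import click

    def makelogger(tag, bg='reset', fg='reset'):
        def func(message):
            click.echo(click.style('[{}] {}'.format(
                tag.upper(), message), bg=bg, fg=fg))
        return func

    searchlog = makelogger('searching', fg='cyan')   # hypothetical tag and color
    searchlog('looking for .fdt files in /data')     # prints "[SEARCHING] looking for ..."
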
@@ -8,6 +8,7 from setuptools import setup, Extension
8 8 from setuptools.command.build_ext import build_ext as _build_ext
9 9 from schainpy import __version__
10 10
11
11 12 class build_ext(_build_ext):
12 13 def finalize_options(self):
13 14 _build_ext.finalize_options(self)
@@ -63,7 +64,6 setup(name="schainpy",
63 64 "zmq",
64 65 "fuzzywuzzy",
65 66 "click",
66 "colorama",
67 67 "python-Levenshtein"
68 68 ],
69 69 )
1 NO CONTENT: file was removed
1 NO CONTENT: file was removed