@@ -0,0 +1,364 | |||
|
1 | ''' | |
|
2 | Created on Nov 9, 2016 | |
|
3 | ||
|
4 | @author: roj- LouVD | |
|
5 | ''' | |
|
6 | ||
|
7 | ||
|
8 | import os | |
|
9 | import sys | |
|
10 | import time | |
|
11 | import glob | |
|
12 | import datetime | |
|
13 | import numpy | |
|
14 | ||
|
15 | from schainpy.model.proc.jroproc_base import ProcessingUnit | |
|
16 | from schainpy.model.data.jrodata import Parameters | |
|
17 | from schainpy.model.io.jroIO_base import JRODataReader, isNumber | |
|
18 | ||
|
19 | FILE_HEADER_STRUCTURE = numpy.dtype([ | |
|
20 | ('FMN', '<u4'), | |
|
21 | ('nrec', '<u4'), | |
|
22 | ('fr_offset', '<u4'), | |
|
23 | ('id', '<u4'), | |
|
24 | ('site', 'u1', (32,)) | |
|
25 | ]) | |
|
26 | ||
|
27 | REC_HEADER_STRUCTURE = numpy.dtype([ | |
|
28 | ('rmn', '<u4'), | |
|
29 | ('rcounter', '<u4'), | |
|
30 | ('nr_offset', '<u4'), | |
|
31 | ('tr_offset', '<u4'), | |
|
32 | ('time', '<u4'), | |
|
33 | ('time_msec', '<u4'), | |
|
34 | ('tag', 'u1', (32,)), | |
|
35 | ('comments', 'u1', (32,)), | |
|
36 | ('lat', '<f4'), | |
|
37 | ('lon', '<f4'), | |
|
38 | ('gps_status', '<u4'), | |
|
39 | ('freq', '<u4'), | |
|
40 | ('freq0', '<u4'), | |
|
41 | ('nchan', '<u4'), | |
|
42 | ('delta_r', '<u4'), | |
|
43 | ('nranges', '<u4'), | |
|
44 | ('r0', '<u4'), | |
|
45 | ('prf', '<u4'), | |
|
46 | ('ncoh', '<u4'), | |
|
47 | ('npoints', '<u4'), | |
|
48 | ('polarization', '<i4'), | |
|
49 | ('rx_filter', '<u4'), | |
|
50 | ('nmodes', '<u4'), | |
|
51 | ('dmode_index', '<u4'), | |
|
52 | ('dmode_rngcorr', '<u4'), | |
|
53 | ('nrxs', '<u4'), | |
|
54 | ('acf_length', '<u4'), | |
|
55 | ('acf_lags', '<u4'), | |
|
56 | ('sea_to_atmos', '<f4'), | |
|
57 | ('sea_notch', '<u4'), | |
|
58 | ('lh_sea', '<u4'), | |
|
59 | ('hh_sea', '<u4'), | |
|
60 | ('nbins_sea', '<u4'), | |
|
61 | ('min_snr', '<f4'), | |
|
62 | ('min_cc', '<f4'), | |
|
63 | ('max_time_diff', '<f4') | |
|
64 | ]) | |
|
65 | ||
|
66 | DATA_STRUCTURE = numpy.dtype([ | |
|
67 | ('range', '<u4'), | |
|
68 | ('status', '<u4'), | |
|
69 | ('zonal', '<f4'), | |
|
70 | ('meridional', '<f4'), | |
|
71 | ('vertical', '<f4'), | |
|
72 | ('zonal_a', '<f4'), | |
|
73 | ('meridional_a', '<f4'), | |
|
74 | ('corrected_fading', '<f4'), # seconds | |
|
75 | ('uncorrected_fading', '<f4'), # seconds | |
|
76 | ('time_diff', '<f4'), | |
|
77 | ('major_axis', '<f4'), | |
|
78 | ('axial_ratio', '<f4'), | |
|
79 | ('orientation', '<f4'), | |
|
80 | ('sea_power', '<u4'), | |
|
81 | ('sea_algorithm', '<u4') | |
|
82 | ]) | |
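
The three structured dtypes above fix the on-disk layout of an .sswma file: one file header, then, per record, a record header followed by one row per range gate. A minimal sketch of how they map onto raw bytes (illustrative only; 'example.sswma' is a placeholder path, and the reader below extends these dtypes with per-channel fields):

    # Illustrative sketch only; 'example.sswma' is a placeholder path.
    fp = open('example.sswma', 'rb')
    file_header = numpy.fromfile(fp, FILE_HEADER_STRUCTURE, 1)   # header at the start of the file
    print 'records in file   :', file_header['nrec'][0]
    print 'record header size:', REC_HEADER_STRUCTURE.itemsize, 'bytes'
    print 'base data row size:', DATA_STRUCTURE.itemsize, 'bytes (per-channel fields are appended in readData)'
    fp.close()
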
|
83 | ||
|
84 | class BLTRParamReader(JRODataReader, ProcessingUnit): | |
|
85 | ''' | |
|
86 | Boundary Layer and Tropospheric Radar (BLTR) reader: reads wind velocities and SNR from *.sswma files (see the usage sketch after this listing) | |
|
87 | ''' | |
|
88 | ||
|
89 | ext = '.sswma' | |
|
90 | ||
|
91 | def __init__(self, **kwargs): | |
|
92 | ||
|
93 | ProcessingUnit.__init__(self , **kwargs) | |
|
94 | ||
|
95 | self.dataOut = Parameters() | |
|
96 | self.counter_records = 0 | |
|
97 | self.flagNoMoreFiles = 0 | |
|
98 | self.isConfig = False | |
|
99 | self.filename = None | |
|
100 | ||
|
101 | def setup(self, | |
|
102 | path=None, | |
|
103 | startDate=None, | |
|
104 | endDate=None, | |
|
105 | ext=None, | |
|
106 | startTime=datetime.time(0, 0, 0), | |
|
107 | endTime=datetime.time(23, 59, 59), | |
|
108 | timezone=0, | |
|
109 | status_value=0, | |
|
110 | **kwargs): | |
|
111 | ||
|
112 | self.path = path | |
|
113 | self.startTime = startTime | |
|
114 | self.endTime = endTime | |
|
115 | self.status_value = status_value | |
|
116 | ||
|
117 | if self.path is None: | |
|
118 | raise ValueError, "The path is not valid" | |
|
119 | ||
|
120 | if ext is None: | |
|
121 | ext = self.ext | |
|
122 | ||
|
123 | self.search_files(self.path, startDate, endDate, ext) | |
|
124 | self.timezone = timezone | |
|
125 | self.fileIndex = 0 | |
|
126 | ||
|
127 | if not self.fileList: | |
|
128 | raise Warning, "There are no files matching these dates in the folder: %s. \n Check 'startDate' and 'endDate' "%(path) | |
|
129 | ||
|
130 | self.setNextFile() | |
|
131 | ||
|
132 | def search_files(self, path, startDate, endDate, ext): | |
|
133 | ''' | |
|
134 | Searches for BLTR rawdata files in path | |

135 | Creates a list of files to process within [startDate, endDate] | |

136 | ||

137 | Input: | |

138 | path - Path where BLTR rawdata files are located | |

139 | startDate - Select files from this date | |

140 | endDate - Select files until this date | |

141 | ext - Extension of the files to read | |
|
142 | ||
|
143 | ''' | |
|
144 | ||
|
145 | print 'Searching files in %s' % (path) | |
|
146 | foldercounter = 0 | |
|
147 | fileList0 = glob.glob1(path, "*%s" % ext) | |
|
148 | fileList0.sort() | |
|
149 | ||
|
150 | self.fileList = [] | |
|
151 | self.dateFileList = [] | |
|
152 | ||
|
153 | for thisFile in fileList0: | |
|
154 | year = thisFile[-14:-10] | |
|
155 | if not isNumber(year): | |
|
156 | continue | |
|
157 | ||
|
158 | month = thisFile[-10:-8] | |
|
159 | if not isNumber(month): | |
|
160 | continue | |
|
161 | ||
|
162 | day = thisFile[-8:-6] | |
|
163 | if not isNumber(day): | |
|
164 | continue | |
|
165 | ||
|
166 | year, month, day = int(year), int(month), int(day) | |
|
167 | dateFile = datetime.date(year, month, day) | |
|
168 | ||
|
169 | if (startDate > dateFile) or (endDate < dateFile): | |
|
170 | continue | |
|
171 | ||
|
172 | self.fileList.append(thisFile) | |
|
173 | self.dateFileList.append(dateFile) | |
|
174 | ||
|
175 | return | |
|
176 | ||
|
177 | def setNextFile(self): | |
|
178 | ||
|
179 | file_id = self.fileIndex | |
|
180 | ||
|
181 | if file_id == len(self.fileList): | |
|
182 | print '\nNo more files in the folder' | |
|
183 | print 'Total number of file(s) read : {}'.format(self.fileIndex) | |
|
184 | self.flagNoMoreFiles = 1 | |
|
185 | return 0 | |
|
186 | ||
|
187 | print '\n[Setting file] (%s) ...' % self.fileList[file_id] | |
|
188 | filename = os.path.join(self.path, self.fileList[file_id]) | |
|
189 | ||
|
190 | dirname, name = os.path.split(filename) | |
|
191 | self.siteFile = name.split('.')[0] # 'peru2' ---> Piura - 'peru1' ---> Huancayo or Porcuya | |
|
192 | if self.filename is not None: | |
|
193 | self.fp.close() | |
|
194 | self.filename = filename | |
|
195 | self.fp = open(self.filename, 'rb') | |
|
196 | self.header_file = numpy.fromfile(self.fp, FILE_HEADER_STRUCTURE, 1) | |
|
197 | self.nrecords = self.header_file['nrec'][0] | |
|
198 | self.sizeOfFile = os.path.getsize(self.filename) | |
|
199 | self.counter_records = 0 | |
|
200 | self.flagIsNewFile = 0 | |
|
201 | self.fileIndex += 1 | |
|
202 | ||
|
203 | return 1 | |
|
204 | ||
|
205 | def readNextBlock(self): | |
|
206 | ||
|
207 | while True: | |
|
208 | if self.counter_records == self.nrecords: | |
|
209 | self.flagIsNewFile = 1 | |
|
210 | if not self.setNextFile(): | |
|
211 | return 0 | |
|
212 | ||
|
213 | self.readBlock() | |
|
214 | ||
|
215 | if (self.datatime.time() < self.startTime) or (self.datatime.time() > self.endTime): | |
|
216 | print "[Reading] Record No. %d/%d -> %s [Skipping]" %( | |
|
217 | self.counter_records, | |
|
218 | self.nrecords, | |
|
219 | self.datatime.ctime()) | |
|
220 | continue | |
|
221 | break | |
|
222 | ||
|
223 | print "[Reading] Record No. %d/%d -> %s" %( | |
|
224 | self.counter_records, | |
|
225 | self.nrecords, | |
|
226 | self.datatime.ctime()) | |
|
227 | ||
|
228 | return 1 | |
|
229 | ||
|
230 | def readBlock(self): | |
|
231 | ||
|
232 | pointer = self.fp.tell() | |
|
233 | header_rec = numpy.fromfile(self.fp, REC_HEADER_STRUCTURE, 1) | |
|
234 | self.nchannels = header_rec['nchan'][0]/2 | |
|
235 | self.kchan = header_rec['nrxs'][0] | |
|
236 | self.nmodes = header_rec['nmodes'][0] | |
|
237 | self.nranges = header_rec['nranges'][0] | |
|
238 | self.fp.seek(pointer) | |
|
239 | self.height = numpy.empty((self.nmodes, self.nranges)) | |
|
240 | self.snr = numpy.empty((self.nmodes, self.nchannels, self.nranges)) | |
|
241 | self.buffer = numpy.empty((self.nmodes, 3, self.nranges)) | |
|
242 | ||
|
243 | for mode in range(self.nmodes): | |
|
244 | self.readHeader() | |
|
245 | data = self.readData() | |
|
246 | self.height[mode] = (data[0] - self.correction) / 1000. | |
|
247 | self.buffer[mode] = data[1] | |
|
248 | self.snr[mode] = data[2] | |
|
249 | ||
|
250 | self.counter_records = self.counter_records + self.nmodes | |
|
251 | ||
|
252 | return | |
|
253 | ||
|
254 | def readHeader(self): | |
|
255 | ''' | |
|
256 | Reads the record header of a BLTR rawdata file | |
|
257 | ''' | |
|
258 | ||
|
259 | header_structure = numpy.dtype( | |
|
260 | REC_HEADER_STRUCTURE.descr + [ | |
|
261 | ('antenna_coord', 'f4', (2, self.nchannels)), | |
|
262 | ('rx_gains', 'u4', (self.nchannels,)), | |
|
263 | ('rx_analysis', 'u4', (self.nchannels,)) | |
|
264 | ] | |
|
265 | ) | |
|
266 | ||
|
267 | self.header_rec = numpy.fromfile(self.fp, header_structure, 1) | |
|
268 | self.lat = self.header_rec['lat'][0] | |
|
269 | self.lon = self.header_rec['lon'][0] | |
|
270 | self.delta = self.header_rec['delta_r'][0] | |
|
271 | self.correction = self.header_rec['dmode_rngcorr'][0] | |
|
272 | self.imode = self.header_rec['dmode_index'][0] | |
|
273 | self.antenna = self.header_rec['antenna_coord'] | |
|
274 | self.rx_gains = self.header_rec['rx_gains'] | |
|
275 | self.time1 = self.header_rec['time'][0] | |
|
276 | tseconds = self.header_rec['time'][0] | |
|
277 | local_t1 = time.localtime(tseconds) | |
|
278 | self.year = local_t1.tm_year | |
|
279 | self.month = local_t1.tm_mon | |
|
280 | self.day = local_t1.tm_mday | |
|
281 | self.t = datetime.datetime(self.year, self.month, self.day) | |
|
282 | self.datatime = datetime.datetime.utcfromtimestamp(self.time1) | |
|
283 | ||
|
284 | def readData(self): | |
|
285 | ''' | |
|
286 | Reads and filters a data block record of a BLTR rawdata file; filtering is done according to status_value. | |

287 | ||

288 | Input: | |

289 | status_value - Data whose status differs from status_value are set to NaN | |
|
290 | ||
|
291 | ''' | |
|
292 | ||
|
293 | data_structure = numpy.dtype( | |
|
294 | DATA_STRUCTURE.descr + [ | |
|
295 | ('rx_saturation', 'u4', (self.nchannels,)), | |
|
296 | ('chan_offset', 'u4', (2 * self.nchannels,)), | |
|
297 | ('rx_amp', 'u4', (self.nchannels,)), | |
|
298 | ('rx_snr', 'f4', (self.nchannels,)), | |
|
299 | ('cross_snr', 'f4', (self.kchan,)), | |
|
300 | ('sea_power_relative', 'f4', (self.kchan,))] | |
|
301 | ) | |
|
302 | ||
|
303 | data = numpy.fromfile(self.fp, data_structure, self.nranges) | |
|
304 | ||
|
305 | height = data['range'] | |
|
306 | winds = numpy.array((data['zonal'], data['meridional'], data['vertical'])) | |
|
307 | snr = data['rx_snr'].T | |
|
308 | ||
|
309 | winds[numpy.where(winds == -9999.)] = numpy.nan | |
|
310 | winds[:, numpy.where(data['status'] != self.status_value)] = numpy.nan | |
|
311 | snr[numpy.where(snr == -9999.)] = numpy.nan | |
|
312 | snr[:, numpy.where(data['status'] != self.status_value)] = numpy.nan | |
|
313 | snr = numpy.power(10, snr / 10) | |
|
314 | ||
|
315 | return height, winds, snr | |
|
316 | ||
|
317 | def set_output(self): | |
|
318 | ''' | |
|
319 | Storing data from databuffer to dataOut object | |
|
320 | ''' | |
|
321 | ||
|
322 | self.dataOut.time1 = self.time1 | |
|
323 | self.dataOut.data_SNR = self.snr | |
|
324 | self.dataOut.height= self.height | |
|
325 | self.dataOut.data_output = self.buffer | |
|
326 | self.dataOut.utctimeInit = self.time1 | |
|
327 | self.dataOut.utctime = self.dataOut.utctimeInit | |
|
328 | self.dataOut.counter_records = self.counter_records | |
|
329 | self.dataOut.nrecords = self.nrecords | |
|
330 | self.dataOut.useLocalTime = False | |
|
331 | self.dataOut.paramInterval = 157 | |
|
332 | self.dataOut.timezone = self.timezone | |
|
333 | self.dataOut.site = self.siteFile | |
|
334 | self.dataOut.nrecords = self.nrecords | |
|
335 | self.dataOut.sizeOfFile = self.sizeOfFile | |
|
336 | self.dataOut.lat = self.lat | |
|
337 | self.dataOut.lon = self.lon | |
|
338 | self.dataOut.channelList = range(self.nchannels) | |
|
339 | self.dataOut.kchan = self.kchan | |
|
340 | # self.dataOut.nHeights = self.nranges | |
|
341 | self.dataOut.delta = self.delta | |
|
342 | self.dataOut.correction = self.correction | |
|
343 | self.dataOut.nmodes = self.nmodes | |
|
344 | self.dataOut.imode = self.imode | |
|
345 | self.dataOut.antenna = self.antenna | |
|
346 | self.dataOut.rx_gains = self.rx_gains | |
|
347 | self.dataOut.flagNoData = False | |
|
348 | ||
|
349 | def getData(self): | |
|
350 | ''' | |
|
351 | Reads the next block and stores the data in the dataOut object | |
|
352 | ''' | |
|
353 | if self.flagNoMoreFiles: | |
|
354 | self.dataOut.flagNoData = True | |
|
355 | print 'No files left to process' | |
|
356 | return 0 | |
|
357 | ||
|
358 | if not(self.readNextBlock()): | |
|
359 | self.dataOut.flagNoData = True | |
|
360 | return 0 | |
|
361 | ||
|
362 | self.set_output() | |
|
363 | ||
|
364 | return 1 |
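
A minimal usage sketch for the reader above, run outside the schainpy controller that normally drives it. The data path and dates are placeholders, and the module name bltrIO_param is assumed from the package __init__ change further below; attribute names follow set_output():

    # Illustrative sketch only; paths, dates and the module name are assumptions.
    import datetime
    from schainpy.model.io.bltrIO_param import BLTRParamReader

    reader = BLTRParamReader()
    reader.setup(path='/data/bltr',                          # folder with *.sswma files
                 startDate=datetime.date(2016, 11, 1),
                 endDate=datetime.date(2016, 11, 30),
                 startTime=datetime.time(0, 0, 0),
                 endTime=datetime.time(23, 59, 59),
                 status_value=0)                             # keep only data flagged with this status

    while reader.getData():                                  # returns 0 once no files are left
        print reader.dataOut.utctime, reader.dataOut.site
        winds = reader.dataOut.data_output                   # zonal/meridional/vertical wind per mode
        snr = reader.dataOut.data_SNR
        heights = reader.dataOut.height
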
@@ -0,0 +1,375 | |||
|
1 | ''' | |
|
2 | Created on Aug 1, 2017 | |
|
3 | ||
|
4 | @author: Juan C. Espinoza | |
|
5 | ''' | |
|
6 | ||
|
7 | import os | |
|
8 | import sys | |
|
9 | import time | |
|
10 | import datetime | |
|
11 | ||
|
12 | import numpy | |
|
13 | ||
|
14 | from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation | |
|
15 | from schainpy.model.data.jrodata import Parameters | |
|
16 | from schainpy.model.data.jroheaderIO import RadarControllerHeader, SystemHeader | |
|
17 | from schainpy.model.graphics.jroplot_parameters import WindProfilerPlot | |
|
18 | from schainpy.model.io.jroIO_base import * | |
|
19 | ||
|
20 | try: | |
|
21 | import madrigal | |
|
22 | import madrigal.cedar | |
|
23 | from madrigal.cedar import MadrigalCatalogRecord | |
|
24 | except: | |
|
25 | print 'You should install the "madrigal" library if you want to read/write Madrigal data' | |
|
26 | ||
|
27 | ||
|
28 | class MADWriter(Operation): | |
|
29 | ||
|
30 | def __init__(self): | |
|
31 | ||
|
32 | Operation.__init__(self) | |
|
33 | self.dataOut = Parameters() | |
|
34 | self.path = None | |
|
35 | self.dataOut = None | |
|
36 | self.flagIsNewFile=1 | |
|
37 | self.ext = ".hdf5" | |
|
38 | ||
|
39 | return | |
|
40 | ||
|
41 | def run(self, dataOut, path , modetowrite,**kwargs): | |
|
42 | ||
|
43 | if self.flagIsNewFile: | |
|
44 | flagdata = self.setup(dataOut, path, modetowrite) | |
|
45 | ||
|
46 | self.putData() | |
|
47 | return | |
|
48 | ||
|
49 | def setup(self, dataOut, path, modetowrite): | |
|
50 | ''' | |
|
51 | Recovers the data needed to write a new *.hdf5 file | |

52 | Inputs: | |

53 | modetowrite -- mode to write (1 or 2) | |
|
54 | path -- destination path | |
|
55 | ||
|
56 | ''' | |
|
57 | ||
|
58 | self.im = modetowrite-1 | |
|
59 | if self.im!=0 and self.im!=1: | |
|
60 | raise ValueError, 'Check "modetowrite" value. Must be equal to 1 or 2, "{}" is not valid. '.format(modetowrite) | |
|
61 | ||
|
62 | self.dataOut = dataOut | |
|
63 | self.nmodes = self.dataOut.nmodes | |
|
64 | self.nchannels = self.dataOut.nchannels | |
|
65 | self.lat = self.dataOut.lat | |
|
66 | self.lon = self.dataOut.lon | |
|
67 | self.hcm = 3 | |
|
68 | self.thisDate = self.dataOut.utctimeInit | |
|
69 | self.year = self.dataOut.year | |
|
70 | self.month = self.dataOut.month | |
|
71 | self.day = self.dataOut.day | |
|
72 | self.path = path | |
|
73 | ||
|
74 | self.flagIsNewFile = 0 | |
|
75 | ||
|
76 | return 1 | |
|
77 | ||
|
78 | def setFile(self): | |
|
79 | ''' | |
|
80 | - Determining the file name for each mode of operation | |
|
81 | kinst - Kind of Instrument (mnemonic) | |

82 | kindat - Kind of Data (mnemonic) | |
|
83 | ||
|
84 | - Creating a cedarObject | |
|
85 | ||
|
86 | ''' | |
|
87 | lat_piura = -5.17 | |
|
88 | lat_huancayo = -12.04 | |
|
89 | lat_porcuya = -5.8 | |
|
90 | ||
|
91 | if '%2.2f' % self.lat == '%2.2f' % lat_piura: | |
|
92 | self.instMnemonic = 'pbr' | |
|
93 | ||
|
94 | elif '%2.2f' % self.lat == '%2.2f' % lat_huancayo: | |
|
95 | self.instMnemonic = 'hbr' | |
|
96 | ||
|
97 | elif '%2.2f' % self.lat == '%2.2f' % lat_porcuya: | |
|
98 | self.instMnemonic = 'obr' | |
|
99 | else: raise Warning, "The site of the file read doesn't match any known site. Only files from Huancayo, Piura and Porcuya can be processed.\n Check the file " | |
|
100 | ||
|
101 | mode = ['_mode1','_mode2'] | |
|
102 | ||
|
103 | self.hdf5filename = '%s%4.4d%2.2d%2.2d%s%s' % (self.instMnemonic, | |
|
104 | self.year, | |
|
105 | self.month, | |
|
106 | self.day, | |
|
107 | mode[self.im], | |
|
108 | self.ext) | |
|
109 | ||
|
110 | self.fullname=os.path.join(self.path,self.hdf5filename) | |
|
111 | ||
|
112 | if os.path.isfile(self.fullname) : | |
|
113 | print "Destination path '%s' already exists. Previous file deleted. " %self.fullname | |
|
114 | os.remove(self.fullname) | |
|
115 | ||
|
116 | # Identify kinst and kindat | |
|
117 | InstName = self.hdf5filename[0:3] | |
|
118 | KinstList = [1000, 1001, 1002] | |
|
119 | KinstId = {'pbr':0, 'hbr':1, 'obr':2} # pbr:piura, hbr:huancayo, obr:porcuya | |
|
120 | KindatList = [1600, 1601] # mode 1, mode 2 | |
|
121 | self.type = KinstId[InstName] | |
|
122 | self.kinst = KinstList[self.type] | |
|
123 | self.kindat = KindatList[self.im] | |
|
124 | ||
|
125 | try: | |
|
126 | self.cedarObj = madrigal.cedar.MadrigalCedarFile(self.fullname, True) | |
|
127 | except ValueError, message: | |
|
128 | print '[Error]: Could not create a cedar object with "madrigal.cedar.MadrigalCedarFile" ' | |
|
129 | return | |
|
130 | ||
|
131 | return 1 | |
|
132 | ||
|
133 | def writeBlock(self): | |
|
134 | ''' | |
|
135 | - Selecting mode of operation: | |
|
136 | ||
|
137 | BLTR high-resolution mode 1 - low atmosphere (0 - 3 km) // BLTR high-resolution mode 2 - high atmosphere (0 - 10 km) | |

138 | msnr - average signal-to-noise ratio in dB | |
|
139 | hcm - 3 km | |
|
140 | ||
|
141 | - Filling the cedarObject by a block: each array data entry is assigned a code that defines the parameter to write to the file | |
|
142 | ||
|
143 | GDLATR - Reference geod latitude (deg) | |
|
144 | GDLONR - Reference geographic longitude (deg) | |
|
145 | GDLAT2 - Geodetic latitude of second inst (deg) | |
|
146 | GLON2 - Geographic longitude of second inst (deg) | |
|
147 | ||
|
148 | GDALT - Geodetic altitude (height) (km) | |
|
149 | SNL - Log10 (signal to noise ratio) | |
|
150 | VN1P2 - Neutral wind in direction 1 (eastward) (m/s), ie zonal wind | |
|
151 | VN2P2 - Neutral wind in direction 2 (northward) (m/s), ie meridional wind | |
|
152 | EL2 - Ending elevation angle (deg), ie vertical wind | |
|
153 | ||
|
154 | Other parameters: /madrigal3/metadata/parcodes.tab | |
|
155 | ||
|
156 | ''' | |
|
157 | ||
|
158 | self.z_zon = self.dataOut.data_output[0,:,:] | |
|
159 | self.z_mer =self.dataOut.data_output[1,:,:] | |
|
160 | self.z_ver = self.dataOut.data_output[2,:,:] | |
|
161 | ||
|
162 | if self.im == 0: | |
|
163 | h_select = numpy.where(numpy.bitwise_and(self.dataOut.height[0, :] >= 0., self.dataOut.height[0, :] <= self.hcm, numpy.isfinite(self.dataOut.height[0, :]))) | |
|
164 | else: | |
|
165 | h_select = numpy.where(numpy.bitwise_and(self.dataOut.height[0, :] >= 0., self.dataOut.height[0, :] < 20, numpy.isfinite(self.dataOut.height[0, :]))) | |
|
166 | ||
|
167 | ht = h_select[0] | |
|
168 | ||
|
169 | self.o_height = self.dataOut.height[self.im, ht] | |
|
170 | self.o_zon = self.z_zon[ht, self.im] | |
|
171 | self.o_mer = self.z_mer[ht, self.im] | |
|
172 | self.o_ver = self.z_ver[ht, self.im] | |
|
173 | o_snr = self.dataOut.data_SNR[ :, :, self.im] | |
|
174 | ||
|
175 | o_snr = o_snr[ht, :] | |
|
176 | ||
|
177 | ndiv = numpy.nansum((numpy.isfinite(o_snr)), 1) | |
|
178 | ndiv = ndiv.astype(float) | |
|
179 | ||
|
180 | sel_div = numpy.where(ndiv == 0.) | |
|
181 | ndiv[sel_div] = numpy.nan | |
|
182 | ||
|
183 | if self.nchannels > 1: | |
|
184 | msnr = numpy.nansum(o_snr, axis=1) | |
|
185 | else: | |
|
186 | msnr = o_snr | |
|
187 | ||
|
188 | try: | |
|
189 | self.msnr = 10 * numpy.log10(msnr / ndiv) | |
|
190 | except ZeroDivisionError: | |
|
191 | self.msnr = 10 * numpy.log10(msnr /1) | |
|
192 | print 'Number of divisions (ndiv) set to 1 by default. Check SNR' | |
|
193 | ||
|
194 | time_t = time.gmtime(self.dataOut.time1) | |
|
195 | year = time_t.tm_year | |
|
196 | month = time_t.tm_mon | |
|
197 | day = time_t.tm_mday | |
|
198 | hour = time_t.tm_hour | |
|
199 | minute = time_t.tm_min | |
|
200 | second = time_t.tm_sec | |
|
201 | timedate_0 = datetime.datetime(year, month, day, hour, minute, second) | |
|
202 | ||
|
203 | # 1d parameters | |
|
204 | GDLATR = self.lat | |
|
205 | GDLONR = self.lon | |
|
206 | GDLAT2 = self.lat | |
|
207 | GLON2 = self.lon | |
|
208 | ||
|
209 | # 2d parameters | |
|
210 | GDALT = self.o_height | |
|
211 | ||
|
212 | SNL = self.msnr | |
|
213 | VN1P2 = self.o_zon | |
|
214 | VN2P2 = self.o_mer | |
|
215 | EL2 = self.o_ver | |
|
216 | NROW = len(self.o_height) | |
|
217 | ||
|
218 | startTime = timedate_0 | |
|
219 | endTime = startTime | |
|
220 | self.dataRec = madrigal.cedar.MadrigalDataRecord(self.kinst, | |
|
221 | self.kindat, | |
|
222 | startTime.year, | |
|
223 | startTime.month, | |
|
224 | startTime.day, | |
|
225 | startTime.hour, | |
|
226 | startTime.minute, | |
|
227 | startTime.second, | |
|
228 | 0, | |
|
229 | endTime.year, | |
|
230 | endTime.month, | |
|
231 | endTime.day, | |
|
232 | endTime.hour, | |
|
233 | endTime.minute, | |
|
234 | endTime.second, | |
|
235 | 0, | |
|
236 | ('gdlatr', 'gdlonr', 'gdlat2', 'glon2'), | |
|
237 | ('gdalt', 'snl', 'vn1p2', 'vn2p2', 'el2'), | |
|
238 | NROW, ind2DList=['gdalt']) | |
|
239 | ||
|
240 | # Setting 1d values | |
|
241 | self.dataRec.set1D('gdlatr', GDLATR) | |
|
242 | self.dataRec.set1D('gdlonr', GDLONR) | |
|
243 | self.dataRec.set1D('gdlat2', GDLAT2) | |
|
244 | self.dataRec.set1D('glon2', GLON2) | |
|
245 | ||
|
246 | # Setting 2d values | |
|
247 | for n in range(self.o_height.shape[0]): | |
|
248 | self.dataRec.set2D('gdalt', n, GDALT[n]) | |
|
249 | self.dataRec.set2D('snl', n, SNL[n]) | |
|
250 | self.dataRec.set2D('vn1p2', n, VN1P2[n]) | |
|
251 | self.dataRec.set2D('vn2p2', n, VN2P2[n]) | |
|
252 | self.dataRec.set2D('el2', n, EL2[n]) | |
|
253 | ||
|
254 | # Appending new data record | |
|
255 | ''' | |
|
256 | [MADRIGAL3]There are two ways to write to a MadrigalCedarFile. Either this method (write) is called after all the | |
|
257 | records have been appended to the MadrigalCedarFile, or dump is called after a certain number of records are appended, | |
|
258 | and then at the end dump is called a final time if there were any records not yet dumped, followed by addArray. | |
|
259 | ''' | |
|
260 | ||
|
261 | self.cedarObj.append(self.dataRec) | |
|
262 | print ' [Writing] records {} (mode {}).'.format(self.dataOut.counter_records,self.im+1) | |
|
263 | self.cedarObj.dump() | |
|
264 | ||
|
265 | ||
|
266 | ||
|
267 | ||
|
268 | def setHeader(self): | |
|
269 | ''' | |
|
270 | - Creating self.catHeadObj | |
|
271 | - Adding information catalog | |
|
272 | - Writing file header | |
|
273 | ||
|
274 | ''' | |
|
275 | self.catHeadObj = madrigal.cedar.CatalogHeaderCreator(self.fullname) | |
|
276 | kindatDesc, comments, analyst, history, principleInvestigator = self._info_BLTR() | |
|
277 | ||
|
278 | self.catHeadObj.createCatalog(principleInvestigator="Jarjar", | |
|
279 | expPurpose='characterize the atmospheric dynamics in this region, where the El Nino phenomenon frequently occurs', | |
|
280 | sciRemarks="http://madrigal3.haystack.mit.edu/static/CEDARMadrigalHdf5Format.pdf") | |
|
281 | ||
|
282 | self.catHeadObj.createHeader(kindatDesc, analyst, comments, history) | |
|
283 | ||
|
284 | self.catHeadObj.write() | |
|
285 | ||
|
286 | print '[File created] path: %s' % (self.fullname) | |
|
287 | ||
|
288 | def putData(self): | |
|
289 | ||
|
290 | if self.dataOut.flagNoData: | |
|
291 | return 0 | |
|
292 | ||
|
293 | if self.dataOut.counter_records == 1: | |
|
294 | self.setFile() | |
|
295 | print '[Writing] Setting new hdf5 file for the mode {}'.format(self.im+1) | |
|
296 | ||
|
297 | if self.dataOut.counter_records <= self.dataOut.nrecords: | |
|
298 | self.writeBlock() | |
|
299 | ||
|
300 | ||
|
301 | if self.dataOut.counter_records == self.dataOut.nrecords: | |
|
302 | self.cedarObj.addArray() | |
|
303 | ||
|
304 | self.setHeader() | |
|
305 | self.flagIsNewFile = 1 | |
|
306 | ||
|
307 | def _info_BLTR(self): | |
|
308 | ||
|
309 | kindatDesc = '''--This header is for KINDAT = %d''' % self.kindat | |
|
310 | history = None | |
|
311 | analyst = '''Jarjar''' | |
|
312 | principleInvestigator = ''' | |
|
313 | Jarjar | |
|
314 | Radio Observatorio de Jicamarca | |
|
315 | Instituto Geofisico del Peru | |
|
316 | ||
|
317 | ''' | |
|
318 | if self.type == 1: | |
|
319 | comments = ''' | |
|
320 | ||
|
321 | --These data are provided by two Boundary Layer and Tropospheric Radars (BLTR) deployed at two different locations in Peru (GMT-5): one at Piura (5.17 S, 80.64 W) and the other at Huancayo (12.04 S, 75.32 W). | |
|
322 | ||
|
323 | --The purpose of these observations is to measure the wind at different height levels; the radar measures the zonal (U), meridional (V) and vertical (W) wind velocity components on the north coast of Peru. The main purpose of these measurements is to characterize the atmospheric dynamics in this region, where the 'El Nino' phenomenon frequently occurs. | |
|
324 | ||
|
325 | --Kindat = 1600 contains the wind velocity components from 0 km to 3 km. | |

326 | ||

327 | --Kindat = 1601 contains the wind velocity components from 0 km to 10 km. | |
|
328 | ||
|
329 | --The Huancayo-BLTR is a VHF profiler radar system: a 3-channel coherent-receiver pulsed radar utilising state-of-the-art software and computing techniques to acquire, decode, and translate signals obtained from partial reflection echoes in the troposphere, lower stratosphere and mesosphere. It uses an array of three horizontally spaced and vertically directed receiving antennas. The data are recorded every thirty seconds and averaged to one-minute mean values of height and zonal, meridional and vertical wind. | |
|
330 | ||
|
331 | --The Huancayo-BLTR was installed in January 2010. The instrument was designed and constructed by Genesis Soft Pty. Ltd. It consists of three groups of spaced (distributed) antennas forming an isosceles triangle. | |
|
332 | ||
|
333 | ||
|
334 | Station _______ Geographic Coord ______ Geomagnetic Coord | |
|
335 | ||
|
336 | _______________ Latitude _ Longitude __ Latitude _ Longitude | |
|
337 | ||
|
338 | Huancayo (HUA) __12.04 S ___ 75.32 W _____ -12.05 ____ 352.85 | |
|
339 | Piura (PIU) _____ 5.17 S ___ 80.64 W ______ 5.18 ____ 350.93 | |
|
340 | ||
|
341 | WIND OBSERVATIONS | |
|
342 | ||
|
343 | --To obtain the wind, the BLTR uses the Spaced Antenna technique (e.g., Briggs 1984). The scatter and reflection are still provided by variations in the refractive index, as in the Doppler method (Gage and Balsley, 1978; Balsley and Gage, 1982; Larsen and Rottger, 1982), but instead of using the Doppler shift to derive the velocity components, the cross-correlation between signals in an array of three horizontally spaced and vertically directed receiving antennas is used. | |
|
344 | ||
|
345 | ...................................................................... | |
|
346 | For more information, consult the following references: | |
|
347 | - Balsley, B. B., and K. S. Gage., On the use of radars for operational wind profiling, Bull. Amer. Meteor.Soc.,63, 1009-1018, 1982. | |
|
348 | ||
|
349 | - Briggs, B. H., The analysis of spaced sensor data by correlation techniques, Handbook for MAP, Vol. 13, SCOSTEP Secretariat, University of Illinois, Urbana, 166-186, 1984. | |
|
350 | ||
|
351 | - Gage, K. S., and B.B. Balsley., Doppler radar probing of the clear atmosphere, Bull. Amer. Meteor.Soc., 59, 1074-1093, 1978. | |
|
352 | ||
|
353 | - Larsen, M. F., The Spaced Antenna Technique for Radar Wind Profiling, Journal of Atm. and Ocean. Technology. , Vol.6, 920-937, 1989. | |
|
354 | ||
|
355 | - Larsen, M. F., A method for single radar velocity measurements?, Handbook for MAP, SCOSTEP Secretariat, University of Illinois, Urbana, in press, 1989. | |
|
356 | ...................................................................... | |
|
357 | ||
|
358 | ACKNOWLEDGEMENTS: | |
|
359 | ||
|
360 | --The Piura and Huancayo BLTR are part of the network of instruments operated by the Jicamarca Radio Observatory. | |
|
361 | ||
|
362 | --The Jicamarca Radio Observatory is a facility of the Instituto Geofisico del Peru operated with support from the NSF Cooperative Agreement ATM-0432565 through Cornell University | |
|
363 | ||
|
364 | ...................................................................... | |
|
365 | ||
|
366 | Further questions and comments should be addressed to: | |
|
367 | Radio Observatorio de Jicamarca | |
|
368 | Instituto Geofisico del Peru | |
|
369 | Lima, Peru | |
|
370 | Web URL: http://jro.igp.gob.pe | |
|
371 | ...................................................................... | |
|
372 | ''' | |
|
373 | ||
|
374 | return kindatDesc, comments, analyst, history, principleInvestigator | |
|
375 |
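
Condensed, the Madrigal write flow that MADWriter drives for every record looks roughly like the sketch below, using only the madrigal.cedar calls that appear above; the file name, kinst/kindat and data values are placeholders:

    # Sketch of the write flow used by MADWriter above; all values are placeholders.
    import madrigal.cedar

    cedarObj = madrigal.cedar.MadrigalCedarFile('example.hdf5', True)          # create a new file
    rec = madrigal.cedar.MadrigalDataRecord(1001, 1600,                        # kinst (hbr), kindat (mode 1)
                                            2017, 8, 1, 0, 0, 0, 0,            # record start time
                                            2017, 8, 1, 0, 0, 0, 0,            # record end time
                                            ('gdlatr', 'gdlonr', 'gdlat2', 'glon2'),    # 1d parameters
                                            ('gdalt', 'snl', 'vn1p2', 'vn2p2', 'el2'),  # 2d parameters
                                            1, ind2DList=['gdalt'])            # one height row
    for parm, value in [('gdlatr', -12.04), ('gdlonr', -75.32), ('gdlat2', -12.04), ('glon2', -75.32)]:
        rec.set1D(parm, value)
    for parm, value in [('gdalt', 2.5), ('snl', 1.2), ('vn1p2', 3.4), ('vn2p2', -1.1), ('el2', 0.2)]:
        rec.set2D(parm, 0, value)                                              # row index 0
    cedarObj.append(rec)
    cedarObj.dump()                                                            # flush appended records
    cedarObj.addArray()                                                        # finalize once all records are written
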
|
@@ -1,19 +1,19 | |||
|
1 | 1 | ''' |
|
2 | 2 | |
|
3 | 3 | $Author: murco $ |
|
4 | 4 | $Id: JRODataIO.py 169 2012-11-19 21:57:03Z murco $ |
|
5 | 5 | ''' |
|
6 | 6 | |
|
7 | 7 | from jroIO_voltage import * |
|
8 | 8 | from jroIO_spectra import * |
|
9 | 9 | from jroIO_heispectra import * |
|
10 | 10 | from jroIO_usrp import * |
|
11 | 11 | |
|
12 | 12 | from jroIO_kamisr import * |
|
13 | 13 | from jroIO_param import * |
|
14 | 14 | from jroIO_hf import * |
|
15 | 15 | |
|
16 | from bltrIO_param import * | |
|
16 | 17 | from jroIO_bltr import * |
|
17 | 18 | from jroIO_mira35c import * |
|
18 | from io_bltr_block import * | |
|
19 | 19 |
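
With the added "from bltrIO_param import *", the new BLTR parameter reader is pulled into the package namespace. Assuming this is schainpy/model/io/__init__.py (consistent with the imports above) and the reader shown earlier lives in bltrIO_param.py, client scripts can import it directly:

    # Assumes the package layout implied by the imports above.
    from schainpy.model.io import BLTRParamReader
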
|
@@ -1,1795 +1,1794 | |||
|
1 | 1 | ''' |
|
2 | 2 | Created on Jul 2, 2014 |
|
3 | 3 | |
|
4 | 4 | @author: roj-idl71 |
|
5 | 5 | ''' |
|
6 | 6 | import os |
|
7 | 7 | import sys |
|
8 | 8 | import glob |
|
9 | 9 | import time |
|
10 | 10 | import numpy |
|
11 | 11 | import fnmatch |
|
12 | 12 | import inspect |
|
13 | 13 | import time, datetime |
|
14 | #import h5py | |
|
15 | 14 | import traceback |
|
16 | 15 | |
|
17 | 16 | try: |
|
18 | 17 | from gevent import sleep |
|
19 | 18 | except: |
|
20 | 19 | from time import sleep |
|
21 | 20 | |
|
22 | 21 | from schainpy.model.data.jroheaderIO import PROCFLAG, BasicHeader, SystemHeader, RadarControllerHeader, ProcessingHeader |
|
23 | 22 | from schainpy.model.data.jroheaderIO import get_dtype_index, get_numpy_dtype, get_procflag_dtype, get_dtype_width |
|
24 | 23 | |
|
25 | 24 | LOCALTIME = True |
|
26 | 25 | |
|
27 | 26 | def isNumber(cad): |
|
28 | 27 | """ |
|
29 | 28 | Checks whether the characters composing a string can be converted to a number.

30 | 29 |

31 | 30 | Exceptions:

32 | 31 | If a given string cannot be converted to a number

33 | 32 | Input:

34 | 33 | str, string to be analyzed to determine whether or not it can be converted to a number

35 | 34 |

36 | 35 | Return:

37 | 36 | True : if the string is numeric

38 | 37 | False : if it is not a numeric string
|
39 | 38 | """ |
|
40 | 39 | try: |
|
41 | 40 | float( cad ) |
|
42 | 41 | return True |
|
43 | 42 | except: |
|
44 | 43 | return False |
|
45 | 44 | |
|
46 | 45 | def isFileInEpoch(filename, startUTSeconds, endUTSeconds): |
|
47 | 46 | """ |
|
48 | 47 | Determines whether or not a data file falls within the specified date range.

49 | 48 |

50 | 49 | Inputs:

51 | 50 | filename : full name of the data file in Jicamarca format (.r)

52 | 51 |

53 | 52 | startUTSeconds : start date of the selected range, given in

54 | 53 | seconds since 01/01/1970.

55 | 54 | endUTSeconds : end date of the selected range, given in

56 | 55 | seconds since 01/01/1970.

57 | 56 |

58 | 57 | Return:

59 | 58 | Boolean : returns True if the data file contains data within the specified

60 | 59 | date range, otherwise returns False.

61 | 60 |

62 | 61 | Exceptions:

63 | 62 | If the file does not exist or cannot be opened

64 | 63 | If the header cannot be read.
|
65 | 64 | |
|
66 | 65 | """ |
|
67 | 66 | basicHeaderObj = BasicHeader(LOCALTIME) |
|
68 | 67 | |
|
69 | 68 | try: |
|
70 | 69 | fp = open(filename,'rb') |
|
71 | 70 | except IOError: |
|
72 | 71 | print "The file %s can't be opened" %(filename) |
|
73 | 72 | return 0 |
|
74 | 73 | |
|
75 | 74 | sts = basicHeaderObj.read(fp) |
|
76 | 75 | fp.close() |
|
77 | 76 | |
|
78 | 77 | if not(sts): |
|
79 | 78 | print "Skipping the file %s because it does not have a valid header" %(filename)
|
80 | 79 | return 0 |
|
81 | 80 | |
|
82 | 81 | if not ((startUTSeconds <= basicHeaderObj.utc) and (endUTSeconds > basicHeaderObj.utc)): |
|
83 | 82 | return 0 |
|
84 | 83 | |
|
85 | 84 | return 1 |
|
86 | 85 | |
|
87 | 86 | def isTimeInRange(thisTime, startTime, endTime): |
|
88 | 87 | |
|
89 | 88 | if endTime >= startTime: |
|
90 | 89 | if (thisTime < startTime) or (thisTime > endTime): |
|
91 | 90 | return 0 |
|
92 | 91 | |
|
93 | 92 | return 1 |
|
94 | 93 | else: |
|
95 | 94 | if (thisTime < startTime) and (thisTime > endTime): |
|
96 | 95 | return 0 |
|
97 | 96 | |
|
98 | 97 | return 1 |
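
isTimeInRange treats an endTime that is earlier than startTime as a range wrapping past midnight; a quick illustration with placeholder times:

    # Illustrative only: the second branch handles ranges that wrap past midnight.
    import datetime
    print isTimeInRange(datetime.time(23, 30), datetime.time(22, 0), datetime.time(2, 0))   # 1, inside 22:00-02:00
    print isTimeInRange(datetime.time(12, 0), datetime.time(22, 0), datetime.time(2, 0))    # 0, outside the wrapped range
    print isTimeInRange(datetime.time(12, 0), datetime.time(8, 0), datetime.time(18, 0))    # 1, ordinary range
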
|
99 | 98 | |
|
100 | 99 | def isFileInTimeRange(filename, startDate, endDate, startTime, endTime): |
|
101 | 100 | """ |
|
102 | 101 | Returns 1 if the data file falls within the specified time range.

103 | 102 |

104 | 103 | Inputs:

105 | 104 | filename : full name of the data file in Jicamarca format (.r)

106 | 105 |

107 | 106 | startDate : start date of the selected range, as a datetime.date

108 | 107 |

109 | 108 | endDate : end date of the selected range, as a datetime.date

110 | 109 |

111 | 110 | startTime : start time of the selected range, as a datetime.time

112 | 111 |

113 | 112 | endTime : end time of the selected range, as a datetime.time

114 | 113 |

115 | 114 | Return:

116 | 115 | Boolean : returns True if the data file contains data within the specified

117 | 116 | date range, otherwise returns False.

118 | 117 |

119 | 118 | Exceptions:

120 | 119 | If the file does not exist or cannot be opened

121 | 120 | If the header cannot be read.
|
122 | 121 | |
|
123 | 122 | """ |
|
124 | 123 | |
|
125 | 124 | |
|
126 | 125 | try: |
|
127 | 126 | fp = open(filename,'rb') |
|
128 | 127 | except IOError: |
|
129 | 128 | print "The file %s can't be opened" %(filename) |
|
130 | 129 | return None |
|
131 | 130 | |
|
132 | 131 | firstBasicHeaderObj = BasicHeader(LOCALTIME) |
|
133 | 132 | systemHeaderObj = SystemHeader() |
|
134 | 133 | radarControllerHeaderObj = RadarControllerHeader() |
|
135 | 134 | processingHeaderObj = ProcessingHeader() |
|
136 | 135 | |
|
137 | 136 | lastBasicHeaderObj = BasicHeader(LOCALTIME) |
|
138 | 137 | |
|
139 | 138 | sts = firstBasicHeaderObj.read(fp) |
|
140 | 139 | |
|
141 | 140 | if not(sts): |
|
142 | 141 | print "[Reading] Skipping the file %s because it does not have a valid header" %(filename)
|
143 | 142 | return None |
|
144 | 143 | |
|
145 | 144 | if not systemHeaderObj.read(fp): |
|
146 | 145 | return None |
|
147 | 146 | |
|
148 | 147 | if not radarControllerHeaderObj.read(fp): |
|
149 | 148 | return None |
|
150 | 149 | |
|
151 | 150 | if not processingHeaderObj.read(fp): |
|
152 | 151 | return None |
|
153 | 152 | |
|
154 | 153 | filesize = os.path.getsize(filename) |
|
155 | 154 | |
|
156 | 155 | offset = processingHeaderObj.blockSize + 24 #header size |
|
157 | 156 | |
|
158 | 157 | if filesize <= offset: |
|
159 | 158 | print "[Reading] %s: This file does not have enough data" %filename
|
160 | 159 | return None |
|
161 | 160 | |
|
162 | 161 | fp.seek(-offset, 2) |
|
163 | 162 | |
|
164 | 163 | sts = lastBasicHeaderObj.read(fp) |
|
165 | 164 | |
|
166 | 165 | fp.close() |
|
167 | 166 | |
|
168 | 167 | thisDatetime = lastBasicHeaderObj.datatime |
|
169 | 168 | thisTime_last_block = thisDatetime.time() |
|
170 | 169 | |
|
171 | 170 | thisDatetime = firstBasicHeaderObj.datatime |
|
172 | 171 | thisDate = thisDatetime.date() |
|
173 | 172 | thisTime_first_block = thisDatetime.time() |
|
174 | 173 | |
|
175 | 174 | #General case |
|
176 | 175 | # o>>>>>>>>>>>>>><<<<<<<<<<<<<<o |
|
177 | 176 | #-----------o----------------------------o----------- |
|
178 | 177 | # startTime endTime |
|
179 | 178 | |
|
180 | 179 | if endTime >= startTime: |
|
181 | 180 | if (thisTime_last_block < startTime) or (thisTime_first_block > endTime): |
|
182 | 181 | return None |
|
183 | 182 | |
|
184 | 183 | return thisDatetime |
|
185 | 184 | |
|
186 | 185 | #If endTime < startTime then endTime belongs to the next day |
|
187 | 186 | |
|
188 | 187 | |
|
189 | 188 | #<<<<<<<<<<<o o>>>>>>>>>>> |
|
190 | 189 | #-----------o----------------------------o----------- |
|
191 | 190 | # endTime startTime |
|
192 | 191 | |
|
193 | 192 | if (thisDate == startDate) and (thisTime_last_block < startTime): |
|
194 | 193 | return None |
|
195 | 194 | |
|
196 | 195 | if (thisDate == endDate) and (thisTime_first_block > endTime): |
|
197 | 196 | return None |
|
198 | 197 | |
|
199 | 198 | if (thisTime_last_block < startTime) and (thisTime_first_block > endTime): |
|
200 | 199 | return None |
|
201 | 200 | |
|
202 | 201 | return thisDatetime |
|
203 | 202 | |
|
204 | 203 | def isFolderInDateRange(folder, startDate=None, endDate=None): |
|
205 | 204 | """ |
|
206 | 205 | Returns 1 if the data folder falls within the specified date range.

207 | 206 |

208 | 207 | Inputs:

209 | 208 | folder : full name of the directory.

210 | 209 | Its format should be "/path_root/?YYYYDDD"

211 | 210 |

212 | 211 | where:

213 | 212 | YYYY : year (e.g. 2015)

214 | 213 | DDD : day of the year (e.g. 305)

215 | 214 |

216 | 215 | startDate : start date of the selected range, as a datetime.date

217 | 216 |

218 | 217 | endDate : end date of the selected range, as a datetime.date

219 | 218 |

220 | 219 | Return:

221 | 220 | Boolean : returns True if the folder contains data within the specified

222 | 221 | date range, otherwise returns False.

223 | 222 | Exceptions:

224 | 223 | If the directory does not have the proper format
|
225 | 224 | """ |
|
226 | 225 | |
|
227 | 226 | basename = os.path.basename(folder) |
|
228 | 227 | |
|
229 | 228 | if not isRadarFolder(basename): |
|
230 | 229 | print "The folder %s does not have the right format" %folder
|
231 | 230 | return 0 |
|
232 | 231 | |
|
233 | 232 | if startDate and endDate: |
|
234 | 233 | thisDate = getDateFromRadarFolder(basename) |
|
235 | 234 | |
|
236 | 235 | if thisDate < startDate: |
|
237 | 236 | return 0 |
|
238 | 237 | |
|
239 | 238 | if thisDate > endDate: |
|
240 | 239 | return 0 |
|
241 | 240 | |
|
242 | 241 | return 1 |
|
243 | 242 | |
|
244 | 243 | def isFileInDateRange(filename, startDate=None, endDate=None): |
|
245 | 244 | """ |
|
246 | 245 | Returns 1 if the data file falls within the specified date range.

247 | 246 |

248 | 247 | Inputs:

249 | 248 | filename : full name of the data file in Jicamarca format (.r)

250 | 249 |

251 | 250 | Its format should be "?YYYYDDDsss"

252 | 251 |

253 | 252 | where:

254 | 253 | YYYY : year (e.g. 2015)

255 | 254 | DDD : day of the year (e.g. 305)

256 | 255 | sss : set

257 | 256 |

258 | 257 | startDate : start date of the selected range, as a datetime.date

259 | 258 |

260 | 259 | endDate : end date of the selected range, as a datetime.date

261 | 260 |

262 | 261 | Return:

263 | 262 | Boolean : returns True if the data file contains data within the specified

264 | 263 | date range, otherwise returns False.

265 | 264 | Exceptions:

266 | 265 | If the file does not have the proper format
|
267 | 266 | """ |
|
268 | 267 | |
|
269 | 268 | basename = os.path.basename(filename) |
|
270 | 269 | |
|
271 | 270 | if not isRadarFile(basename): |
|
272 | 271 | print "The filename %s does not have the right format" %filename
|
273 | 272 | return 0 |
|
274 | 273 | |
|
275 | 274 | if startDate and endDate: |
|
276 | 275 | thisDate = getDateFromRadarFile(basename) |
|
277 | 276 | |
|
278 | 277 | if thisDate < startDate: |
|
279 | 278 | return 0 |
|
280 | 279 | |
|
281 | 280 | if thisDate > endDate: |
|
282 | 281 | return 0 |
|
283 | 282 | |
|
284 | 283 | return 1 |
|
285 | 284 | |
|
286 | 285 | def getFileFromSet(path, ext, set): |
|
287 | 286 | validFilelist = [] |
|
288 | 287 | fileList = os.listdir(path) |
|
289 | 288 | |
|
290 | 289 | # 0 1234 567 89A BCDE |
|
291 | 290 | # H YYYY DDD SSS .ext |
|
292 | 291 | |
|
293 | 292 | for thisFile in fileList: |
|
294 | 293 | try: |
|
295 | 294 | year = int(thisFile[1:5]) |
|
296 | 295 | doy = int(thisFile[5:8]) |
|
297 | 296 | except: |
|
298 | 297 | continue |
|
299 | 298 | |
|
300 | 299 | if (os.path.splitext(thisFile)[-1].lower() != ext.lower()): |
|
301 | 300 | continue |
|
302 | 301 | |
|
303 | 302 | validFilelist.append(thisFile) |
|
304 | 303 | |
|
305 | 304 | myfile = fnmatch.filter(validFilelist,'*%4.4d%3.3d%3.3d*'%(year,doy,set)) |
|
306 | 305 | |
|
307 | 306 | if len(myfile)!= 0: |
|
308 | 307 | return myfile[0] |
|
309 | 308 | else: |
|
310 | 309 | filename = '*%4.4d%3.3d%3.3d%s'%(year,doy,set,ext.lower()) |
|
311 | 310 | print 'the filename %s does not exist'%filename |
|
312 | 311 | print '...going to the last file: ' |
|
313 | 312 | |
|
314 | 313 | if validFilelist: |
|
315 | 314 | validFilelist = sorted( validFilelist, key=str.lower ) |
|
316 | 315 | return validFilelist[-1] |
|
317 | 316 | |
|
318 | 317 | return None |
|
319 | 318 | |
|
320 | 319 | def getlastFileFromPath(path, ext): |
|
321 | 320 | """ |
|
322 | 321 | Filters fileList, keeping only the files that match the "PYYYYDDDSSS.ext" format,

323 | 322 | and returns the last file of the list that remains after filtering.

324 | 323 |

325 | 324 | Input:

326 | 325 | fileList : list containing all the files (without path) inside a given folder

327 | 326 | ext : extension of the files contained in the folder

328 | 327 |

329 | 328 | Return:

330 | 329 | The last file of the given folder, without the path.
|
331 | 330 | """ |
|
332 | 331 | validFilelist = [] |
|
333 | 332 | fileList = os.listdir(path) |
|
334 | 333 | |
|
335 | 334 | # 0 1234 567 89A BCDE |
|
336 | 335 | # H YYYY DDD SSS .ext |
|
337 | 336 | |
|
338 | 337 | for thisFile in fileList: |
|
339 | 338 | |
|
340 | 339 | year = thisFile[1:5] |
|
341 | 340 | if not isNumber(year): |
|
342 | 341 | continue |
|
343 | 342 | |
|
344 | 343 | doy = thisFile[5:8] |
|
345 | 344 | if not isNumber(doy): |
|
346 | 345 | continue |
|
347 | 346 | |
|
348 | 347 | year = int(year) |
|
349 | 348 | doy = int(doy) |
|
350 | 349 | |
|
351 | 350 | if (os.path.splitext(thisFile)[-1].lower() != ext.lower()): |
|
352 | 351 | continue |
|
353 | 352 | |
|
354 | 353 | validFilelist.append(thisFile) |
|
355 | 354 | |
|
356 | 355 | if validFilelist: |
|
357 | 356 | validFilelist = sorted( validFilelist, key=str.lower ) |
|
358 | 357 | return validFilelist[-1] |
|
359 | 358 | |
|
360 | 359 | return None |
|
361 | 360 | |
|
362 | 361 | def checkForRealPath(path, foldercounter, year, doy, set, ext): |
|
363 | 362 | """ |
|
364 | 363 | Since Linux is case sensitive, checkForRealPath finds the correct name of a path;

365 | 364 | it tries several upper/lowercase name combinations to determine

366 | 365 | the exact path of a given file.

367 | 366 |

368 | 367 | Example :

369 | 368 | the correct file name is .../.../D2009307/P2009307367.ext

370 | 369 |

371 | 370 | Then the function tries the following combinations

372 | 371 | .../.../y2009307367.ext

373 | 372 | .../.../Y2009307367.ext

374 | 373 | .../.../x2009307/y2009307367.ext

375 | 374 | .../.../x2009307/Y2009307367.ext

376 | 375 | .../.../X2009307/y2009307367.ext

377 | 376 | .../.../X2009307/Y2009307367.ext

378 | 377 | in this case the last combination of letters is identical to the file being searched

379 | 378 |

380 | 379 | Return:

381 | 380 | If it finds the right combination it returns the full path and the file name,

382 | 381 | otherwise it returns None as the path and the last uppercase name combination

383 | 382 | as the filename
|
384 | 383 | """ |
|
385 | 384 | fullfilename = None |
|
386 | 385 | find_flag = False |
|
387 | 386 | filename = None |
|
388 | 387 | |
|
389 | 388 | prefixDirList = [None,'d','D'] |
|
390 | 389 | if ext.lower() == ".r": #voltage |
|
391 | 390 | prefixFileList = ['d','D'] |
|
392 | 391 | elif ext.lower() == ".pdata": #spectra |
|
393 | 392 | prefixFileList = ['p','P'] |
|
394 | 393 | else: |
|
395 | 394 | return None, filename |
|
396 | 395 | |
|
397 | 396 | #sweep over the possible combinations
|
398 | 397 | for prefixDir in prefixDirList: |
|
399 | 398 | thispath = path |
|
400 | 399 | if prefixDir != None: |
|
401 | 400 | #build the directory name xYYYYDDD (x=d or x=D)
|
402 | 401 | if foldercounter == 0: |
|
403 | 402 | thispath = os.path.join(path, "%s%04d%03d" % ( prefixDir, year, doy )) |
|
404 | 403 | else: |
|
405 | 404 | thispath = os.path.join(path, "%s%04d%03d_%02d" % ( prefixDir, year, doy , foldercounter)) |
|
406 | 405 | for prefixFile in prefixFileList: #sweep over the two possible "D" combinations

407 | 406 | filename = "%s%04d%03d%03d%s" % ( prefixFile, year, doy, set, ext ) #build the file name xYYYYDDDSSS.ext

408 | 407 | fullfilename = os.path.join( thispath, filename ) #build the full path
|
409 | 408 | |
|
410 | 409 | if os.path.exists( fullfilename ): #check that the file exists
|
411 | 410 | find_flag = True |
|
412 | 411 | break |
|
413 | 412 | if find_flag: |
|
414 | 413 | break |
|
415 | 414 | |
|
416 | 415 | if not(find_flag): |
|
417 | 416 | return None, filename |
|
418 | 417 | |
|
419 | 418 | return fullfilename, filename |
|
420 | 419 | |
|
421 | 420 | def isRadarFolder(folder): |
|
422 | 421 | try: |
|
423 | 422 | year = int(folder[1:5]) |
|
424 | 423 | doy = int(folder[5:8]) |
|
425 | 424 | except: |
|
426 | 425 | return 0 |
|
427 | 426 | |
|
428 | 427 | return 1 |
|
429 | 428 | |
|
430 | 429 | def isRadarFile(file): |
|
431 | 430 | try: |
|
432 | 431 | year = int(file[1:5]) |
|
433 | 432 | doy = int(file[5:8]) |
|
434 | 433 | set = int(file[8:11]) |
|
435 | 434 | except: |
|
436 | 435 | return 0 |
|
437 | 436 | |
|
438 | 437 | return 1 |
|
439 | 438 | |
|
440 | 439 | def getDateFromRadarFile(file): |
|
441 | 440 | try: |
|
442 | 441 | year = int(file[1:5]) |
|
443 | 442 | doy = int(file[5:8]) |
|
444 | 443 | set = int(file[8:11]) |
|
445 | 444 | except: |
|
446 | 445 | return None |
|
447 | 446 | |
|
448 | 447 | thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy-1) |
|
449 | 448 | return thisDate |
|
450 | 449 | |
|
451 | 450 | def getDateFromRadarFolder(folder): |
|
452 | 451 | try: |
|
453 | 452 | year = int(folder[1:5]) |
|
454 | 453 | doy = int(folder[5:8]) |
|
455 | 454 | except: |
|
456 | 455 | return None |
|
457 | 456 | |
|
458 | 457 | thisDate = datetime.date(year, 1, 1) + datetime.timedelta(doy-1) |
|
459 | 458 | return thisDate |
|
460 | 459 | |
|
461 | 460 | class JRODataIO: |
|
462 | 461 | |
|
463 | 462 | c = 3E8 |
|
464 | 463 | |
|
465 | 464 | isConfig = False |
|
466 | 465 | |
|
467 | 466 | basicHeaderObj = None |
|
468 | 467 | |
|
469 | 468 | systemHeaderObj = None |
|
470 | 469 | |
|
471 | 470 | radarControllerHeaderObj = None |
|
472 | 471 | |
|
473 | 472 | processingHeaderObj = None |
|
474 | 473 | |
|
475 | 474 | dtype = None |
|
476 | 475 | |
|
477 | 476 | pathList = [] |
|
478 | 477 | |
|
479 | 478 | filenameList = [] |
|
480 | 479 | |
|
481 | 480 | filename = None |
|
482 | 481 | |
|
483 | 482 | ext = None |
|
484 | 483 | |
|
485 | 484 | flagIsNewFile = 1 |
|
486 | 485 | |
|
487 | 486 | flagDiscontinuousBlock = 0 |
|
488 | 487 | |
|
489 | 488 | flagIsNewBlock = 0 |
|
490 | 489 | |
|
491 | 490 | fp = None |
|
492 | 491 | |
|
493 | 492 | firstHeaderSize = 0 |
|
494 | 493 | |
|
495 | 494 | basicHeaderSize = 24 |
|
496 | 495 | |
|
497 | 496 | versionFile = 1103 |
|
498 | 497 | |
|
499 | 498 | fileSize = None |
|
500 | 499 | |
|
501 | 500 | # ippSeconds = None |
|
502 | 501 | |
|
503 | 502 | fileSizeByHeader = None |
|
504 | 503 | |
|
505 | 504 | fileIndex = None |
|
506 | 505 | |
|
507 | 506 | profileIndex = None |
|
508 | 507 | |
|
509 | 508 | blockIndex = None |
|
510 | 509 | |
|
511 | 510 | nTotalBlocks = None |
|
512 | 511 | |
|
513 | 512 | maxTimeStep = 30 |
|
514 | 513 | |
|
515 | 514 | lastUTTime = None |
|
516 | 515 | |
|
517 | 516 | datablock = None |
|
518 | 517 | |
|
519 | 518 | dataOut = None |
|
520 | 519 | |
|
521 | 520 | blocksize = None |
|
522 | 521 | |
|
523 | 522 | getByBlock = False |
|
524 | 523 | |
|
525 | 524 | def __init__(self): |
|
526 | 525 | |
|
527 | 526 | raise NotImplementedError |
|
528 | 527 | |
|
529 | 528 | def run(self): |
|
530 | 529 | |
|
531 | 530 | raise NotImplementedError |
|
532 | 531 | |
|
533 | 532 | def getDtypeWidth(self): |
|
534 | 533 | |
|
535 | 534 | dtype_index = get_dtype_index(self.dtype) |
|
536 | 535 | dtype_width = get_dtype_width(dtype_index) |
|
537 | 536 | |
|
538 | 537 | return dtype_width |
|
539 | 538 | |
|
540 | 539 | def getAllowedArgs(self): |
|
541 | 540 | return inspect.getargspec(self.run).args |
|
542 | 541 | |
|
543 | 542 | class JRODataReader(JRODataIO): |
|
544 | 543 | |
|
545 | 544 | |
|
546 | 545 | online = 0 |
|
547 | 546 | |
|
548 | 547 | realtime = 0 |
|
549 | 548 | |
|
550 | 549 | nReadBlocks = 0 |
|
551 | 550 | |
|
552 | 551 | delay = 10 #number of seconds waiting a new file |
|
553 | 552 | |
|
554 | 553 | nTries = 3 #quantity tries |
|
555 | 554 | nTries = 3 #number of tries
|
556 | 555 | nFiles = 3 #number of files for searching |
|
557 | 556 | |
|
558 | 557 | path = None |
|
559 | 558 | |
|
560 | 559 | foldercounter = 0 |
|
561 | 560 | |
|
562 | 561 | flagNoMoreFiles = 0 |
|
563 | 562 | |
|
564 | 563 | datetimeList = [] |
|
565 | 564 | |
|
566 | 565 | __isFirstTimeOnline = 1 |
|
567 | 566 | |
|
568 | 567 | __printInfo = True |
|
569 | 568 | |
|
570 | 569 | profileIndex = None |
|
571 | 570 | |
|
572 | 571 | nTxs = 1 |
|
573 | 572 | |
|
574 | 573 | txIndex = None |
|
575 | 574 | |
|
576 | 575 | #Added-------------------- |
|
577 | 576 | |
|
578 | 577 | selBlocksize = None |
|
579 | 578 | |
|
580 | 579 | selBlocktime = None |
|
581 | 580 | |
|
582 | 581 | |
|
583 | 582 | def __init__(self): |
|
584 | 583 | |
|
585 | 584 | """ |
|
586 | 585 | This class is used to find data files |
|
587 | 586 | |
|
588 | 587 | Example: |
|
589 | 588 | reader = JRODataReader() |
|
590 | 589 | fileList = reader.findDataFiles() |
|
591 | 590 | |
|
592 | 591 | """ |
|
593 | 592 | pass |
|
594 | 593 | |
|
595 | 594 | |
|
596 | 595 | def createObjByDefault(self): |
|
597 | 596 | """ |
|
598 | 597 | |
|
599 | 598 | """ |
|
600 | 599 | raise NotImplementedError |
|
601 | 600 | |
|
602 | 601 | def getBlockDimension(self): |
|
603 | 602 | |
|
604 | 603 | raise NotImplementedError |
|
605 | 604 | |
|
606 | 605 | def __searchFilesOffLine(self, |
|
607 | 606 | path, |
|
608 | 607 | startDate=None, |
|
609 | 608 | endDate=None, |
|
610 | 609 | startTime=datetime.time(0,0,0), |
|
611 | 610 | endTime=datetime.time(23,59,59), |
|
612 | 611 | set=None, |
|
613 | 612 | expLabel='', |
|
614 | 613 | ext='.r', |
|
615 | 614 | queue=None, |
|
616 | 615 | cursor=None, |
|
617 | 616 | skip=None, |
|
618 | 617 | walk=True): |
|
619 | 618 | |
|
620 | 619 | self.filenameList = [] |
|
621 | 620 | self.datetimeList = [] |
|
622 | 621 | |
|
623 | 622 | pathList = [] |
|
624 | 623 | |
|
625 | 624 | dateList, pathList = self.findDatafiles(path, startDate, endDate, expLabel, ext, walk, include_path=True) |
|
626 | 625 | |
|
627 | 626 | if dateList == []: |
|
628 | 627 | # print "[Reading] Date range selected invalid [%s - %s]: No *%s files in %s)" %(startDate, endDate, ext, path) |
|
629 | 628 | return None, None |
|
630 | 629 | |
|
631 | 630 | if len(dateList) > 1: |
|
632 | 631 | print "[Reading] Data found for date range [%s - %s]: total days = %d" %(startDate, endDate, len(dateList)) |
|
633 | 632 | else: |
|
634 | 633 | print "[Reading] Data found for date range [%s - %s]: date = %s" %(startDate, endDate, dateList[0]) |
|
635 | 634 | |
|
636 | 635 | filenameList = [] |
|
637 | 636 | datetimeList = [] |
|
638 | 637 | |
|
639 | 638 | for thisPath in pathList: |
|
640 | 639 | # thisPath = pathList[pathDict[file]] |
|
641 | 640 | |
|
642 | 641 | fileList = glob.glob1(thisPath, "*%s" %ext) |
|
643 | 642 | fileList.sort() |
|
644 | 643 | |
|
645 | 644 | skippedFileList = [] |
|
646 | 645 | |
|
647 | 646 | if cursor is not None and skip is not None: |
|
648 | 647 | # if cursor*skip > len(fileList): |
|
649 | 648 | if skip == 0: |
|
650 | 649 | if queue is not None: |
|
651 | 650 | queue.put(len(fileList)) |
|
652 | 651 | skippedFileList = [] |
|
653 | 652 | else: |
|
654 | 653 | skippedFileList = fileList[cursor*skip: cursor*skip + skip] |
|
655 | 654 | |
|
656 | 655 | else: |
|
657 | 656 | skippedFileList = fileList |
|
658 | 657 | |
|
659 | 658 | for file in skippedFileList: |
|
660 | 659 | |
|
661 | 660 | filename = os.path.join(thisPath,file) |
|
662 | 661 | |
|
663 | 662 | if not isFileInDateRange(filename, startDate, endDate): |
|
664 | 663 | continue |
|
665 | 664 | |
|
666 | 665 | thisDatetime = isFileInTimeRange(filename, startDate, endDate, startTime, endTime) |
|
667 | 666 | |
|
668 | 667 | if not(thisDatetime): |
|
669 | 668 | continue |
|
670 | 669 | |
|
671 | 670 | filenameList.append(filename) |
|
672 | 671 | datetimeList.append(thisDatetime) |
|
673 | 672 | |
|
674 | 673 | if not(filenameList): |
|
675 | 674 | print "[Reading] Time range selected invalid [%s - %s]: No *%s files in %s)" %(startTime, endTime, ext, path) |
|
676 | 675 | return None, None |
|
677 | 676 | |
|
678 | 677 | print "[Reading] %d file(s) was(were) found in time range: %s - %s" %(len(filenameList), startTime, endTime) |
|
679 | 678 | |
|
680 | 679 | |
|
681 | 680 | for i in range(len(filenameList)): |
|
682 | 681 | print "[Reading] %s -> [%s]" %(filenameList[i], datetimeList[i].ctime()) |
|
683 | 682 | |
|
684 | 683 | self.filenameList = filenameList |
|
685 | 684 | self.datetimeList = datetimeList |
|
686 | 685 | |
|
687 | 686 | return pathList, filenameList |
|
688 | 687 | |
|
689 | 688 | def __searchFilesOnLine(self, path, expLabel = "", ext = None, walk=True, set=None): |
|
690 | 689 | |
|
691 | 690 | """ |
|
692 | 691 | Finds the last file of the last folder (whether or not constrained by startDateTime) and

693 | 692 | returns the file found, together with some additional information.

694 | 693 |

695 | 694 | Input:

696 | 695 | path : folder that contains the data files

697 | 696 |

698 | 697 | expLabel : name of the sub-experiment (subfolder)

699 | 698 |

700 | 699 | ext : extension of the files

701 | 700 |

702 | 701 | walk : if enabled, the search is done inside the day subdirectories (doypath)

703 | 702 |

704 | 703 | Return:

705 | 704 | directory : the directory where the file was found

706 | 705 | filename : the last file of the given folder

707 | 706 | year : the year

708 | 707 | doy : the day of the year

709 | 708 | set : the set of the file
|
710 | 709 | |
|
711 | 710 | |
|
712 | 711 | """ |
|
713 | 712 | if not os.path.isdir(path): |
|
714 | 713 | return None, None, None, None, None, None |
|
715 | 714 | |
|
716 | 715 | dirList = [] |
|
717 | 716 | |
|
718 | 717 | if not walk: |
|
719 | 718 | fullpath = path |
|
720 | 719 | foldercounter = 0 |
|
721 | 720 | else: |
|
722 | 721 | #Keep only the directories |
|
723 | 722 | for thisPath in os.listdir(path): |
|
724 | 723 | if not os.path.isdir(os.path.join(path,thisPath)): |
|
725 | 724 | continue |
|
726 | 725 | if not isRadarFolder(thisPath): |
|
727 | 726 | continue |
|
728 | 727 | |
|
729 | 728 | dirList.append(thisPath) |
|
730 | 729 | |
|
731 | 730 | if not(dirList): |
|
732 | 731 | return None, None, None, None, None, None |
|
733 | 732 | |
|
734 | 733 | dirList = sorted( dirList, key=str.lower ) |
|
735 | 734 | |
|
736 | 735 | doypath = dirList[-1] |
|
737 | 736 | foldercounter = int(doypath.split('_')[1]) if len(doypath.split('_'))>1 else 0 |
|
738 | 737 | fullpath = os.path.join(path, doypath, expLabel) |
|
739 | 738 | |
|
740 | 739 | |
|
741 | 740 | print "[Reading] %s folder was found: " %(fullpath ) |
|
742 | 741 | |
|
743 | 742 | if set == None: |
|
744 | 743 | filename = getlastFileFromPath(fullpath, ext) |
|
745 | 744 | else: |
|
746 | 745 | filename = getFileFromSet(fullpath, ext, set) |
|
747 | 746 | |
|
748 | 747 | if not(filename): |
|
749 | 748 | return None, None, None, None, None, None |
|
750 | 749 | |
|
751 | 750 | print "[Reading] %s file was found" %(filename) |
|
752 | 751 | |
|
753 | 752 | if not(self.__verifyFile(os.path.join(fullpath, filename))): |
|
754 | 753 | return None, None, None, None, None, None |
|
755 | 754 | |
|
756 | 755 | year = int( filename[1:5] ) |
|
757 | 756 | doy = int( filename[5:8] ) |
|
758 | 757 | set = int( filename[8:11] ) |
|
759 | 758 | |
|
760 | 759 | return fullpath, foldercounter, filename, year, doy, set |
|
761 | 760 | |
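__searchFilesOnLine above recovers the year, the day of year and the set number from fixed character positions of the file name, following the x YYYY DDD SSS .ext convention spelled out again in the writer further below. A short sketch of that slicing on a hypothetical, well-formed name:

# Sketch of the fixed-width name convention: x YYYY DDD SSS .ext (the name below is hypothetical).
filename = "D2016314007.r"
year = int(filename[1:5])     # 2016
doy  = int(filename[5:8])     # 314 (day of year)
fset = int(filename[8:11])    # 7   (set number within that day)
print((year, doy, fset))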
|
762 | 761 | def __setNextFileOffline(self): |
|
763 | 762 | |
|
764 | 763 | idFile = self.fileIndex |
|
765 | 764 | |
|
766 | 765 | while (True): |
|
767 | 766 | idFile += 1 |
|
768 | 767 | if not(idFile < len(self.filenameList)): |
|
769 | 768 | self.flagNoMoreFiles = 1 |
|
770 | 769 | # print "[Reading] No more Files" |
|
771 | 770 | return 0 |
|
772 | 771 | |
|
773 | 772 | filename = self.filenameList[idFile] |
|
774 | 773 | |
|
775 | 774 | if not(self.__verifyFile(filename)): |
|
776 | 775 | continue |
|
777 | 776 | |
|
778 | 777 | fileSize = os.path.getsize(filename) |
|
779 | 778 | fp = open(filename,'rb') |
|
780 | 779 | break |
|
781 | 780 | |
|
782 | 781 | self.flagIsNewFile = 1 |
|
783 | 782 | self.fileIndex = idFile |
|
784 | 783 | self.filename = filename |
|
785 | 784 | self.fileSize = fileSize |
|
786 | 785 | self.fp = fp |
|
787 | 786 | |
|
788 | 787 | # print "[Reading] Setting the file: %s"%self.filename |
|
789 | 788 | |
|
790 | 789 | return 1 |
|
791 | 790 | |
|
792 | 791 | def __setNextFileOnline(self): |
|
793 | 792 | """ |
|
794 | 793 | Searches, inside a specific folder, for the next file with enough data to be read; if |

795 | 794 | no valid file is found it waits for a given time and then looks among the next n possible |

796 | 795 | files. |
|
797 | 796 | |
|
798 | 797 | Affected: |
|
799 | 798 | self.flagIsNewFile |
|
800 | 799 | self.filename |
|
801 | 800 | self.fileSize |
|
802 | 801 | self.fp |
|
803 | 802 | self.set |
|
804 | 803 | self.flagNoMoreFiles |
|
805 | 804 | |
|
806 | 805 | Return: |
|
807 | 806 | 0 : if, after searching, no valid next file could be found |

808 | 807 | 1 : if the file was opened successfully and is ready to be read |

809 | 808 | |

810 | 809 | Exceptions: |

811 | 810 | If a given file cannot be opened |
|
812 | 811 | """ |
|
813 | 812 | nFiles = 0 |
|
814 | 813 | fileOk_flag = False |
|
815 | 814 | firstTime_flag = True |
|
816 | 815 | |
|
817 | 816 | self.set += 1 |
|
818 | 817 | |
|
819 | 818 | if self.set > 999: |
|
820 | 819 | self.set = 0 |
|
821 | 820 | self.foldercounter += 1 |
|
822 | 821 | |
|
823 | 822 | #look for the first available file |
|
824 | 823 | fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext ) |
|
825 | 824 | if fullfilename: |
|
826 | 825 | if self.__verifyFile(fullfilename, False): |
|
827 | 826 | fileOk_flag = True |
|
828 | 827 | |
|
829 | 828 | #if no file is found, wait and then search again |
|
830 | 829 | if not(fileOk_flag): |
|
831 | 830 | for nFiles in range(self.nFiles+1): #search among the next self.nFiles+1 possible files |

832 | 831 | |

833 | 832 | if firstTime_flag: #on the first pass, loop self.nTries times |

834 | 833 | tries = self.nTries |

835 | 834 | else: |

836 | 835 | tries = 1 #after the first pass, try only once |
|
837 | 836 | |
|
838 | 837 | for nTries in range( tries ): |
|
839 | 838 | if firstTime_flag: |
|
840 | 839 | print "\t[Reading] Waiting %0.2f sec for the next file: \"%s\" , try %03d ..." % ( self.delay, filename, nTries+1 ) |
|
841 | 840 | sleep( self.delay ) |
|
842 | 841 | else: |
|
843 | 842 | print "\t[Reading] Searching the next \"%s%04d%03d%03d%s\" file ..." % (self.optchar, self.year, self.doy, self.set, self.ext) |
|
844 | 843 | |
|
845 | 844 | fullfilename, filename = checkForRealPath( self.path, self.foldercounter, self.year, self.doy, self.set, self.ext ) |
|
846 | 845 | if fullfilename: |
|
847 | 846 | if self.__verifyFile(fullfilename): |
|
848 | 847 | fileOk_flag = True |
|
849 | 848 | break |
|
850 | 849 | |
|
851 | 850 | if fileOk_flag: |
|
852 | 851 | break |
|
853 | 852 | |
|
854 | 853 | firstTime_flag = False |
|
855 | 854 | |
|
856 | 855 | print "\t[Reading] Skipping the file \"%s\" because it doesn't exist" % filename |
|
857 | 856 | self.set += 1 |
|
858 | 857 | |
|
859 | 858 | if nFiles == (self.nFiles-1): #if the requested file was not found, move on and search in the next folder |
|
860 | 859 | self.set = 0 |
|
861 | 860 | self.doy += 1 |
|
862 | 861 | self.foldercounter = 0 |
|
863 | 862 | |
|
864 | 863 | if fileOk_flag: |
|
865 | 864 | self.fileSize = os.path.getsize( fullfilename ) |
|
866 | 865 | self.filename = fullfilename |
|
867 | 866 | self.flagIsNewFile = 1 |
|
868 | 867 | if self.fp != None: self.fp.close() |
|
869 | 868 | self.fp = open(fullfilename, 'rb') |
|
870 | 869 | self.flagNoMoreFiles = 0 |
|
871 | 870 | # print '[Reading] Setting the file: %s' % fullfilename |
|
872 | 871 | else: |
|
873 | 872 | self.fileSize = 0 |
|
874 | 873 | self.filename = None |
|
875 | 874 | self.flagIsNewFile = 0 |
|
876 | 875 | self.fp = None |
|
877 | 876 | self.flagNoMoreFiles = 1 |
|
878 | 877 | # print '[Reading] No more files to read' |
|
879 | 878 | |
|
880 | 879 | return fileOk_flag |
|
881 | 880 | |
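__setNextFileOnline above is essentially bounded polling: try the expected next name, sleep self.delay seconds between attempts, and after nFiles misses roll over to the next set or day. A stripped-down sketch of that retry pattern; the check callable, the limits and the delay are illustrative only:

# Generic retry sketch (illustrative only; the real loop also rolls over the set/doy counters).
import time

def wait_for(check, tries=3, delay=1.0):
    for attempt in range(tries):
        if check():               # e.g. "does the expected file exist and hold enough data?"
            return True
        time.sleep(delay)         # the reader prefers gevent.sleep when it is available
    return False

print(wait_for(lambda: True))     # True on the first attempt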
|
882 | 881 | def setNextFile(self): |
|
883 | 882 | if self.fp != None: |
|
884 | 883 | self.fp.close() |
|
885 | 884 | |
|
886 | 885 | if self.online: |
|
887 | 886 | newFile = self.__setNextFileOnline() |
|
888 | 887 | else: |
|
889 | 888 | newFile = self.__setNextFileOffline() |
|
890 | 889 | |
|
891 | 890 | if not(newFile): |
|
892 | 891 | print '[Reading] No more files to read' |
|
893 | 892 | return 0 |
|
894 | 893 | |
|
895 | 894 | if self.verbose: |
|
896 | 895 | print '[Reading] Setting the file: %s' % self.filename |
|
897 | 896 | |
|
898 | 897 | self.__readFirstHeader() |
|
899 | 898 | self.nReadBlocks = 0 |
|
900 | 899 | return 1 |
|
901 | 900 | |
|
902 | 901 | def __waitNewBlock(self): |
|
903 | 902 | """ |
|
904 | 903 | Returns 1 if a new data block was found, 0 otherwise. |

905 | 904 | |

906 | 905 | If the reading mode is offline it always returns 0. |
|
907 | 906 | """ |
|
908 | 907 | if not self.online: |
|
909 | 908 | return 0 |
|
910 | 909 | |
|
911 | 910 | if (self.nReadBlocks >= self.processingHeaderObj.dataBlocksPerFile): |
|
912 | 911 | return 0 |
|
913 | 912 | |
|
914 | 913 | currentPointer = self.fp.tell() |
|
915 | 914 | |
|
916 | 915 | neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize |
|
917 | 916 | |
|
918 | 917 | for nTries in range( self.nTries ): |
|
919 | 918 | |
|
920 | 919 | self.fp.close() |
|
921 | 920 | self.fp = open( self.filename, 'rb' ) |
|
922 | 921 | self.fp.seek( currentPointer ) |
|
923 | 922 | |
|
924 | 923 | self.fileSize = os.path.getsize( self.filename ) |
|
925 | 924 | currentSize = self.fileSize - currentPointer |
|
926 | 925 | |
|
927 | 926 | if ( currentSize >= neededSize ): |
|
928 | 927 | self.basicHeaderObj.read(self.fp) |
|
929 | 928 | return 1 |
|
930 | 929 | |
|
931 | 930 | if self.fileSize == self.fileSizeByHeader: |
|
932 | 931 | # self.flagEoF = True |
|
933 | 932 | return 0 |
|
934 | 933 | |
|
935 | 934 | print "[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1) |
|
936 | 935 | sleep( self.delay ) |
|
937 | 936 | |
|
938 | 937 | |
|
939 | 938 | return 0 |
|
940 | 939 | |
|
941 | 940 | def waitDataBlock(self,pointer_location): |
|
942 | 941 | |
|
943 | 942 | currentPointer = pointer_location |
|
944 | 943 | |
|
945 | 944 | neededSize = self.processingHeaderObj.blockSize #+ self.basicHeaderSize |
|
946 | 945 | |
|
947 | 946 | for nTries in range( self.nTries ): |
|
948 | 947 | self.fp.close() |
|
949 | 948 | self.fp = open( self.filename, 'rb' ) |
|
950 | 949 | self.fp.seek( currentPointer ) |
|
951 | 950 | |
|
952 | 951 | self.fileSize = os.path.getsize( self.filename ) |
|
953 | 952 | currentSize = self.fileSize - currentPointer |
|
954 | 953 | |
|
955 | 954 | if ( currentSize >= neededSize ): |
|
956 | 955 | return 1 |
|
957 | 956 | |
|
958 | 957 | print "[Reading] Waiting %0.2f seconds for the next block, try %03d ..." % (self.delay, nTries+1) |
|
959 | 958 | sleep( self.delay ) |
|
960 | 959 | |
|
961 | 960 | return 0 |
|
962 | 961 | |
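Both __waitNewBlock and waitDataBlock decide whether another block can be read by comparing the bytes left after the current pointer with the bytes one block needs. A hedged sketch of that size test; the file name and sizes in the comment are invented:

# Sketch: is a complete block available after the current read pointer?
import os

def block_available(filename, current_pointer, block_size, basic_header_size=0):
    needed = block_size + basic_header_size                # bytes the next block requires
    remaining = os.path.getsize(filename) - current_pointer
    return remaining >= needed

# e.g. block_available("D2016314000.r", 48, 81120)         # illustrative values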
|
963 | 962 | def __jumpToLastBlock(self): |
|
964 | 963 | |
|
965 | 964 | if not(self.__isFirstTimeOnline): |
|
966 | 965 | return |
|
967 | 966 | |
|
968 | 967 | csize = self.fileSize - self.fp.tell() |
|
969 | 968 | blocksize = self.processingHeaderObj.blockSize |
|
970 | 969 | |
|
971 | 970 | #skip the first data block |
|
972 | 971 | if csize > self.processingHeaderObj.blockSize: |
|
973 | 972 | self.fp.seek(self.fp.tell() + blocksize) |
|
974 | 973 | else: |
|
975 | 974 | return |
|
976 | 975 | |
|
977 | 976 | csize = self.fileSize - self.fp.tell() |
|
978 | 977 | neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize |
|
979 | 978 | while True: |
|
980 | 979 | |
|
981 | 980 | if self.fp.tell()<self.fileSize: |
|
982 | 981 | self.fp.seek(self.fp.tell() + neededsize) |
|
983 | 982 | else: |
|
984 | 983 | self.fp.seek(self.fp.tell() - neededsize) |
|
985 | 984 | break |
|
986 | 985 | |
|
987 | 986 | # csize = self.fileSize - self.fp.tell() |
|
988 | 987 | # neededsize = self.processingHeaderObj.blockSize + self.basicHeaderSize |
|
989 | 988 | # factor = int(csize/neededsize) |
|
990 | 989 | # if factor > 0: |
|
991 | 990 | # self.fp.seek(self.fp.tell() + factor*neededsize) |
|
992 | 991 | |
|
993 | 992 | self.flagIsNewFile = 0 |
|
994 | 993 | self.__isFirstTimeOnline = 0 |
|
995 | 994 | |
|
996 | 995 | def __setNewBlock(self): |
|
997 | 996 | |
|
998 | 997 | if self.fp == None: |
|
999 | 998 | return 0 |
|
1000 | 999 | |
|
1001 | 1000 | # if self.online: |
|
1002 | 1001 | # self.__jumpToLastBlock() |
|
1003 | 1002 | |
|
1004 | 1003 | if self.flagIsNewFile: |
|
1005 | 1004 | self.lastUTTime = self.basicHeaderObj.utc |
|
1006 | 1005 | return 1 |
|
1007 | 1006 | |
|
1008 | 1007 | if self.realtime: |
|
1009 | 1008 | self.flagDiscontinuousBlock = 1 |
|
1010 | 1009 | if not(self.setNextFile()): |
|
1011 | 1010 | return 0 |
|
1012 | 1011 | else: |
|
1013 | 1012 | return 1 |
|
1014 | 1013 | |
|
1015 | 1014 | currentSize = self.fileSize - self.fp.tell() |
|
1016 | 1015 | neededSize = self.processingHeaderObj.blockSize + self.basicHeaderSize |
|
1017 | 1016 | |
|
1018 | 1017 | if (currentSize >= neededSize): |
|
1019 | 1018 | self.basicHeaderObj.read(self.fp) |
|
1020 | 1019 | self.lastUTTime = self.basicHeaderObj.utc |
|
1021 | 1020 | return 1 |
|
1022 | 1021 | |
|
1023 | 1022 | if self.__waitNewBlock(): |
|
1024 | 1023 | self.lastUTTime = self.basicHeaderObj.utc |
|
1025 | 1024 | return 1 |
|
1026 | 1025 | |
|
1027 | 1026 | if not(self.setNextFile()): |
|
1028 | 1027 | return 0 |
|
1029 | 1028 | |
|
1030 | 1029 | deltaTime = self.basicHeaderObj.utc - self.lastUTTime # |
|
1031 | 1030 | self.lastUTTime = self.basicHeaderObj.utc |
|
1032 | 1031 | |
|
1033 | 1032 | self.flagDiscontinuousBlock = 0 |
|
1034 | 1033 | |
|
1035 | 1034 | if deltaTime > self.maxTimeStep: |
|
1036 | 1035 | self.flagDiscontinuousBlock = 1 |
|
1037 | 1036 | |
|
1038 | 1037 | return 1 |
|
1039 | 1038 | |
|
1040 | 1039 | def readNextBlock(self): |
|
1041 | 1040 | |
|
1042 | 1041 | #Skip block out of startTime and endTime |
|
1043 | 1042 | while True: |
|
1044 | 1043 | if not(self.__setNewBlock()): |
|
1045 | 1044 | return 0 |
|
1046 | 1045 | |
|
1047 | 1046 | if not(self.readBlock()): |
|
1048 | 1047 | return 0 |
|
1049 | 1048 | |
|
1050 | 1049 | self.getBasicHeader() |
|
1051 | 1050 | |
|
1052 | 1051 | if not isTimeInRange(self.dataOut.datatime.time(), self.startTime, self.endTime): |
|
1053 | 1052 | |
|
1054 | 1053 | print "[Reading] Block No. %d/%d -> %s [Skipping]" %(self.nReadBlocks, |
|
1055 | 1054 | self.processingHeaderObj.dataBlocksPerFile, |
|
1056 | 1055 | self.dataOut.datatime.ctime()) |
|
1057 | 1056 | continue |
|
1058 | 1057 | |
|
1059 | 1058 | break |
|
1060 | 1059 | |
|
1061 |
|
|
|
1062 |
|
|
|
1063 |
|
|
|
1064 |
|
|
|
1060 | if self.verbose: | |
|
1061 | print "[Reading] Block No. %d/%d -> %s" %(self.nReadBlocks, | |
|
1062 | self.processingHeaderObj.dataBlocksPerFile, | |
|
1063 | self.dataOut.datatime.ctime()) | |
|
1065 | 1064 | return 1 |
|
1066 | 1065 | |
|
1067 | 1066 | def __readFirstHeader(self): |
|
1068 | 1067 | |
|
1069 | 1068 | self.basicHeaderObj.read(self.fp) |
|
1070 | 1069 | self.systemHeaderObj.read(self.fp) |
|
1071 | 1070 | self.radarControllerHeaderObj.read(self.fp) |
|
1072 | 1071 | self.processingHeaderObj.read(self.fp) |
|
1073 | 1072 | |
|
1074 | 1073 | self.firstHeaderSize = self.basicHeaderObj.size |
|
1075 | 1074 | |
|
1076 | 1075 | datatype = int(numpy.log2((self.processingHeaderObj.processFlags & PROCFLAG.DATATYPE_MASK))-numpy.log2(PROCFLAG.DATATYPE_CHAR)) |
|
1077 | 1076 | if datatype == 0: |
|
1078 | 1077 | datatype_str = numpy.dtype([('real','<i1'),('imag','<i1')]) |
|
1079 | 1078 | elif datatype == 1: |
|
1080 | 1079 | datatype_str = numpy.dtype([('real','<i2'),('imag','<i2')]) |
|
1081 | 1080 | elif datatype == 2: |
|
1082 | 1081 | datatype_str = numpy.dtype([('real','<i4'),('imag','<i4')]) |
|
1083 | 1082 | elif datatype == 3: |
|
1084 | 1083 | datatype_str = numpy.dtype([('real','<i8'),('imag','<i8')]) |
|
1085 | 1084 | elif datatype == 4: |
|
1086 | 1085 | datatype_str = numpy.dtype([('real','<f4'),('imag','<f4')]) |
|
1087 | 1086 | elif datatype == 5: |
|
1088 | 1087 | datatype_str = numpy.dtype([('real','<f8'),('imag','<f8')]) |
|
1089 | 1088 | else: |
|
1090 | 1089 | raise ValueError, 'Data type was not defined' |
|
1091 | 1090 | |
|
1092 | 1091 | self.dtype = datatype_str |
|
1093 | 1092 | #self.ippSeconds = 2 * 1000 * self.radarControllerHeaderObj.ipp / self.c |
|
1094 | 1093 | self.fileSizeByHeader = self.processingHeaderObj.dataBlocksPerFile * self.processingHeaderObj.blockSize + self.firstHeaderSize + self.basicHeaderSize*(self.processingHeaderObj.dataBlocksPerFile - 1) |
|
1095 | 1094 | # self.dataOut.channelList = numpy.arange(self.systemHeaderObj.numChannels) |
|
1096 | 1095 | # self.dataOut.channelIndexList = numpy.arange(self.systemHeaderObj.numChannels) |
|
1097 | 1096 | self.getBlockDimension() |
|
1098 | 1097 | |
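__readFirstHeader above turns the DATATYPE bits of processFlags into an index 0..5 and then into a paired (real, imag) numpy dtype, and computes the expected file size as one first header plus dataBlocksPerFile blocks with a basic header before every block except the first. A table-driven sketch of both ideas; the sizes in the arithmetic are invented for the example:

# Sketch: datatype index -> paired (real, imag) dtype, mirroring the if/elif chain above.
import numpy

DTYPE_BY_INDEX = {
    0: numpy.dtype([('real', '<i1'), ('imag', '<i1')]),
    1: numpy.dtype([('real', '<i2'), ('imag', '<i2')]),
    2: numpy.dtype([('real', '<i4'), ('imag', '<i4')]),
    3: numpy.dtype([('real', '<i8'), ('imag', '<i8')]),
    4: numpy.dtype([('real', '<f4'), ('imag', '<f4')]),
    5: numpy.dtype([('real', '<f8'), ('imag', '<f8')]),
}
print(DTYPE_BY_INDEX[4].itemsize)     # 8 bytes per complex sample

# Expected file size: firstHeader + N blocks + (N-1) basic headers (made-up sizes).
blocks_per_file, block_size, first_header, basic_header = 100, 81120, 228, 24
expected = blocks_per_file * block_size + first_header + basic_header * (blocks_per_file - 1)
print(expected)                       # 8114604 bytes for these made-up values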
|
1099 | 1098 | def __verifyFile(self, filename, msgFlag=True): |
|
1100 | 1099 | |
|
1101 | 1100 | msg = None |
|
1102 | 1101 | |
|
1103 | 1102 | try: |
|
1104 | 1103 | fp = open(filename, 'rb') |
|
1105 | 1104 | except IOError: |
|
1106 | 1105 | |
|
1107 | 1106 | if msgFlag: |
|
1108 | 1107 | print "[Reading] File %s can't be opened" % (filename) |
|
1109 | 1108 | |
|
1110 | 1109 | return False |
|
1111 | 1110 | |
|
1112 | 1111 | currentPosition = fp.tell() |
|
1113 | 1112 | neededSize = self.processingHeaderObj.blockSize + self.firstHeaderSize |
|
1114 | 1113 | |
|
1115 | 1114 | if neededSize == 0: |
|
1116 | 1115 | basicHeaderObj = BasicHeader(LOCALTIME) |
|
1117 | 1116 | systemHeaderObj = SystemHeader() |
|
1118 | 1117 | radarControllerHeaderObj = RadarControllerHeader() |
|
1119 | 1118 | processingHeaderObj = ProcessingHeader() |
|
1120 | 1119 | |
|
1121 | 1120 | if not( basicHeaderObj.read(fp) ): |
|
1122 | 1121 | fp.close() |
|
1123 | 1122 | return False |
|
1124 | 1123 | |
|
1125 | 1124 | if not( systemHeaderObj.read(fp) ): |
|
1126 | 1125 | fp.close() |
|
1127 | 1126 | return False |
|
1128 | 1127 | |
|
1129 | 1128 | if not( radarControllerHeaderObj.read(fp) ): |
|
1130 | 1129 | fp.close() |
|
1131 | 1130 | return False |
|
1132 | 1131 | |
|
1133 | 1132 | if not( processingHeaderObj.read(fp) ): |
|
1134 | 1133 | fp.close() |
|
1135 | 1134 | return False |
|
1136 | 1135 | |
|
1137 | 1136 | neededSize = processingHeaderObj.blockSize + basicHeaderObj.size |
|
1138 | 1137 | else: |
|
1139 | 1138 | msg = "[Reading] Skipping the file %s because it doesn't have enough data" %filename |
|
1140 | 1139 | |
|
1141 | 1140 | fp.close() |
|
1142 | 1141 | |
|
1143 | 1142 | fileSize = os.path.getsize(filename) |
|
1144 | 1143 | currentSize = fileSize - currentPosition |
|
1145 | 1144 | |
|
1146 | 1145 | if currentSize < neededSize: |
|
1147 | 1146 | if msgFlag and (msg != None): |
|
1148 | 1147 | print msg |
|
1149 | 1148 | return False |
|
1150 | 1149 | |
|
1151 | 1150 | return True |
|
1152 | 1151 | |
|
1153 | 1152 | def findDatafiles(self, path, startDate=None, endDate=None, expLabel='', ext='.r', walk=True, include_path=False): |
|
1154 | 1153 | |
|
1155 | 1154 | path_empty = True |
|
1156 | 1155 | |
|
1157 | 1156 | dateList = [] |
|
1158 | 1157 | pathList = [] |
|
1159 | 1158 | |
|
1160 | 1159 | multi_path = path.split(',') |
|
1161 | 1160 | |
|
1162 | 1161 | if not walk: |
|
1163 | 1162 | |
|
1164 | 1163 | for single_path in multi_path: |
|
1165 | 1164 | |
|
1166 | 1165 | if not os.path.isdir(single_path): |
|
1167 | 1166 | continue |
|
1168 | 1167 | |
|
1169 | 1168 | fileList = glob.glob1(single_path, "*"+ext) |
|
1170 | 1169 | |
|
1171 | 1170 | if not fileList: |
|
1172 | 1171 | continue |
|
1173 | 1172 | |
|
1174 | 1173 | path_empty = False |
|
1175 | 1174 | |
|
1176 | 1175 | fileList.sort() |
|
1177 | 1176 | |
|
1178 | 1177 | for thisFile in fileList: |
|
1179 | 1178 | |
|
1180 | 1179 | if not os.path.isfile(os.path.join(single_path, thisFile)): |
|
1181 | 1180 | continue |
|
1182 | 1181 | |
|
1183 | 1182 | if not isRadarFile(thisFile): |
|
1184 | 1183 | continue |
|
1185 | 1184 | |
|
1186 | 1185 | if not isFileInDateRange(thisFile, startDate, endDate): |
|
1187 | 1186 | continue |
|
1188 | 1187 | |
|
1189 | 1188 | thisDate = getDateFromRadarFile(thisFile) |
|
1190 | 1189 | |
|
1191 | 1190 | if thisDate in dateList: |
|
1192 | 1191 | continue |
|
1193 | 1192 | |
|
1194 | 1193 | dateList.append(thisDate) |
|
1195 | 1194 | pathList.append(single_path) |
|
1196 | 1195 | |
|
1197 | 1196 | else: |
|
1198 | 1197 | for single_path in multi_path: |
|
1199 | 1198 | |
|
1200 | 1199 | if not os.path.isdir(single_path): |
|
1201 | 1200 | continue |
|
1202 | 1201 | |
|
1203 | 1202 | dirList = [] |
|
1204 | 1203 | |
|
1205 | 1204 | for thisPath in os.listdir(single_path): |
|
1206 | 1205 | |
|
1207 | 1206 | if not os.path.isdir(os.path.join(single_path,thisPath)): |
|
1208 | 1207 | continue |
|
1209 | 1208 | |
|
1210 | 1209 | if not isRadarFolder(thisPath): |
|
1211 | 1210 | continue |
|
1212 | 1211 | |
|
1213 | 1212 | if not isFolderInDateRange(thisPath, startDate, endDate): |
|
1214 | 1213 | continue |
|
1215 | 1214 | |
|
1216 | 1215 | dirList.append(thisPath) |
|
1217 | 1216 | |
|
1218 | 1217 | if not dirList: |
|
1219 | 1218 | continue |
|
1220 | 1219 | |
|
1221 | 1220 | dirList.sort() |
|
1222 | 1221 | |
|
1223 | 1222 | for thisDir in dirList: |
|
1224 | 1223 | |
|
1225 | 1224 | datapath = os.path.join(single_path, thisDir, expLabel) |
|
1226 | 1225 | fileList = glob.glob1(datapath, "*"+ext) |
|
1227 | 1226 | |
|
1228 | 1227 | if not fileList: |
|
1229 | 1228 | continue |
|
1230 | 1229 | |
|
1231 | 1230 | path_empty = False |
|
1232 | 1231 | |
|
1233 | 1232 | thisDate = getDateFromRadarFolder(thisDir) |
|
1234 | 1233 | |
|
1235 | 1234 | pathList.append(datapath) |
|
1236 | 1235 | dateList.append(thisDate) |
|
1237 | 1236 | |
|
1238 | 1237 | dateList.sort() |
|
1239 | 1238 | |
|
1240 | 1239 | if walk: |
|
1241 | 1240 | pattern_path = os.path.join(multi_path[0], "[dYYYYDDD]", expLabel) |
|
1242 | 1241 | else: |
|
1243 | 1242 | pattern_path = multi_path[0] |
|
1244 | 1243 | |
|
1245 | 1244 | if path_empty: |
|
1246 | 1245 | print "[Reading] No *%s files in %s for %s to %s" %(ext, pattern_path, startDate, endDate) |
|
1247 | 1246 | else: |
|
1248 | 1247 | if not dateList: |
|
1249 | 1248 | print "[Reading] Date range selected invalid [%s - %s]: No *%s files in %s" %(startDate, endDate, ext, path) |
|
1250 | 1249 | |
|
1251 | 1250 | if include_path: |
|
1252 | 1251 | return dateList, pathList |
|
1253 | 1252 | |
|
1254 | 1253 | return dateList |
|
1255 | 1254 | |
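findDatafiles above walks day folders named with the dYYYYDDD pattern shown in pattern_path and keeps only those whose date falls inside [startDate, endDate]. A compact sketch of that folder-name test, with a hypothetical parser standing in for isRadarFolder/getDateFromRadarFolder:

# Sketch: parse a dYYYYDDD folder name and test it against a date range (hypothetical helper).
import datetime
import re

def folder_date(name):
    m = re.match(r"[dD](\d{4})(\d{3})$", name)
    if not m:
        return None
    year, doy = int(m.group(1)), int(m.group(2))
    return datetime.date(year, 1, 1) + datetime.timedelta(doy - 1)

d = folder_date("d2016314")
print(d)                                                               # 2016-11-09
print(datetime.date(2016, 11, 1) <= d <= datetime.date(2016, 11, 30))  # True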
|
1256 | 1255 | def setup(self, |
|
1257 | 1256 | path=None, |
|
1258 | 1257 | startDate=None, |
|
1259 | 1258 | endDate=None, |
|
1260 | 1259 | startTime=datetime.time(0,0,0), |
|
1261 | 1260 | endTime=datetime.time(23,59,59), |
|
1262 | 1261 | set=None, |
|
1263 | 1262 | expLabel = "", |
|
1264 | 1263 | ext = None, |
|
1265 | 1264 | online = False, |
|
1266 | 1265 | delay = 60, |
|
1267 | 1266 | walk = True, |
|
1268 | 1267 | getblock = False, |
|
1269 | 1268 | nTxs = 1, |
|
1270 | 1269 | realtime=False, |
|
1271 | 1270 | blocksize=None, |
|
1272 | 1271 | blocktime=None, |
|
1273 | 1272 | queue=None, |
|
1274 | 1273 | skip=None, |
|
1275 | 1274 | cursor=None, |
|
1276 | 1275 | warnings=True, |
|
1277 | 1276 | verbose=True): |
|
1278 | 1277 | |
|
1279 | 1278 | if path == None: |
|
1280 | 1279 | raise ValueError, "[Reading] The path is not valid" |
|
1281 | 1280 | |
|
1282 | 1281 | if ext == None: |
|
1283 | 1282 | ext = self.ext |
|
1284 | 1283 | |
|
1285 | 1284 | if online: |
|
1286 | 1285 | print "[Reading] Searching files in online mode..." |
|
1287 | 1286 | |
|
1288 | 1287 | for nTries in range( self.nTries ): |
|
1289 | 1288 | fullpath, foldercounter, file, year, doy, set = self.__searchFilesOnLine(path=path, expLabel=expLabel, ext=ext, walk=walk, set=set) |
|
1290 | 1289 | |
|
1291 | 1290 | if fullpath: |
|
1292 | 1291 | break |
|
1293 | 1292 | |
|
1294 | 1293 | print '[Reading] Waiting %0.2f sec for a valid file in %s: try %02d ...' % (self.delay, path, nTries+1) |
|
1295 | 1294 | sleep( self.delay ) |
|
1296 | 1295 | |
|
1297 | 1296 | if not(fullpath): |
|
1298 | 1297 | print "[Reading] There isn't any valid file in %s" % path |
|
1299 | 1298 | return |
|
1300 | 1299 | |
|
1301 | 1300 | self.year = year |
|
1302 | 1301 | self.doy = doy |
|
1303 | 1302 | self.set = set - 1 |
|
1304 | 1303 | self.path = path |
|
1305 | 1304 | self.foldercounter = foldercounter |
|
1306 | 1305 | last_set = None |
|
1307 | 1306 | |
|
1308 | 1307 | else: |
|
1309 | 1308 | print "[Reading] Searching files in offline mode ..." |
|
1310 | 1309 | pathList, filenameList = self.__searchFilesOffLine(path, startDate=startDate, endDate=endDate, |
|
1311 | 1310 | startTime=startTime, endTime=endTime, |
|
1312 | 1311 | set=set, expLabel=expLabel, ext=ext, |
|
1313 | 1312 | walk=walk, cursor=cursor, |
|
1314 | 1313 | skip=skip, queue=queue) |
|
1315 | 1314 | |
|
1316 | 1315 | if not(pathList): |
|
1317 | 1316 | # print "[Reading] No *%s files in %s (%s - %s)"%(ext, path, |
|
1318 | 1317 | # datetime.datetime.combine(startDate,startTime).ctime(), |
|
1319 | 1318 | # datetime.datetime.combine(endDate,endTime).ctime()) |
|
1320 | 1319 | |
|
1321 | 1320 | # sys.exit(-1) |
|
1322 | 1321 | |
|
1323 | 1322 | self.fileIndex = -1 |
|
1324 | 1323 | self.pathList = [] |
|
1325 | 1324 | self.filenameList = [] |
|
1326 | 1325 | return |
|
1327 | 1326 | |
|
1328 | 1327 | self.fileIndex = -1 |
|
1329 | 1328 | self.pathList = pathList |
|
1330 | 1329 | self.filenameList = filenameList |
|
1331 | 1330 | file_name = os.path.basename(filenameList[-1]) |
|
1332 | 1331 | basename, ext = os.path.splitext(file_name) |
|
1333 | 1332 | last_set = int(basename[-3:]) |
|
1334 | 1333 | |
|
1335 | 1334 | self.online = online |
|
1336 | 1335 | self.realtime = realtime |
|
1337 | 1336 | self.delay = delay |
|
1338 | 1337 | ext = ext.lower() |
|
1339 | 1338 | self.ext = ext |
|
1340 | 1339 | self.getByBlock = getblock |
|
1341 | 1340 | self.nTxs = nTxs |
|
1342 | 1341 | self.startTime = startTime |
|
1343 | 1342 | self.endTime = endTime |
|
1344 | 1343 | |
|
1345 | 1344 | #Added----------------- |
|
1346 | 1345 | self.selBlocksize = blocksize |
|
1347 | 1346 | self.selBlocktime = blocktime |
|
1348 | 1347 | |
|
1349 | 1348 | # Verbose----------- |
|
1350 | 1349 | self.verbose = verbose |
|
1351 | 1350 | self.warnings = warnings |
|
1352 | 1351 | |
|
1353 | 1352 | if not(self.setNextFile()): |
|
1354 | 1353 | if (startDate!=None) and (endDate!=None): |
|
1355 | 1354 | print "[Reading] No files in range: %s - %s" %(datetime.datetime.combine(startDate,startTime).ctime(), datetime.datetime.combine(endDate,endTime).ctime()) |
|
1356 | 1355 | elif startDate != None: |
|
1357 | 1356 | print "[Reading] No files in range: %s" %(datetime.datetime.combine(startDate,startTime).ctime()) |
|
1358 | 1357 | else: |
|
1359 | 1358 | print "[Reading] No files" |
|
1360 | 1359 | |
|
1361 | 1360 | self.fileIndex = -1 |
|
1362 | 1361 | self.pathList = [] |
|
1363 | 1362 | self.filenameList = [] |
|
1364 | 1363 | return |
|
1365 | 1364 | |
|
1366 | 1365 | # self.getBasicHeader() |
|
1367 | 1366 | |
|
1368 | 1367 | if last_set != None: |
|
1369 | 1368 | self.dataOut.last_block = last_set * self.processingHeaderObj.dataBlocksPerFile + self.basicHeaderObj.dataBlock |
|
1370 | 1369 | return |
|
1371 | 1370 | |
|
1372 | 1371 | def getBasicHeader(self): |
|
1373 | 1372 | |
|
1374 | 1373 | self.dataOut.utctime = self.basicHeaderObj.utc + self.basicHeaderObj.miliSecond/1000. + self.profileIndex * self.radarControllerHeaderObj.ippSeconds |
|
1375 | 1374 | |
|
1376 | 1375 | self.dataOut.flagDiscontinuousBlock = self.flagDiscontinuousBlock |
|
1377 | 1376 | |
|
1378 | 1377 | self.dataOut.timeZone = self.basicHeaderObj.timeZone |
|
1379 | 1378 | |
|
1380 | 1379 | self.dataOut.dstFlag = self.basicHeaderObj.dstFlag |
|
1381 | 1380 | |
|
1382 | 1381 | self.dataOut.errorCount = self.basicHeaderObj.errorCount |
|
1383 | 1382 | |
|
1384 | 1383 | self.dataOut.useLocalTime = self.basicHeaderObj.useLocalTime |
|
1385 | 1384 | |
|
1386 | 1385 | self.dataOut.ippSeconds = self.radarControllerHeaderObj.ippSeconds/self.nTxs |
|
1387 | 1386 | |
|
1388 | 1387 | # self.dataOut.nProfiles = self.processingHeaderObj.profilesPerBlock*self.nTxs |
|
1389 | 1388 | |
|
1390 | 1389 | |
|
1391 | 1390 | def getFirstHeader(self): |
|
1392 | 1391 | |
|
1393 | 1392 | raise NotImplementedError |
|
1394 | 1393 | |
|
1395 | 1394 | def getData(self): |
|
1396 | 1395 | |
|
1397 | 1396 | raise NotImplementedError |
|
1398 | 1397 | |
|
1399 | 1398 | def hasNotDataInBuffer(self): |
|
1400 | 1399 | |
|
1401 | 1400 | raise NotImplementedError |
|
1402 | 1401 | |
|
1403 | 1402 | def readBlock(self): |
|
1404 | 1403 | |
|
1405 | 1404 | raise NotImplementedError |
|
1406 | 1405 | |
|
1407 | 1406 | def isEndProcess(self): |
|
1408 | 1407 | |
|
1409 | 1408 | return self.flagNoMoreFiles |
|
1410 | 1409 | |
|
1411 | 1410 | def printReadBlocks(self): |
|
1412 | 1411 | |
|
1413 | 1412 | print "[Reading] Number of read blocks per file %04d" %self.nReadBlocks |
|
1414 | 1413 | |
|
1415 | 1414 | def printTotalBlocks(self): |
|
1416 | 1415 | |
|
1417 | 1416 | print "[Reading] Number of read blocks %04d" %self.nTotalBlocks |
|
1418 | 1417 | |
|
1419 | 1418 | def printNumberOfBlock(self): |
|
1420 | 1419 | 'SPAM!' |
|
1421 | 1420 | |
|
1422 | 1421 | # if self.flagIsNewBlock: |
|
1423 | 1422 | # print "[Reading] Block No. %d/%d -> %s" %(self.nReadBlocks, |
|
1424 | 1423 | # self.processingHeaderObj.dataBlocksPerFile, |
|
1425 | 1424 | # self.dataOut.datatime.ctime()) |
|
1426 | 1425 | |
|
1427 | 1426 | def printInfo(self): |
|
1428 | 1427 | |
|
1429 | 1428 | if self.__printInfo == False: |
|
1430 | 1429 | return |
|
1431 | 1430 | |
|
1432 | 1431 | self.basicHeaderObj.printInfo() |
|
1433 | 1432 | self.systemHeaderObj.printInfo() |
|
1434 | 1433 | self.radarControllerHeaderObj.printInfo() |
|
1435 | 1434 | self.processingHeaderObj.printInfo() |
|
1436 | 1435 | |
|
1437 | 1436 | self.__printInfo = False |
|
1438 | 1437 | |
|
1439 | 1438 | |
|
1440 | 1439 | def run(self, |
|
1441 | 1440 | path=None, |
|
1442 | 1441 | startDate=None, |
|
1443 | 1442 | endDate=None, |
|
1444 | 1443 | startTime=datetime.time(0,0,0), |
|
1445 | 1444 | endTime=datetime.time(23,59,59), |
|
1446 | 1445 | set=None, |
|
1447 | 1446 | expLabel = "", |
|
1448 | 1447 | ext = None, |
|
1449 | 1448 | online = False, |
|
1450 | 1449 | delay = 60, |
|
1451 | 1450 | walk = True, |
|
1452 | 1451 | getblock = False, |
|
1453 | 1452 | nTxs = 1, |
|
1454 | 1453 | realtime=False, |
|
1455 | 1454 | blocksize=None, |
|
1456 | 1455 | blocktime=None, |
|
1457 | 1456 | queue=None, |
|
1458 | 1457 | skip=None, |
|
1459 | 1458 | cursor=None, |
|
1460 | 1459 | warnings=True, |
|
1461 | 1460 | verbose=True, **kwargs): |
|
1462 | 1461 | |
|
1463 | 1462 | if not(self.isConfig): |
|
1464 | 1463 | # self.dataOut = dataOut |
|
1465 | 1464 | self.setup( path=path, |
|
1466 | 1465 | startDate=startDate, |
|
1467 | 1466 | endDate=endDate, |
|
1468 | 1467 | startTime=startTime, |
|
1469 | 1468 | endTime=endTime, |
|
1470 | 1469 | set=set, |
|
1471 | 1470 | expLabel=expLabel, |
|
1472 | 1471 | ext=ext, |
|
1473 | 1472 | online=online, |
|
1474 | 1473 | delay=delay, |
|
1475 | 1474 | walk=walk, |
|
1476 | 1475 | getblock=getblock, |
|
1477 | 1476 | nTxs=nTxs, |
|
1478 | 1477 | realtime=realtime, |
|
1479 | 1478 | blocksize=blocksize, |
|
1480 | 1479 | blocktime=blocktime, |
|
1481 | 1480 | queue=queue, |
|
1482 | 1481 | skip=skip, |
|
1483 | 1482 | cursor=cursor, |
|
1484 | 1483 | warnings=warnings, |
|
1485 | 1484 | verbose=verbose) |
|
1486 | 1485 | self.isConfig = True |
|
1487 | 1486 | |
|
1488 | 1487 | self.getData() |
|
1489 | 1488 | |
|
1490 | 1489 | class JRODataWriter(JRODataIO): |
|
1491 | 1490 | |
|
1492 | 1491 | """ |
|
1493 | 1492 | This class writes data to processed files (.r or .pdata). The data |

1494 | 1493 | is always written in blocks. |
|
1495 | 1494 | """ |
|
1496 | 1495 | |
|
1497 | 1496 | blockIndex = 0 |
|
1498 | 1497 | |
|
1499 | 1498 | path = None |
|
1500 | 1499 | |
|
1501 | 1500 | setFile = None |
|
1502 | 1501 | |
|
1503 | 1502 | profilesPerBlock = None |
|
1504 | 1503 | |
|
1505 | 1504 | blocksPerFile = None |
|
1506 | 1505 | |
|
1507 | 1506 | nWriteBlocks = 0 |
|
1508 | 1507 | |
|
1509 | 1508 | fileDate = None |
|
1510 | 1509 | |
|
1511 | 1510 | def __init__(self, dataOut=None): |
|
1512 | 1511 | raise NotImplementedError |
|
1513 | 1512 | |
|
1514 | 1513 | |
|
1515 | 1514 | def hasAllDataInBuffer(self): |
|
1516 | 1515 | raise NotImplementedError |
|
1517 | 1516 | |
|
1518 | 1517 | |
|
1519 | 1518 | def setBlockDimension(self): |
|
1520 | 1519 | raise NotImplementedError |
|
1521 | 1520 | |
|
1522 | 1521 | |
|
1523 | 1522 | def writeBlock(self): |
|
1524 | 1523 | raise NotImplementedError |
|
1525 | 1524 | |
|
1526 | 1525 | |
|
1527 | 1526 | def putData(self): |
|
1528 | 1527 | raise NotImplementedError |
|
1529 | 1528 | |
|
1530 | 1529 | |
|
1531 | 1530 | def getProcessFlags(self): |
|
1532 | 1531 | |
|
1533 | 1532 | processFlags = 0 |
|
1534 | 1533 | |
|
1535 | 1534 | dtype_index = get_dtype_index(self.dtype) |
|
1536 | 1535 | procflag_dtype = get_procflag_dtype(dtype_index) |
|
1537 | 1536 | |
|
1538 | 1537 | processFlags += procflag_dtype |
|
1539 | 1538 | |
|
1540 | 1539 | if self.dataOut.flagDecodeData: |
|
1541 | 1540 | processFlags += PROCFLAG.DECODE_DATA |
|
1542 | 1541 | |
|
1543 | 1542 | if self.dataOut.flagDeflipData: |
|
1544 | 1543 | processFlags += PROCFLAG.DEFLIP_DATA |
|
1545 | 1544 | |
|
1546 | 1545 | if self.dataOut.code is not None: |
|
1547 | 1546 | processFlags += PROCFLAG.DEFINE_PROCESS_CODE |
|
1548 | 1547 | |
|
1549 | 1548 | if self.dataOut.nCohInt > 1: |
|
1550 | 1549 | processFlags += PROCFLAG.COHERENT_INTEGRATION |
|
1551 | 1550 | |
|
1552 | 1551 | if self.dataOut.type == "Spectra": |
|
1553 | 1552 | if self.dataOut.nIncohInt > 1: |
|
1554 | 1553 | processFlags += PROCFLAG.INCOHERENT_INTEGRATION |
|
1555 | 1554 | |
|
1556 | 1555 | if self.dataOut.data_dc is not None: |
|
1557 | 1556 | processFlags += PROCFLAG.SAVE_CHANNELS_DC |
|
1558 | 1557 | |
|
1559 | 1558 | if self.dataOut.flagShiftFFT: |
|
1560 | 1559 | processFlags += PROCFLAG.SHIFT_FFT_DATA |
|
1561 | 1560 | |
|
1562 | 1561 | return processFlags |
|
1563 | 1562 | |
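getProcessFlags above packs several PROCFLAG constants into one integer by addition, which works because each constant occupies its own bit; the same packing is often written with a bitwise OR. A toy sketch with invented flag values (the real PROCFLAG constants are defined elsewhere in the library):

# Toy sketch of bit-flag packing; these constant values are invented for the example.
DECODE_DATA          = 0x00000004
COHERENT_INTEGRATION = 0x00000010

flags = 0
flags |= DECODE_DATA              # equivalent to the "+=" above when each flag is a distinct bit
flags |= COHERENT_INTEGRATION
print(bool(flags & DECODE_DATA))  # True: each flag can be tested back independently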
|
1564 | 1563 | def setBasicHeader(self): |
|
1565 | 1564 | |
|
1566 | 1565 | self.basicHeaderObj.size = self.basicHeaderSize #bytes |
|
1567 | 1566 | self.basicHeaderObj.version = self.versionFile |
|
1568 | 1567 | self.basicHeaderObj.dataBlock = self.nTotalBlocks |
|
1569 | 1568 | |
|
1570 | 1569 | utc = numpy.floor(self.dataOut.utctime) |
|
1571 | 1570 | milisecond = (self.dataOut.utctime - utc)* 1000.0 |
|
1572 | 1571 | |
|
1573 | 1572 | self.basicHeaderObj.utc = utc |
|
1574 | 1573 | self.basicHeaderObj.miliSecond = milisecond |
|
1575 | 1574 | self.basicHeaderObj.timeZone = self.dataOut.timeZone |
|
1576 | 1575 | self.basicHeaderObj.dstFlag = self.dataOut.dstFlag |
|
1577 | 1576 | self.basicHeaderObj.errorCount = self.dataOut.errorCount |
|
1578 | 1577 | |
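setBasicHeader above stores the timestamp as whole seconds plus a separate millisecond field. A short worked example of that split, with an arbitrary utctime value:

# Sketch: split a float UTC timestamp into integer seconds and milliseconds.
import numpy

utctime = 1478692800.250                  # arbitrary example timestamp
utc = numpy.floor(utctime)                # 1478692800.0
milisecond = (utctime - utc) * 1000.0     # 250.0
print((utc, milisecond))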
|
1579 | 1578 | def setFirstHeader(self): |
|
1580 | 1579 | """ |
|
1581 | 1580 | Gets a copy of the First Header |
|
1582 | 1581 | |
|
1583 | 1582 | Affected: |
|
1584 | 1583 | |
|
1585 | 1584 | self.basicHeaderObj |
|
1586 | 1585 | self.systemHeaderObj |
|
1587 | 1586 | self.radarControllerHeaderObj |
|
1588 | 1587 | self.processingHeaderObj |
|
1589 | 1588 | |
|
1590 | 1589 | Return: |
|
1591 | 1590 | None |
|
1592 | 1591 | """ |
|
1593 | 1592 | |
|
1594 | 1593 | raise NotImplementedError |
|
1595 | 1594 | |
|
1596 | 1595 | def __writeFirstHeader(self): |
|
1597 | 1596 | """ |
|
1598 | 1597 | Writes the first header of the file, i.e. the Basic header plus the Long header (SystemHeader, RadarControllerHeader, ProcessingHeader) |
|
1599 | 1598 | |
|
1600 | 1599 | Affected: |
|
1601 | 1600 | __dataType |
|
1602 | 1601 | |
|
1603 | 1602 | Return: |
|
1604 | 1603 | None |
|
1605 | 1604 | """ |
|
1606 | 1605 | |
|
1607 | 1606 | # COMPUTE PARAMETERS |
|
1608 | 1607 | |
|
1609 | 1608 | sizeLongHeader = self.systemHeaderObj.size + self.radarControllerHeaderObj.size + self.processingHeaderObj.size |
|
1610 | 1609 | self.basicHeaderObj.size = self.basicHeaderSize + sizeLongHeader |
|
1611 | 1610 | |
|
1612 | 1611 | self.basicHeaderObj.write(self.fp) |
|
1613 | 1612 | self.systemHeaderObj.write(self.fp) |
|
1614 | 1613 | self.radarControllerHeaderObj.write(self.fp) |
|
1615 | 1614 | self.processingHeaderObj.write(self.fp) |
|
1616 | 1615 | |
|
1617 | 1616 | def __setNewBlock(self): |
|
1618 | 1617 | """ |
|
1619 | 1618 | If this is a new file, writes the First Header; otherwise writes only the Basic Header |

1620 | 1619 | |

1621 | 1620 | Return: |

1622 | 1621 | 0 : if nothing could be written |

1623 | 1622 | 1 : if the Basic Header or the First Header was written |
|
1624 | 1623 | """ |
|
1625 | 1624 | if self.fp == None: |
|
1626 | 1625 | self.setNextFile() |
|
1627 | 1626 | |
|
1628 | 1627 | if self.flagIsNewFile: |
|
1629 | 1628 | return 1 |
|
1630 | 1629 | |
|
1631 | 1630 | if self.blockIndex < self.processingHeaderObj.dataBlocksPerFile: |
|
1632 | 1631 | self.basicHeaderObj.write(self.fp) |
|
1633 | 1632 | return 1 |
|
1634 | 1633 | |
|
1635 | 1634 | if not( self.setNextFile() ): |
|
1636 | 1635 | return 0 |
|
1637 | 1636 | |
|
1638 | 1637 | return 1 |
|
1639 | 1638 | |
|
1640 | 1639 | |
|
1641 | 1640 | def writeNextBlock(self): |
|
1642 | 1641 | """ |
|
1643 | 1642 | Selects the next data block and writes it to a file |

1644 | 1643 | |

1645 | 1644 | Return: |

1646 | 1645 | 0 : if the data block could not be written |

1647 | 1646 | 1 : if the data block was written successfully |
|
1648 | 1647 | """ |
|
1649 | 1648 | if not( self.__setNewBlock() ): |
|
1650 | 1649 | return 0 |
|
1651 | 1650 | |
|
1652 | 1651 | self.writeBlock() |
|
1653 | 1652 | |
|
1654 | 1653 | print "[Writing] Block No. %d/%d" %(self.blockIndex, |
|
1655 | 1654 | self.processingHeaderObj.dataBlocksPerFile) |
|
1656 | 1655 | |
|
1657 | 1656 | return 1 |
|
1658 | 1657 | |
|
1659 | 1658 | def setNextFile(self): |
|
1660 | 1659 | """ |
|
1661 | 1660 | Determines the next file to be written |
|
1662 | 1661 | |
|
1663 | 1662 | Affected: |
|
1664 | 1663 | self.filename |
|
1665 | 1664 | self.subfolder |
|
1666 | 1665 | self.fp |
|
1667 | 1666 | self.setFile |
|
1668 | 1667 | self.flagIsNewFile |
|
1669 | 1668 | |
|
1670 | 1669 | Return: |
|
1671 | 1670 | 0 : if the file cannot be written |

1672 | 1671 | 1 : if the file is ready to be written |
|
1673 | 1672 | """ |
|
1674 | 1673 | ext = self.ext |
|
1675 | 1674 | path = self.path |
|
1676 | 1675 | |
|
1677 | 1676 | if self.fp != None: |
|
1678 | 1677 | self.fp.close() |
|
1679 | 1678 | |
|
1680 | 1679 | timeTuple = time.localtime( self.dataOut.utctime) |
|
1681 | 1680 | subfolder = 'd%4.4d%3.3d' % (timeTuple.tm_year,timeTuple.tm_yday) |
|
1682 | 1681 | |
|
1683 | 1682 | fullpath = os.path.join( path, subfolder ) |
|
1684 | 1683 | setFile = self.setFile |
|
1685 | 1684 | |
|
1686 | 1685 | if not( os.path.exists(fullpath) ): |
|
1687 | 1686 | os.mkdir(fullpath) |
|
1688 | 1687 | setFile = -1 #initialize the set counter |
|
1689 | 1688 | else: |
|
1690 | 1689 | filesList = os.listdir( fullpath ) |
|
1691 | 1690 | if len( filesList ) > 0: |
|
1692 | 1691 | filesList = sorted( filesList, key=str.lower ) |
|
1693 | 1692 | filen = filesList[-1] |
|
1694 | 1693 | # the filename must have the following format |
|
1695 | 1694 | # 0 1234 567 89A BCDE (hex) |
|
1696 | 1695 | # x YYYY DDD SSS .ext |
|
1697 | 1696 | if isNumber( filen[8:11] ): |
|
1698 | 1697 | setFile = int( filen[8:11] ) #initialize the set counter with the set of the last file |
|
1699 | 1698 | else: |
|
1700 | 1699 | setFile = -1 |
|
1701 | 1700 | else: |
|
1702 | 1701 | setFile = -1 #initialize the set counter |
|
1703 | 1702 | |
|
1704 | 1703 | setFile += 1 |
|
1705 | 1704 | |
|
1706 | 1705 | #If this is a new day it resets some values |
|
1707 | 1706 | if self.dataOut.datatime.date() > self.fileDate: |
|
1708 | 1707 | setFile = 0 |
|
1709 | 1708 | self.nTotalBlocks = 0 |
|
1710 | 1709 | |
|
1711 | 1710 | filen = '%s%4.4d%3.3d%3.3d%s' % (self.optchar, timeTuple.tm_year, timeTuple.tm_yday, setFile, ext ) |
|
1712 | 1711 | |
|
1713 | 1712 | filename = os.path.join( path, subfolder, filen ) |
|
1714 | 1713 | |
|
1715 | 1714 | fp = open( filename,'wb' ) |
|
1716 | 1715 | |
|
1717 | 1716 | self.blockIndex = 0 |
|
1718 | 1717 | |
|
1719 | 1718 | #saving attributes |
|
1720 | 1719 | self.filename = filename |
|
1721 | 1720 | self.subfolder = subfolder |
|
1722 | 1721 | self.fp = fp |
|
1723 | 1722 | self.setFile = setFile |
|
1724 | 1723 | self.flagIsNewFile = 1 |
|
1725 | 1724 | self.fileDate = self.dataOut.datatime.date() |
|
1726 | 1725 | |
|
1727 | 1726 | self.setFirstHeader() |
|
1728 | 1727 | |
|
1729 | 1728 | print '[Writing] Opening file: %s'%self.filename |
|
1730 | 1729 | |
|
1731 | 1730 | self.__writeFirstHeader() |
|
1732 | 1731 | |
|
1733 | 1732 | return 1 |
|
1734 | 1733 | |
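setNextFile above assembles the output name from the option character, the year, the day of year and the set counter with fixed widths, matching the x YYYY DDD SSS .ext comment earlier in the method. A quick sketch with placeholder values:

# Sketch of the output-name format string used above (values are placeholders).
import time

optchar, ext = 'D', '.r'
timeTuple = time.localtime(1478692800)    # arbitrary epoch second
setFile = 7
filen = '%s%4.4d%3.3d%3.3d%s' % (optchar, timeTuple.tm_year, timeTuple.tm_yday, setFile, ext)
print(filen)                              # e.g. 'D2016314007.r' (day of year depends on local timezone)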
|
1735 | 1734 | def setup(self, dataOut, path, blocksPerFile, profilesPerBlock=64, set=None, ext=None, datatype=4): |
|
1736 | 1735 | """ |
|
1737 | 1736 | Sets the format in which the data will be saved and writes the First Header |
|
1738 | 1737 | |
|
1739 | 1738 | Inputs: |
|
1740 | 1739 | path : directory where data will be saved |
|
1741 | 1740 | profilesPerBlock : number of profiles per block |
|
1742 | 1741 | set : initial file set |
|
1743 | 1742 | datatype : An integer number that defines data type: |
|
1744 | 1743 | 0 : int8 (1 byte) |
|
1745 | 1744 | 1 : int16 (2 bytes) |
|
1746 | 1745 | 2 : int32 (4 bytes) |
|
1747 | 1746 | 3 : int64 (8 bytes) |
|
1748 | 1747 | 4 : float32 (4 bytes) |
|
1749 | 1748 | 5 : double64 (8 bytes) |
|
1750 | 1749 | |
|
1751 | 1750 | Return: |
|
1752 | 1751 | 0 : if the setup did not succeed |

1753 | 1752 | 1 : if the setup succeeded |
|
1754 | 1753 | """ |
|
1755 | 1754 | |
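The datatype argument listed in the docstring above is an index into a table of sample types that the code resolves through get_numpy_dtype elsewhere in the library. The dict below is an illustrative stand-in that mirrors the docstring, not the library's implementation:

# Illustrative mapping of the 'datatype' indices listed in the docstring above.
import numpy

NUMPY_DTYPE_BY_INDEX = {
    0: numpy.int8,       # 1 byte
    1: numpy.int16,      # 2 bytes
    2: numpy.int32,      # 4 bytes
    3: numpy.int64,      # 8 bytes
    4: numpy.float32,    # 4 bytes
    5: numpy.float64,    # 8 bytes ("double64" in the docstring)
}
print(numpy.dtype(NUMPY_DTYPE_BY_INDEX[4]).itemsize)   # 4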
|
1756 | 1755 | if ext == None: |
|
1757 | 1756 | ext = self.ext |
|
1758 | 1757 | |
|
1759 | 1758 | self.ext = ext.lower() |
|
1760 | 1759 | |
|
1761 | 1760 | self.path = path |
|
1762 | 1761 | |
|
1763 | 1762 | if set is None: |
|
1764 | 1763 | self.setFile = -1 |
|
1765 | 1764 | else: |
|
1766 | 1765 | self.setFile = set - 1 |
|
1767 | 1766 | |
|
1768 | 1767 | self.blocksPerFile = blocksPerFile |
|
1769 | 1768 | |
|
1770 | 1769 | self.profilesPerBlock = profilesPerBlock |
|
1771 | 1770 | |
|
1772 | 1771 | self.dataOut = dataOut |
|
1773 | 1772 | self.fileDate = self.dataOut.datatime.date() |
|
1774 | 1773 | #By default |
|
1775 | 1774 | self.dtype = self.dataOut.dtype |
|
1776 | 1775 | |
|
1777 | 1776 | if datatype is not None: |
|
1778 | 1777 | self.dtype = get_numpy_dtype(datatype) |
|
1779 | 1778 | |
|
1780 | 1779 | if not(self.setNextFile()): |
|
1781 | 1780 | print "[Writing] There isn't a next file" |
|
1782 | 1781 | return 0 |
|
1783 | 1782 | |
|
1784 | 1783 | self.setBlockDimension() |
|
1785 | 1784 | |
|
1786 | 1785 | return 1 |
|
1787 | 1786 | |
|
1788 | 1787 | def run(self, dataOut, path, blocksPerFile, profilesPerBlock=64, set=None, ext=None, datatype=4, **kwargs): |
|
1789 | 1788 | |
|
1790 | 1789 | if not(self.isConfig): |
|
1791 | 1790 | |
|
1792 | 1791 | self.setup(dataOut, path, blocksPerFile, profilesPerBlock=profilesPerBlock, set=set, ext=ext, datatype=datatype, **kwargs) |
|
1793 | 1792 | self.isConfig = True |
|
1794 | 1793 | |
|
1795 | 1794 | self.putData() |
|
1 | NO CONTENT: modified file, binary diff hidden |
@@ -1,1193 +1,1154 | |||
|
1 | 1 | import os, sys |
|
2 | 2 | import glob |
|
3 | 3 | import fnmatch |
|
4 | 4 | import datetime |
|
5 | 5 | import time |
|
6 | 6 | import re |
|
7 | 7 | import h5py |
|
8 | 8 | import numpy |
|
9 | 9 | import matplotlib.pyplot as plt |
|
10 | 10 | |
|
11 | 11 | import pylab as plb |
|
12 | 12 | from scipy.optimize import curve_fit |
|
13 | from scipy import asarray as ar,exp | |
|
13 | from scipy import asarray as ar, exp | |
|
14 | 14 | from scipy import stats |
|
15 | 15 | |
|
16 | from duplicity.path import Path | |
|
17 | 16 | from numpy.ma.core import getdata |
|
18 | 17 | |
|
19 | 18 | SPEED_OF_LIGHT = 299792458 |
|
20 | 19 | SPEED_OF_LIGHT = 3e8 |
|
21 | 20 | |
|
22 | 21 | try: |
|
23 | 22 | from gevent import sleep |
|
24 | 23 | except: |
|
25 | 24 | from time import sleep |
|
26 | 25 | |
|
27 | 26 | from schainpy.model.data.jrodata import Spectra |
|
28 | 27 | #from schainpy.model.data.BLTRheaderIO import FileHeader, RecordHeader |
|
29 | 28 | from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation |
|
30 | 29 | #from schainpy.model.io.jroIO_bltr import BLTRReader |
|
31 | 30 | from numpy import imag, shape, NaN |
|
32 | 31 | |
|
33 | 32 | from jroIO_base import JRODataReader |
|
34 | 33 | |
|
35 | 34 | |
|
36 | 35 | class Header(object): |
|
37 | 36 | |
|
38 | 37 | def __init__(self): |
|
39 | 38 | raise NotImplementedError |
|
40 | 39 | |
|
41 | 40 | |
|
42 | 41 | def read(self): |
|
43 | 42 | |
|
44 | 43 | raise NotImplementedError |
|
45 | 44 | |
|
46 | 45 | def write(self): |
|
47 | 46 | |
|
48 | 47 | raise NotImplementedError |
|
49 | 48 | |
|
50 | 49 | def printInfo(self): |
|
51 | 50 | |
|
52 | 51 | message = "#"*50 + "\n" |
|
53 | 52 | message += self.__class__.__name__.upper() + "\n" |
|
54 | 53 | message += "#"*50 + "\n" |
|
55 | 54 | |
|
56 | 55 | keyList = self.__dict__.keys() |
|
57 | 56 | keyList.sort() |
|
58 | 57 | |
|
59 | 58 | for key in keyList: |
|
60 | 59 | message += "%s = %s" %(key, self.__dict__[key]) + "\n" |
|
61 | 60 | |
|
62 | 61 | if "size" not in keyList: |
|
63 | 62 | attr = getattr(self, "size") |
|
64 | 63 | |
|
65 | 64 | if attr: |
|
66 | 65 | message += "%s = %s" %("size", attr) + "\n" |
|
67 | 66 | |
|
68 | 67 | #print message |
|
69 | 68 | |
|
70 | 69 | |
|
71 | 70 | |
|
72 | 71 | |
|
73 | 72 | |
|
74 | 73 | FILE_STRUCTURE = numpy.dtype([ #HEADER 48bytes |
|
75 | 74 | ('FileMgcNumber','<u4'), #0x23020100 |
|
76 | 75 | ('nFDTdataRecors','<u4'), #No Of FDT data records in this file (0 or more) |
|
77 | 76 | ('OffsetStartHeader','<u4'), |
|
78 | 77 | ('RadarUnitId','<u4'), |
|
79 | 78 | ('SiteName',numpy.str_,32), #Null terminated |
|
80 | 79 | ]) |
|
81 | 80 | |
|
82 | 81 | class FileHeaderBLTR(Header): |
|
83 | 82 | |
|
84 | 83 | def __init__(self): |
|
85 | 84 | |
|
86 | 85 | self.FileMgcNumber= 0 #0x23020100 |
|
87 | 86 | self.nFDTdataRecors=0 #No Of FDT data records in this file (0 or more) |
|
88 | 87 | self.RadarUnitId= 0 |
|
89 | 88 | self.OffsetStartHeader=0 |
|
90 | 89 | self.SiteName= "" |
|
91 | 90 | self.size = 48 |
|
92 | 91 | |
|
93 | 92 | def FHread(self, fp): |
|
94 | 93 | #try: |
|
95 | 94 | startFp = open(fp,"rb") |
|
96 | 95 | |
|
97 | 96 | header = numpy.fromfile(startFp, FILE_STRUCTURE,1) |
|
98 | 97 | |
|
99 | 98 | print ' ' |
|
100 | 99 | print 'file header pointer', startFp.tell() |
|
101 | 100 | print ' ' |
|
102 | 101 | |
|
103 | 102 | |
|
104 | 103 | ''' numpy.fromfile(file, dtype, count, sep='') |
|
105 | 104 | file : file or str |
|
106 | 105 | Open file object or filename. |
|
107 | 106 | |
|
108 | 107 | dtype : data-type |
|
109 | 108 | Data type of the returned array. For binary files, it is used to determine |
|
110 | 109 | the size and byte-order of the items in the file. |
|
111 | 110 | |
|
112 | 111 | count : int |
|
113 | 112 | Number of items to read. -1 means all items (i.e., the complete file). |
|
114 | 113 | |
|
115 | 114 | sep : str |
|
116 | 115 | Separator between items if file is a text file. Empty ("") separator means |
|
117 | 116 | the file should be treated as binary. Spaces (" ") in the separator match zero |
|
118 | 117 | or more whitespace characters. A separator consisting only of spaces must match |
|
119 | 118 | at least one whitespace. |
|
120 | 119 | |
|
121 | 120 | ''' |
|
122 | 121 | |
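FHread uses numpy.fromfile with the 48-byte FILE_STRUCTURE dtype to pull exactly one header record out of the file. A self-contained round-trip sketch with a reduced, illustrative dtype and a temporary file:

# Round-trip sketch: write one structured record, then read it back with numpy.fromfile.
import os
import tempfile
import numpy

hdr_dtype = numpy.dtype([('FileMgcNumber', '<u4'), ('nFDTdataRecors', '<u4')])  # reduced dtype
record = numpy.array([(0x23020100, 5)], dtype=hdr_dtype)

fd, path = tempfile.mkstemp()
os.close(fd)
record.tofile(path)                            # packed binary write
with open(path, 'rb') as fp:
    back = numpy.fromfile(fp, hdr_dtype, 1)    # read exactly one record
print(hex(int(back['FileMgcNumber'][0])))      # 0x23020100
os.remove(path)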
|
123 | 122 | |
|
124 | 123 | |
|
125 | 124 | self.FileMgcNumber= hex(header['FileMgcNumber'][0]) |
|
126 | 125 | self.nFDTdataRecors=int(header['nFDTdataRecors'][0]) #No Of FDT data records in this file (0 or more) |
|
127 | 126 | self.RadarUnitId= int(header['RadarUnitId'][0]) |
|
128 | 127 | self.OffsetStartHeader= int(header['OffsetStartHeader'][0]) |
|
129 | 128 | self.SiteName= str(header['SiteName'][0]) |
|
130 | 129 | |
|
131 | 130 | #print 'Number of blocks', self.nFDTdataRecors |
|
132 | 131 | |
|
133 | 132 | |
|
134 | 133 | if self.size <48: |
|
135 | 134 | return 0 |
|
136 | 135 | |
|
137 | 136 | return 1 |
|
138 | 137 | |
|
139 | 138 | |
|
140 | 139 | def write(self, fp): |
|
141 | 140 | |
|
142 | 141 | headerTuple = (self.FileMgcNumber, |
|
143 | 142 | self.nFDTdataRecors, |
|
144 | 143 | self.RadarUnitId, |
|
145 | 144 | self.SiteName, |
|
146 | 145 | self.size) |
|
147 | 146 | |
|
148 | 147 | |
|
149 | 148 | header = numpy.array(headerTuple, FILE_STRUCTURE) |
|
150 | 149 | # numpy.array(object, dtype=None, copy=True, order=None, subok=False, ndmin=0) |
|
151 | 150 | header.tofile(fp) |
|
152 | 151 | ''' ndarray.tofile(fid, sep, format) Write array to a file as text or binary (default). |
|
153 | 152 | |
|
154 | 153 | fid : file or str |
|
155 | 154 | An open file object, or a string containing a filename. |
|
156 | 155 | |
|
157 | 156 | sep : str |
|
158 | 157 | Separator between array items for text output. If "" (empty), a binary file is written, |
|
159 | 158 | equivalent to file.write(a.tobytes()). |
|
160 | 159 | |
|
161 | 160 | format : str |
|
162 | 161 | Format string for text file output. Each entry in the array is formatted to text by |
|
163 | 162 | first converting it to the closest Python type, and then using "format" % item. |
|
164 | 163 | |
|
165 | 164 | ''' |
|
166 | 165 | |
|
167 | 166 | return 1 |
|
168 | 167 | |
|
169 | 168 | |
|
170 | 169 | |
|
171 | 170 | |
|
172 | 171 | |
|
173 | 172 | RECORD_STRUCTURE = numpy.dtype([ #RECORD HEADER 180+20N bytes |
|
174 | 173 | ('RecMgcNumber','<u4'), #0x23030001 |
|
175 | 174 | ('RecCounter','<u4'), #Record counter(0,1, ...) |
|
176 | 175 | ('Off2StartNxtRec','<u4'), #Offset to start of next record form start of this record |
|
177 | 176 | ('Off2StartData','<u4'), #Offset to start of data from start of this record |
|
178 | 177 | ('nUtime','<i4'), #Epoch time stamp of start of acquisition (seconds) |
|
179 | 178 | ('nMilisec','<u4'), #Millisecond component of time stamp (0,...,999) |
|
180 | 179 | ('ExpTagName',numpy.str_,32), #Experiment tag name (null terminated) |
|
181 | 180 | ('ExpComment',numpy.str_,32), #Experiment comment (null terminated) |
|
182 | 181 | ('SiteLatDegrees','<f4'), #Site latitude (from GPS) in degrees (positive implies North) |
|
183 | 182 | ('SiteLongDegrees','<f4'), #Site longitude (from GPS) in degrees (positive implies East) |
|
184 | 183 | ('RTCgpsStatus','<u4'), #RTC GPS engine status (0=SEEK, 1=LOCK, 2=NOT FITTED, 3=UNAVAILABLE) |
|
185 | 184 | ('TransmitFrec','<u4'), #Transmit frequency (Hz) |
|
186 | 185 | ('ReceiveFrec','<u4'), #Receive frequency |
|
187 | 186 | ('FirstOsciFrec','<u4'), #First local oscillator frequency (Hz) |
|
188 | 187 | ('Polarisation','<u4'), #(0="O", 1="E", 2="linear 1", 3="linear2") |
|
189 | 188 | ('ReceiverFiltSett','<u4'), #Receiver filter settings (0,1,2,3) |
|
190 | 189 | ('nModesInUse','<u4'), #Number of modes in use (1 or 2) |
|
191 | 190 | ('DualModeIndex','<u4'), #Dual Mode index number for these data (0 or 1) |
|
192 | 191 | ('DualModeRange','<u4'), #Dual Mode range correction for these data (m) |
|
193 | 192 | ('nDigChannels','<u4'), #Number of digital channels acquired (2*N) |
|
194 | 193 | ('SampResolution','<u4'), #Sampling resolution (meters) |
|
195 | 194 | ('nHeights','<u4'), #Number of range gates sampled |
|
196 | 195 | ('StartRangeSamp','<u4'), #Start range of sampling (meters) |
|
197 | 196 | ('PRFhz','<u4'), #PRF (Hz) |
|
198 | 197 | ('nCohInt','<u4'), #Integrations |
|
199 | 198 | ('nProfiles','<u4'), #Number of data points transformed |
|
200 | 199 | ('nChannels','<u4'), #Number of receive beams stored in file (1 or N) |
|
201 | 200 | ('nIncohInt','<u4'), #Number of spectral averages |
|
202 | 201 | ('FFTwindowingInd','<u4'), #FFT windowing index (0 = no window) |
|
203 | 202 | ('BeamAngleAzim','<f4'), #Beam steer angle (azimuth) in degrees (clockwise from true North) |
|
204 | 203 | ('BeamAngleZen','<f4'), #Beam steer angle (zenith) in degrees (0=> vertical) |
|
205 | 204 | ('AntennaCoord0','<f4'), #Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs |
|
206 | 205 | ('AntennaAngl0','<f4'), #Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs |
|
207 | 206 | ('AntennaCoord1','<f4'), #Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs |
|
208 | 207 | ('AntennaAngl1','<f4'), #Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs |
|
209 | 208 | ('AntennaCoord2','<f4'), #Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs |
|
210 | 209 | ('AntennaAngl2','<f4'), #Antenna coordinates (Range(meters), Bearing(degrees)) - N pairs |
|
211 | 210 | ('RecPhaseCalibr0','<f4'), #Receiver phase calibration (degrees) - N values |
|
212 | 211 | ('RecPhaseCalibr1','<f4'), #Receiver phase calibration (degrees) - N values |
|
213 | 212 | ('RecPhaseCalibr2','<f4'), #Receiver phase calibration (degrees) - N values |
|
214 | 213 | ('RecAmpCalibr0','<f4'), #Receiver amplitude calibration (ratio relative to receiver one) - N values |
|
215 | 214 | ('RecAmpCalibr1','<f4'), #Receiver amplitude calibration (ratio relative to receiver one) - N values |
|
216 | 215 | ('RecAmpCalibr2','<f4'), #Receiver amplitude calibration (ratio relative to receiver one) - N values |
|
217 | 216 | ('ReceiverGaindB0','<i4'), #Receiver gains in dB - N values |
|
218 | 217 | ('ReceiverGaindB1','<i4'), #Receiver gains in dB - N values |
|
219 | 218 | ('ReceiverGaindB2','<i4'), #Receiver gains in dB - N values |
|
220 | 219 | ]) |
|
221 | 220 | |
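RECORD_STRUCTURE above is a fixed-size structured dtype, so its byte length can be checked with .itemsize, and the k-th record header sits at OffsetStartHeader + k * Off2StartNxtRec exactly as RHread computes further below. A small sketch with a reduced dtype and the default offsets used by this class:

# Sketch: size of a structured record dtype and the offset of the k-th record header.
import numpy

toy_record = numpy.dtype([('RecMgcNumber', '<u4'), ('RecCounter', '<u4'), ('SiteLatDegrees', '<f4')])
print(toy_record.itemsize)         # 12 bytes for this reduced subset of RECORD_STRUCTURE

def record_offset(rec_counter, offset_start_header=48, off2start_next_rec=811248):
    # mirrors OffRHeader = self.OffsetStartHeader + self.RecCounter*self.Off2StartNxtRec in RHread
    return offset_start_header + rec_counter * off2start_next_rec

print(record_offset(2))            # 1622544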
|
222 | 221 | |
|
223 | 222 | class RecordHeaderBLTR(Header): |
|
224 | 223 | |
|
225 | 224 | def __init__(self, RecMgcNumber=None, RecCounter= 0, Off2StartNxtRec= 811248, |
|
226 | 225 | nUtime= 0, nMilisec= 0, ExpTagName= None, |
|
227 | 226 | ExpComment=None, SiteLatDegrees=0, SiteLongDegrees= 0, |
|
228 | 227 | RTCgpsStatus= 0, TransmitFrec= 0, ReceiveFrec= 0, |
|
229 | 228 | FirstOsciFrec= 0, Polarisation= 0, ReceiverFiltSett= 0, |
|
230 | 229 | nModesInUse= 0, DualModeIndex= 0, DualModeRange= 0, |
|
231 | 230 | nDigChannels= 0, SampResolution= 0, nHeights= 0, |
|
232 | 231 | StartRangeSamp= 0, PRFhz= 0, nCohInt= 0, |
|
233 | 232 | nProfiles= 0, nChannels= 0, nIncohInt= 0, |
|
234 | 233 | FFTwindowingInd= 0, BeamAngleAzim= 0, BeamAngleZen= 0, |
|
235 | 234 | AntennaCoord0= 0, AntennaCoord1= 0, AntennaCoord2= 0, |
|
236 | 235 | RecPhaseCalibr0= 0, RecPhaseCalibr1= 0, RecPhaseCalibr2= 0, |
|
237 | 236 | RecAmpCalibr0= 0, RecAmpCalibr1= 0, RecAmpCalibr2= 0, |
|
238 | 237 | AntennaAngl0=0, AntennaAngl1=0, AntennaAngl2=0, |
|
239 | 238 | ReceiverGaindB0= 0, ReceiverGaindB1= 0, ReceiverGaindB2= 0, Off2StartData=0, OffsetStartHeader=0): |
|
240 | 239 | |
|
241 | 240 | self.RecMgcNumber = RecMgcNumber #0x23030001 |
|
242 | 241 | self.RecCounter = RecCounter |
|
243 | 242 | self.Off2StartNxtRec = Off2StartNxtRec |
|
244 | 243 | self.Off2StartData = Off2StartData |
|
245 | 244 | self.nUtime = nUtime |
|
246 | 245 | self.nMilisec = nMilisec |
|
247 | 246 | self.ExpTagName = ExpTagName |
|
248 | 247 | self.ExpComment = ExpComment |
|
249 | 248 | self.SiteLatDegrees = SiteLatDegrees |
|
250 | 249 | self.SiteLongDegrees = SiteLongDegrees |
|
251 | 250 | self.RTCgpsStatus = RTCgpsStatus |
|
252 | 251 | self.TransmitFrec = TransmitFrec |
|
253 | 252 | self.ReceiveFrec = ReceiveFrec |
|
254 | 253 | self.FirstOsciFrec = FirstOsciFrec |
|
255 | 254 | self.Polarisation = Polarisation |
|
256 | 255 | self.ReceiverFiltSett = ReceiverFiltSett |
|
257 | 256 | self.nModesInUse = nModesInUse |
|
258 | 257 | self.DualModeIndex = DualModeIndex |
|
259 | 258 | self.DualModeRange = DualModeRange |
|
260 | 259 | self.nDigChannels = nDigChannels |
|
261 | 260 | self.SampResolution = SampResolution |
|
262 | 261 | self.nHeights = nHeights |
|
263 | 262 | self.StartRangeSamp = StartRangeSamp |
|
264 | 263 | self.PRFhz = PRFhz |
|
265 | 264 | self.nCohInt = nCohInt |
|
266 | 265 | self.nProfiles = nProfiles |
|
267 | 266 | self.nChannels = nChannels |
|
268 | 267 | self.nIncohInt = nIncohInt |
|
269 | 268 | self.FFTwindowingInd = FFTwindowingInd |
|
270 | 269 | self.BeamAngleAzim = BeamAngleAzim |
|
271 | 270 | self.BeamAngleZen = BeamAngleZen |
|
272 | 271 | self.AntennaCoord0 = AntennaCoord0 |
|
273 | 272 | self.AntennaAngl0 = AntennaAngl0 |
|
274 | 273 | self.AntennaAngl1 = AntennaAngl1 |
|
275 | 274 | self.AntennaAngl2 = AntennaAngl2 |
|
276 | 275 | self.AntennaCoord1 = AntennaCoord1 |
|
277 | 276 | self.AntennaCoord2 = AntennaCoord2 |
|
278 | 277 | self.RecPhaseCalibr0 = RecPhaseCalibr0 |
|
279 | 278 | self.RecPhaseCalibr1 = RecPhaseCalibr1 |
|
280 | 279 | self.RecPhaseCalibr2 = RecPhaseCalibr2 |
|
281 | 280 | self.RecAmpCalibr0 = RecAmpCalibr0 |
|
282 | 281 | self.RecAmpCalibr1 = RecAmpCalibr1 |
|
283 | 282 | self.RecAmpCalibr2 = RecAmpCalibr2 |
|
284 | 283 | self.ReceiverGaindB0 = ReceiverGaindB0 |
|
285 | 284 | self.ReceiverGaindB1 = ReceiverGaindB1 |
|
286 | 285 | self.ReceiverGaindB2 = ReceiverGaindB2 |
|
287 | 286 | self.OffsetStartHeader = 48 |
|
288 | 287 | |
|
289 | 288 | |
|
290 | 289 | |
|
291 | 290 | def RHread(self, fp): |
|
292 | 291 | #print fp |
|
293 | 292 | #startFp = open('/home/erick/Documents/Data/huancayo.20161019.22.fdt',"rb") #The method tell() returns the current position of the file read/write pointer within the file. |
|
294 | 293 | startFp = open(fp,"rb") #Open the .fdt file in binary mode; tell() gives the current position of the read/write pointer within it. |
|
295 | 294 | #RecCounter=0 |
|
296 | 295 | #Off2StartNxtRec=811248 |
|
297 | 296 | OffRHeader= self.OffsetStartHeader + self.RecCounter*self.Off2StartNxtRec |
|
298 | 297 | print ' ' |
|
299 | 298 | print 'Record Header pointer', startFp.tell() |
|
300 | 299 | print ' ' |
|
301 | 300 | |
|
302 | 301 | |
|
303 | 302 | startFp.seek(OffRHeader, os.SEEK_SET) |
|
304 | 303 | |
|
305 | 304 | print ' ' |
|
306 | 305 | print 'Record Header pointer after seek', startFp.tell() |
|
307 | 306 | print ' ' |
|
308 | 307 | |
|
309 | 308 | #print 'Block position: ',OffRHeader |
|
310 | 309 | |
|
311 | 310 | header = numpy.fromfile(startFp,RECORD_STRUCTURE,1) |
|
312 | 311 | |
|
313 | 312 | print ' ' |
|
314 | 313 | print 'Record Header pointer after seek', startFp.tell() |
|
315 | 314 | print ' ' |
|
316 | 315 | |
|
317 | 316 | print ' ' |
|
318 | 317 | # |
|
319 | 318 | #print 'Record Header pointer after seek', header.tell() |
|
320 | 319 | print ' ' |
|
321 | 320 | |
|
322 | 321 | self.RecMgcNumber = hex(header['RecMgcNumber'][0]) #0x23030001 |
|
323 | 322 | self.RecCounter = int(header['RecCounter'][0]) |
|
324 | 323 | self.Off2StartNxtRec = int(header['Off2StartNxtRec'][0]) |
|
325 | 324 | self.Off2StartData = int(header['Off2StartData'][0]) |
|
326 | 325 | self.nUtime = header['nUtime'][0] |
|
327 | 326 | self.nMilisec = header['nMilisec'][0] |
|
328 | 327 | self.ExpTagName = str(header['ExpTagName'][0]) |
|
329 | 328 | self.ExpComment = str(header['ExpComment'][0]) |
|
330 | 329 | self.SiteLatDegrees = header['SiteLatDegrees'][0] |
|
331 | 330 | self.SiteLongDegrees = header['SiteLongDegrees'][0] |
|
332 | 331 | self.RTCgpsStatus = header['RTCgpsStatus'][0] |
|
333 | 332 | self.TransmitFrec = header['TransmitFrec'][0] |
|
334 | 333 | self.ReceiveFrec = header['ReceiveFrec'][0] |
|
335 | 334 | self.FirstOsciFrec = header['FirstOsciFrec'][0] |
|
336 | 335 | self.Polarisation = header['Polarisation'][0] |
|
337 | 336 | self.ReceiverFiltSett = header['ReceiverFiltSett'][0] |
|
338 | 337 | self.nModesInUse = header['nModesInUse'][0] |
|
339 | 338 | self.DualModeIndex = header['DualModeIndex'][0] |
|
340 | 339 | self.DualModeRange = header['DualModeRange'][0] |
|
341 | 340 | self.nDigChannels = header['nDigChannels'][0] |
|
342 | 341 | self.SampResolution = header['SampResolution'][0] |
|
343 | 342 | self.nHeights = header['nHeights'][0] |
|
344 | 343 | self.StartRangeSamp = header['StartRangeSamp'][0] |
|
345 | 344 | self.PRFhz = header['PRFhz'][0] |
|
346 | 345 | self.nCohInt = header['nCohInt'][0] |
|
347 | 346 | self.nProfiles = header['nProfiles'][0] |
|
348 | 347 | self.nChannels = header['nChannels'][0] |
|
349 | 348 | self.nIncohInt = header['nIncohInt'][0] |
|
350 | 349 | self.FFTwindowingInd = header['FFTwindowingInd'][0] |
|
351 | 350 | self.BeamAngleAzim = header['BeamAngleAzim'][0] |
|
352 | 351 | self.BeamAngleZen = header['BeamAngleZen'][0] |
|
353 | 352 | self.AntennaCoord0 = header['AntennaCoord0'][0] |
|
354 | 353 | self.AntennaAngl0 = header['AntennaAngl0'][0] |
|
355 | 354 | self.AntennaCoord1 = header['AntennaCoord1'][0] |
|
356 | 355 | self.AntennaAngl1 = header['AntennaAngl1'][0] |
|
357 | 356 | self.AntennaCoord2 = header['AntennaCoord2'][0] |
|
358 | 357 | self.AntennaAngl2 = header['AntennaAngl2'][0] |
|
359 | 358 | self.RecPhaseCalibr0 = header['RecPhaseCalibr0'][0] |
|
360 | 359 | self.RecPhaseCalibr1 = header['RecPhaseCalibr1'][0] |
|
361 | 360 | self.RecPhaseCalibr2 = header['RecPhaseCalibr2'][0] |
|
362 | 361 | self.RecAmpCalibr0 = header['RecAmpCalibr0'][0] |
|
363 | 362 | self.RecAmpCalibr1 = header['RecAmpCalibr1'][0] |
|
364 | 363 | self.RecAmpCalibr2 = header['RecAmpCalibr2'][0] |
|
365 | 364 | self.ReceiverGaindB0 = header['ReceiverGaindB0'][0] |
|
366 | 365 | self.ReceiverGaindB1 = header['ReceiverGaindB1'][0] |
|
367 | 366 | self.ReceiverGaindB2 = header['ReceiverGaindB2'][0] |
|
368 | 367 | |
|
369 | 368 | self.ipp= 0.5*(SPEED_OF_LIGHT/self.PRFhz) |
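# ipp here appears to be expressed as a range in meters: 0.5 * c / PRF is the maximum
# unambiguous range covered in one inter-pulse period (e.g. PRF = 1000 Hz gives ~150 km).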
|
370 | 369 | |
|
371 | 370 | self.RHsize = 180+20*self.nChannels |
|
372 | 371 | self.Datasize= self.nProfiles*self.nChannels*self.nHeights*2*4 |
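# Datasize counts 2 * 4 bytes (real + imaginary float32) per profile, channel and
# height, consistent with the '<c8' complex samples read later in readBlock.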
|
373 | 372 | #print 'Datasize',self.Datasize |
|
374 | 373 | endFp = self.OffsetStartHeader + self.RecCounter*self.Off2StartNxtRec |
|
375 | 374 | |
|
376 | 375 | print '==============================================' |
|
377 | 376 | print 'RecMgcNumber ',self.RecMgcNumber |
|
378 | 377 | print 'RecCounter ',self.RecCounter |
|
379 | 378 | print 'Off2StartNxtRec ',self.Off2StartNxtRec |
|
380 | 379 | print 'Off2StartData ',self.Off2StartData |
|
381 | 380 | print 'Range Resolution ',self.SampResolution |
|
382 | 381 | print 'First Height ',self.StartRangeSamp |
|
383 | 382 | print 'PRF (Hz) ',self.PRFhz |
|
384 | 383 | print 'Heights (K) ',self.nHeights |
|
385 | 384 | print 'Channels (N) ',self.nChannels |
|
386 | 385 | print 'Profiles (J) ',self.nProfiles |
|
387 | 386 | print 'iCoh ',self.nCohInt |
|
388 | 387 | print 'iInCoh ',self.nIncohInt |
|
389 | 388 | print 'BeamAngleAzim ',self.BeamAngleAzim |
|
390 | 389 | print 'BeamAngleZen ',self.BeamAngleZen |
|
391 | 390 | |
|
392 | 391 | #print 'Mode in use ',self.DualModeIndex |
|
393 | 392 | #print 'UtcTime ',self.nUtime |
|
394 | 393 | #print 'MiliSec ',self.nMilisec |
|
395 | 394 | #print 'Exp TagName ',self.ExpTagName |
|
396 | 395 | #print 'Exp Comment ',self.ExpComment |
|
397 | 396 | #print 'FFT Window Index ',self.FFTwindowingInd |
|
398 | 397 | #print 'N Dig. Channels ',self.nDigChannels |
|
399 | 398 | print 'Block size ',self.RHsize |
|
400 | 399 | print 'DataSize ',self.Datasize |
|
401 | 400 | print 'BeamAngleAzim ',self.BeamAngleAzim |
|
402 | 401 | #print 'AntennaCoord0 ',self.AntennaCoord0 |
|
403 | 402 | #print 'AntennaAngl0 ',self.AntennaAngl0 |
|
404 | 403 | #print 'AntennaCoord1 ',self.AntennaCoord1 |
|
405 | 404 | #print 'AntennaAngl1 ',self.AntennaAngl1 |
|
406 | 405 | #print 'AntennaCoord2 ',self.AntennaCoord2 |
|
407 | 406 | #print 'AntennaAngl2 ',self.AntennaAngl2 |
|
408 | 407 | print 'RecPhaseCalibr0 ',self.RecPhaseCalibr0 |
|
409 | 408 | print 'RecPhaseCalibr1 ',self.RecPhaseCalibr1 |
|
410 | 409 | print 'RecPhaseCalibr2 ',self.RecPhaseCalibr2 |
|
411 | 410 | print 'RecAmpCalibr0 ',self.RecAmpCalibr0 |
|
412 | 411 | print 'RecAmpCalibr1 ',self.RecAmpCalibr1 |
|
413 | 412 | print 'RecAmpCalibr2 ',self.RecAmpCalibr2 |
|
414 | 413 | print 'ReceiverGaindB0 ',self.ReceiverGaindB0 |
|
415 | 414 | print 'ReceiverGaindB1 ',self.ReceiverGaindB1 |
|
416 | 415 | print 'ReceiverGaindB2 ',self.ReceiverGaindB2 |
|
417 | 416 | print '==============================================' |
|
418 | 417 | |
|
419 | 418 | if OffRHeader > endFp: |
|
420 | 419 | sys.stderr.write("Warning %s: the size value read from the System Header is smaller than expected\n" %fp) |
|
421 | 420 | return 0 |
|
422 | 421 | |
|
423 | 422 | if OffRHeader < endFp: |
|
424 | 423 | sys.stderr.write("Warning %s: the size value read from the System Header is greater than expected\n" %fp) |
|
425 | 424 | return 0 |
|
426 | 425 | |
|
427 | 426 | return 1 |
|
428 | 427 | |
|
429 | 428 | |
|
430 | class BLTRReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODataReader): | |
|
429 | class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODataReader): | |
|
431 | 430 | |
|
432 | 431 | path = None |
|
433 | 432 | startDate = None |
|
434 | 433 | endDate = None |
|
435 | 434 | startTime = None |
|
436 | 435 | endTime = None |
|
437 | 436 | walk = None |
|
438 | 437 | isConfig = False |
|
439 | 438 | |
|
440 | 439 | |
|
441 | 440 | fileList= None |
|
442 | 441 | |
|
443 | 442 | #metadata |
|
444 | 443 | TimeZone= None |
|
445 | 444 | Interval= None |
|
446 | 445 | heightList= None |
|
447 | 446 | |
|
448 | 447 | #data |
|
449 | 448 | data= None |
|
450 | 449 | utctime= None |
|
451 | 450 | |
|
452 | 451 | |
|
453 | 452 | |
|
454 | 453 | def __init__(self, **kwargs): |
|
455 | 454 | |
|
456 | 455 | #Remove the inheritance from the base class |
|
457 | 456 | ProcessingUnit.__init__(self, **kwargs) |
|
458 | 457 | |
|
459 |

458 | #self.isConfig = False |
|
460 | 459 | |
|
461 | 460 | #self.pts2read_SelfSpectra = 0 |
|
462 | 461 | #self.pts2read_CrossSpectra = 0 |
|
463 | 462 | #self.pts2read_DCchannels = 0 |
|
464 | 463 | #self.datablock = None |
|
465 | 464 | self.utc = None |
|
466 | 465 | self.ext = ".fdt" |
|
467 | 466 | self.optchar = "P" |
|
468 | 467 | self.fpFile=None |
|
469 | 468 | self.fp = None |
|
470 | 469 | self.BlockCounter=0 |
|
471 | 470 | self.dtype = None |
|
472 | 471 | self.fileSizeByHeader = None |
|
473 | 472 | self.filenameList = [] |
|
474 | 473 | self.fileSelector = 0 |
|
475 | 474 | self.Off2StartNxtRec=0 |
|
476 | 475 | self.RecCounter=0 |
|
477 | 476 | self.flagNoMoreFiles = 0 |
|
478 | 477 | self.data_spc=None |
|
479 | 478 | self.data_cspc=None |
|
480 | 479 | self.data_output=None |
|
481 | 480 | self.path = None |
|
482 | 481 | self.OffsetStartHeader=0 |
|
483 | 482 | self.Off2StartData=0 |
|
484 | 483 | self.ipp = 0 |
|
485 | 484 | self.nFDTdataRecors=0 |
|
486 | 485 | self.blocksize = 0 |
|
487 | 486 | self.dataOut = Spectra() |
|
488 | 487 | self.profileIndex = 1 #Always |
|
489 | 488 | self.dataOut.flagNoData=False |
|
490 | 489 | self.dataOut.nRdPairs = 0 |
|
491 | 490 | self.dataOut.pairsList = [] |
|
492 | 491 | self.dataOut.data_spc=None |
|
493 | 492 | self.dataOut.noise=[] |
|
494 | 493 | self.dataOut.velocityX=[] |
|
495 | 494 | self.dataOut.velocityY=[] |
|
496 | 495 | self.dataOut.velocityV=[] |
|
497 | 496 | |
|
498 | 497 | |
|
499 | 498 | |
|
500 | 499 | def Files2Read(self, fp): |
|
501 | 500 | ''' |
|
502 | 501 | Counts the .fdt files that exist in the folder to be read. |

503 | 502 | It also builds a sorted list with the names of the files to read. |
|
504 | 503 | ''' |
|
505 | 504 | #self.__checkPath() |
|
506 | 505 | |
|
507 | 506 | ListaData=os.listdir(fp) #Get the list of files within the fp path |

508 | 507 | ListaData=sorted(ListaData) #Sort the list of files in ascending order by name |
|
509 | 508 | nFiles=0 #File Counter |
|
510 | 509 | FileList=[] #A list is created that will contain the .fdt files |
|
511 | 510 | for IndexFile in ListaData : |
|
512 | 511 | if '.fdt' in IndexFile: |
|
513 | 512 | FileList.append(IndexFile) |
|
514 | 513 | nFiles+=1 |
|
515 | 514 | |
|
516 | 515 | #print 'Files2Read' |
|
517 | 516 | #print 'Existen '+str(nFiles)+' archivos .fdt' |
|
518 | 517 | |
|
519 | 518 | self.filenameList=FileList #List of files sorted in ascending order by name |
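# A roughly equivalent (hypothetical) one-liner using only modules already imported:
#   self.filenameList = sorted(f for f in os.listdir(fp) if f.endswith('.fdt'))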
|
520 | 519 | |
|
521 | 520 | |
|
522 | 521 | def run(self, **kwargs): |
|
523 | 522 | ''' |
|
524 | 523 | This method initiates the data intake and is called repeatedly. |

525 | 524 | It first verifies that setup() has been run and then proceeds to acquire |

526 | 525 | the data to be processed with getData(). |
|
527 | 526 | ''' |
|
528 | 527 | if not self.isConfig: |
|
529 | 528 | self.setup(**kwargs) |
|
530 | 529 | self.isConfig = True |
|
531 | 530 | |
|
532 | 531 | self.getData() |
|
533 | 532 | #print 'running' |
|
534 | 533 | |
|
535 | 534 | |
|
536 | 535 | def setup(self, path=None, |
|
537 | 536 | startDate=None, |
|
538 | 537 | endDate=None, |
|
539 | 538 | startTime=None, |
|
540 | 539 | endTime=None, |
|
541 | 540 | walk=True, |
|
542 | 541 | timezone='utc', |
|
543 | 542 | code = None, |
|
544 | 543 | online=False, |
|
545 | 544 | ReadMode=None, |
|
546 | 545 | **kwargs): |
|
547 | 546 | |
|
548 | 547 | self.isConfig = True |
|
549 | 548 | |
|
550 | 549 | self.path=path |
|
551 | 550 | self.startDate=startDate |
|
552 | 551 | self.endDate=endDate |
|
553 | 552 | self.startTime=startTime |
|
554 | 553 | self.endTime=endTime |
|
555 | 554 | self.walk=walk |
|
556 | 555 | self.ReadMode=int(ReadMode) |
|
557 | 556 | |
|
558 | 557 | pass |
|
559 | 558 | |
|
560 | 559 | |
|
561 | 560 | def getData(self): |
|
562 | 561 | ''' |
|
563 | 562 | Before running this method, check whether there is still an unread file, |

564 | 563 | whether there are still blocks to read, or whether the data block is empty. |

565 | 564 | |

566 | 565 | It then delegates the actual reading of the file to readFile(). |
|
567 | 566 | |
|
568 | 567 | ''' |
|
569 | 568 | |
|
570 | 569 | if self.flagNoMoreFiles: |
|
571 | 570 | self.dataOut.flagNoData = True |
|
572 | 571 | print 'flagNoData becomes True' |
|
573 | 572 | return 0 |
|
574 | 573 | |
|
575 | 574 | self.fp=self.path |
|
576 | 575 | self.Files2Read(self.fp) |
|
577 | 576 | self.readFile(self.fp) |
|
578 | 577 | self.dataOut.data_spc = self.data_spc |
|
579 | 578 | self.dataOut.data_cspc =self.data_cspc |
|
580 | 579 | self.dataOut.data_output=self.data_output |
|
581 | 580 | |
|
582 | 581 | print 'self.dataOut.data_output', shape(self.dataOut.data_output) |
|
583 | 582 | |
|
584 | 583 | #self.removeDC() |
|
585 | 584 | return self.dataOut.data_spc |
|
586 | 585 | |
|
587 | 586 | |
|
588 | 587 | def readFile(self,fp): |
|
589 | 588 | ''' |
|
590 | 589 | You must indicate whether you are reading in online or offline mode and load |

591 | 590 | the parameters for that file reading mode. |
|
592 | 591 | |
|
593 | 592 | Then you must do 2 actions: |
|
594 | 593 | |
|
595 | 594 | 1. Get the BLTR FileHeader. |
|
596 | 595 | 2. Start reading the first block. |
|
597 | 596 | ''' |
|
598 | 597 | |
|
599 | 598 | #Build, from the folder path, the full name of the .fdt file that will be read |
|
600 | 599 | print "File: ",self.fileSelector+1 |
|
601 | 600 | |
|
602 | 601 | if self.fileSelector < len(self.filenameList): |
|
603 | 602 | |
|
604 | 603 | self.fpFile=str(fp)+'/'+str(self.filenameList[self.fileSelector]) |
|
605 | 604 | #print self.fpFile |
|
606 | 605 | fheader = FileHeaderBLTR() |
|
607 | 606 | fheader.FHread(self.fpFile) #Bltr FileHeader Reading |
|
608 | 607 | self.nFDTdataRecors=fheader.nFDTdataRecors |
|
609 | 608 | |
|
610 | 609 | self.readBlock() #Block reading |
|
611 | 610 | else: |
|
612 | 611 | print 'readFile FlagNoData becomes true' |
|
613 | 612 | self.flagNoMoreFiles=True |
|
614 | 613 | self.dataOut.flagNoData = True |
|
615 | 614 | return 0 |
|
616 | 615 | |
|
617 | 616 | def getVelRange(self, extrapoints=0): |
|
618 | 617 | Lambda= SPEED_OF_LIGHT/50000000 |
|
619 | 618 | PRF = self.dataOut.PRF#1./(self.dataOut.ippSeconds * self.dataOut.nCohInt) |
|
620 | 619 | Vmax=-Lambda/(4.*(1./PRF)*self.dataOut.nCohInt*2.) |
|
621 | 620 | deltafreq = PRF / (self.nProfiles) |
|
622 | 621 | deltavel = (Vmax*2) / (self.nProfiles) |
|
623 | 622 | freqrange = deltafreq*(numpy.arange(self.nProfiles)-self.nProfiles/2.) - deltafreq/2 |
|
624 | 623 | velrange = deltavel*(numpy.arange(self.nProfiles)-self.nProfiles/2.) |
|
625 | 624 | return velrange |
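# Note on getVelRange: the wavelength is hard-coded for a 50 MHz carrier
# (Lambda = c / 50e6, about 6 m) instead of using the TransmitFrec read from the record
# header, and the magnitude of Vmax works out to Lambda * PRF / (8 * nCohInt) because of
# the extra factors of (1/PRF), nCohInt and 2 in the expression above.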
|
626 | 625 | |
|
627 | 626 | def readBlock(self): |
|
628 | 627 | ''' |
|
629 | 628 | Check whether the block has data; if it does not, move on to the next file. |
|
630 | 629 | |
|
631 | 630 | Then the following is done: |
|
632 | 631 | |
|
633 | 632 | 1. Read the RecordHeader |
|
634 | 633 | 2. Fill the buffer with the data of the current block. |
|
635 | 634 | |
|
636 | 635 | ''' |
|
637 | 636 | |
|
638 | 637 | if self.BlockCounter < self.nFDTdataRecors-2: |
|
639 | 638 | print self.nFDTdataRecors, 'CONDITION!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!' |
|
640 | 639 | if self.ReadMode==1: |
|
641 | 640 | rheader = RecordHeaderBLTR(RecCounter=self.BlockCounter+1) |
|
642 | 641 | elif self.ReadMode==0: |
|
643 | 642 | rheader = RecordHeaderBLTR(RecCounter=self.BlockCounter) |
|
644 | 643 | |
|
645 | 644 | rheader.RHread(self.fpFile) #Bltr FileHeader Reading |
|
646 | 645 | |
|
647 | 646 | self.OffsetStartHeader=rheader.OffsetStartHeader |
|
648 | 647 | self.RecCounter=rheader.RecCounter |
|
649 | 648 | self.Off2StartNxtRec=rheader.Off2StartNxtRec |
|
650 | 649 | self.Off2StartData=rheader.Off2StartData |
|
651 | 650 | self.nProfiles=rheader.nProfiles |
|
652 | 651 | self.nChannels=rheader.nChannels |
|
653 | 652 | self.nHeights=rheader.nHeights |
|
654 | 653 | self.frequency=rheader.TransmitFrec |
|
655 | 654 | self.DualModeIndex=rheader.DualModeIndex |
|
656 | 655 | |
|
657 | 656 | self.pairsList =[(0,1),(0,2),(1,2)] |
|
658 | 657 | self.dataOut.pairsList = self.pairsList |
|
659 | 658 | |
|
660 | 659 | self.nRdPairs=len(self.dataOut.pairsList) |
|
661 | 660 | self.dataOut.nRdPairs = self.nRdPairs |
|
662 | 661 | |
|
663 | 662 | self.__firstHeigth=rheader.StartRangeSamp |
|
664 | 663 | self.__deltaHeigth=rheader.SampResolution |
|
665 | 664 | self.dataOut.heightList= self.__firstHeigth + numpy.array(range(self.nHeights))*self.__deltaHeigth |
|
666 | 665 | self.dataOut.channelList = range(self.nChannels) |
|
667 | 666 | self.dataOut.nProfiles=rheader.nProfiles |
|
668 | 667 | self.dataOut.nIncohInt=rheader.nIncohInt |
|
669 | 668 | self.dataOut.nCohInt=rheader.nCohInt |
|
670 | 669 | self.dataOut.ippSeconds= 1/float(rheader.PRFhz) |
|
671 | 670 | self.dataOut.PRF=rheader.PRFhz |
|
672 | 671 | self.dataOut.nFFTPoints=rheader.nProfiles |
|
673 | 672 | self.dataOut.utctime=rheader.nUtime |
|
674 | 673 | self.dataOut.timeZone=0 |
|
675 | 674 | self.dataOut.normFactor= self.dataOut.nProfiles*self.dataOut.nIncohInt*self.dataOut.nCohInt |
|
676 | 675 | self.dataOut.outputInterval= self.dataOut.ippSeconds * self.dataOut.nCohInt * self.dataOut.nIncohInt * self.nProfiles |
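# Illustrative numbers (not from any real file): with PRF = 1000 Hz, nCohInt = 10,
# nIncohInt = 10 and nProfiles = 64, ippSeconds = 1 ms and
# outputInterval = 0.001 * 10 * 10 * 64 = 6.4 s, the time span covered by one block.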
|
677 | 676 | |
|
678 | 677 | self.data_output=numpy.ones([3,rheader.nHeights])*numpy.NaN |
|
679 | 678 | print 'self.data_output', shape(self.data_output) |
|
680 | 679 | self.dataOut.velocityX=[] |
|
681 | 680 | self.dataOut.velocityY=[] |
|
682 | 681 | self.dataOut.velocityV=[] |
|
683 | 682 | |
|
684 | 683 | '''Block reading: the block data is read and then reshaped into |

685 | 684 | (channels, profiles, heights). |
|
686 | 685 | ''' |
|
687 | 686 | |
|
688 | 687 | #Move the file pointer to where the data block starts |
|
689 | 688 | startDATA = open(self.fpFile,"rb") |
|
690 | 689 | OffDATA= self.OffsetStartHeader + self.RecCounter*self.Off2StartNxtRec+self.Off2StartData |
|
691 | 690 | startDATA.seek(OffDATA, os.SEEK_SET) |
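# The data block of this record starts at: file-header size (OffsetStartHeader)
# + RecCounter * record stride (Off2StartNxtRec) + header-to-data offset (Off2StartData),
# which is the byte position the pointer is moved to here.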
|
692 | 691 | |
|
693 | 692 | def moving_average(x, N=2): |
|
694 | 693 | return numpy.convolve(x, numpy.ones((N,))/N)[(N-1):] |
|
695 | 694 | |
|
696 | 695 | def gaus(xSamples,a,x0,sigma): |
|
697 | 696 | return a*exp(-(xSamples-x0)**2/(2*sigma**2)) |
|
698 | 697 | |
|
699 | 698 | def Find(x,value): |
|
700 | 699 | for index in range(len(x)): |
|
701 | 700 | if x[index]==value: |
|
702 | 701 | return index |
|
703 | 702 | |
|
704 | 703 | def pol2cart(rho, phi): |
|
705 | 704 | x = rho * numpy.cos(phi) |
|
706 | 705 | y = rho * numpy.sin(phi) |
|
707 | 706 | return(x, y) |
|
708 | 707 | |
|
709 | 708 | |
|
710 | 709 | |
|
711 | 710 | |
|
712 | 711 | if self.DualModeIndex==self.ReadMode: |
|
713 | 712 | |
|
714 | 713 | self.data_fft = numpy.fromfile( startDATA, [('complex','<c8')],self.nProfiles*self.nChannels*self.nHeights ) |
|
715 | 714 | |
|
716 | 715 | self.data_fft=self.data_fft.astype(numpy.dtype('complex')) |
|
717 | 716 | |
|
718 | 717 | self.data_block=numpy.reshape(self.data_fft,(self.nHeights, self.nChannels, self.nProfiles )) |
|
719 | 718 | |
|
720 | 719 | self.data_block = numpy.transpose(self.data_block, (1,2,0)) |
|
721 | 720 | |
|
722 | 721 | copy = self.data_block.copy() |
|
723 | 722 | spc = copy * numpy.conjugate(copy) |
|
724 | 723 | |
|
725 | 724 | self.data_spc = numpy.absolute(spc) # absolute value (magnitude) |
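# The self-spectra are |X|^2 = X * conj(X) for every channel, profile and height of the
# reshaped block; numpy.absolute keeps only the (real-valued) magnitude.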
|
726 | 725 | |
|
727 | 726 | factor = self.dataOut.normFactor |
|
728 | 727 | |
|
729 | 728 | |
|
730 | 729 | z = self.data_spc.copy()#/factor |
|
731 | 730 | z = numpy.where(numpy.isfinite(z), z, numpy.NAN) |
|
732 | 731 | #zdB = 10*numpy.log10(z) |
|
733 | 732 | print ' ' |
|
734 | 733 | print 'Z: ' |
|
735 | 734 | print shape(z) |
|
736 | 735 | print ' ' |
|
737 | 736 | print ' ' |
|
738 | 737 | |
|
739 | 738 | self.dataOut.data_spc=self.data_spc |
|
740 | 739 | |
|
741 | 740 | self.noise = self.dataOut.getNoise(ymin_index=80, ymax_index=132)#/factor |
|
742 | 741 | #noisedB = 10*numpy.log10(self.noise) |
|
743 | 742 | |
|
744 | 743 | |
|
745 | 744 | ySamples=numpy.ones([3,self.nProfiles]) |
|
746 | 745 | phase=numpy.ones([3,self.nProfiles]) |
|
747 | 746 | CSPCSamples=numpy.ones([3,self.nProfiles],dtype=numpy.complex_) |
|
748 | 747 | coherence=numpy.ones([3,self.nProfiles]) |
|
749 | 748 | PhaseSlope=numpy.ones(3) |
|
750 | 749 | PhaseInter=numpy.ones(3) |
|
751 | 750 | |
|
752 | 751 | '''****** Getting CrossSpectra ******''' |
|
753 | 752 | cspc=self.data_block.copy() |
|
754 | 753 | self.data_cspc=self.data_block.copy() |
|
755 | 754 | |
|
756 | 755 | xFrec=self.getVelRange(1) |
|
757 | 756 | VelRange=self.getVelRange(1) |
|
758 | 757 | self.dataOut.VelRange=VelRange |
|
759 | 758 | #print ' ' |
|
760 | 759 | #print ' ' |
|
761 | 760 | #print 'xFrec',xFrec |
|
762 | 761 | #print ' ' |
|
763 | 762 | #print ' ' |
|
764 | 763 | #Height=35 |
|
765 | 764 | for i in range(self.nRdPairs): |
|
766 | 765 | |
|
767 | 766 | chan_index0 = self.dataOut.pairsList[i][0] |
|
768 | 767 | chan_index1 = self.dataOut.pairsList[i][1] |
|
769 | 768 | |
|
770 | 769 | self.data_cspc[i,:,:]=cspc[chan_index0,:,:] * numpy.conjugate(cspc[chan_index1,:,:]) |
|
771 | 770 | |
|
772 | 771 | |
|
773 | 772 | '''Getting Eij and Nij''' |
|
774 | 773 | (AntennaX0,AntennaY0)=pol2cart(rheader.AntennaCoord0, rheader.AntennaAngl0*numpy.pi/180) |
|
775 | 774 | (AntennaX1,AntennaY1)=pol2cart(rheader.AntennaCoord1, rheader.AntennaAngl1*numpy.pi/180) |
|
776 | 775 | (AntennaX2,AntennaY2)=pol2cart(rheader.AntennaCoord2, rheader.AntennaAngl2*numpy.pi/180) |
|
777 | 776 | |
|
778 | 777 | E01=AntennaX0-AntennaX1 |
|
779 | 778 | N01=AntennaY0-AntennaY1 |
|
780 | 779 | |
|
781 | 780 | E02=AntennaX0-AntennaX2 |
|
782 | 781 | N02=AntennaY0-AntennaY2 |
|
783 | 782 | |
|
784 | 783 | E12=AntennaX1-AntennaX2 |
|
785 | 784 | N12=AntennaY1-AntennaY2 |
|
786 | 785 | |
|
787 | 786 | self.ChanDist= numpy.array([[E01, N01],[E02,N02],[E12,N12]]) |
|
788 | 787 | |
|
789 | 788 | self.dataOut.ChanDist = self.ChanDist |
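# ChanDist holds the east/north baselines (Eij, Nij) between antenna pairs (0,1), (0,2)
# and (1,2), obtained by converting each antenna's polar coordinates (distance, angle in
# degrees) to Cartesian components; they were intended to feed the wind estimation in
# the commented-out block below.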
|
790 | 789 | |
|
791 | 790 | |
|
792 | 791 | # for Height in range(self.nHeights): |
|
793 | 792 | # |
|
794 | 793 | # for i in range(self.nRdPairs): |
|
795 | 794 | # |
|
796 | 795 | # '''****** Line of Data SPC ******''' |
|
797 | 796 | # zline=z[i,:,Height] |
|
798 | 797 | # |
|
799 | 798 | # '''****** DC is removed ******''' |
|
800 | 799 | # DC=Find(zline,numpy.amax(zline)) |
|
801 | 800 | # zline[DC]=(zline[DC-1]+zline[DC+1])/2 |
|
802 | 801 | # |
|
803 | 802 | # |
|
804 | 803 | # '''****** SPC is normalized ******''' |
|
805 | 804 | # FactNorm= zline.copy() / numpy.sum(zline.copy()) |
|
806 | 805 | # FactNorm= FactNorm/numpy.sum(FactNorm) |
|
807 | 806 | # |
|
808 | 807 | # SmoothSPC=moving_average(FactNorm,N=3) |
|
809 | 808 | # |
|
810 | 809 | # xSamples = ar(range(len(SmoothSPC))) |
|
811 | 810 | # ySamples[i] = SmoothSPC-self.noise[i] |
|
812 | 811 | # |
|
813 | 812 | # for i in range(self.nRdPairs): |
|
814 | 813 | # |
|
815 | 814 | # '''****** Line of Data CSPC ******''' |
|
816 | 815 | # cspcLine=self.data_cspc[i,:,Height].copy() |
|
817 | 816 | # |
|
818 | 817 | # |
|
819 | 818 | # |
|
820 | 819 | # '''****** CSPC is normalized ******''' |
|
821 | 820 | # chan_index0 = self.dataOut.pairsList[i][0] |
|
822 | 821 | # chan_index1 = self.dataOut.pairsList[i][1] |
|
823 | 822 | # CSPCFactor= numpy.sum(ySamples[chan_index0]) * numpy.sum(ySamples[chan_index1]) |
|
824 | 823 | # |
|
825 | 824 | # |
|
826 | 825 | # CSPCNorm= cspcLine.copy() / numpy.sqrt(CSPCFactor) |
|
827 | 826 | # |
|
828 | 827 | # |
|
829 | 828 | # CSPCSamples[i] = CSPCNorm-self.noise[i] |
|
830 | 829 | # coherence[i] = numpy.abs(CSPCSamples[i]) / numpy.sqrt(CSPCFactor) |
|
831 | 830 | # |
|
832 | 831 | # '''****** DC is removed ******''' |
|
833 | 832 | # DC=Find(coherence[i],numpy.amax(coherence[i])) |
|
834 | 833 | # coherence[i][DC]=(coherence[i][DC-1]+coherence[i][DC+1])/2 |
|
835 | 834 | # coherence[i]= moving_average(coherence[i],N=2) |
|
836 | 835 | # |
|
837 | 836 | # phase[i] = moving_average( numpy.arctan2(CSPCSamples[i].imag, CSPCSamples[i].real),N=1)#*180/numpy.pi |
|
838 | 837 | # |
|
839 | 838 | # |
|
840 | 839 | # '''****** Getting fij width ******''' |
|
841 | 840 | # |
|
842 | 841 | # yMean=[] |
|
843 | 842 | # yMean2=[] |
|
844 | 843 | # |
|
845 | 844 | # for j in range(len(ySamples[1])): |
|
846 | 845 | # yMean=numpy.append(yMean,numpy.average([ySamples[0,j],ySamples[1,j],ySamples[2,j]])) |
|
847 | 846 | # |
|
848 | 847 | # '''******* Getting fitting Gaussian ******''' |
|
849 | 848 | # meanGauss=sum(xSamples*yMean) / len(xSamples) |
|
850 | 849 | # sigma=sum(yMean*(xSamples-meanGauss)**2) / len(xSamples) |
|
851 | 850 | # #print 'Height',Height,'SNR', meanGauss/sigma**2 |
|
852 | 851 | # |
|
853 | 852 | # if (abs(meanGauss/sigma**2) > 0.0001) : |
|
854 | 853 | # |
|
855 | 854 | # try: |
|
856 | 855 | # popt,pcov = curve_fit(gaus,xSamples,yMean,p0=[1,meanGauss,sigma]) |
|
857 | 856 | # |
|
858 | 857 | # if numpy.amax(popt)>numpy.amax(yMean)*0.3: |
|
859 | 858 | # FitGauss=gaus(xSamples,*popt) |
|
860 | 859 | # |
|
861 | 860 | # else: |
|
862 | 861 | # FitGauss=numpy.ones(len(xSamples))*numpy.mean(yMean) |
|
863 | 862 | # print 'Verificador: Dentro', Height |
|
864 | 863 | # except RuntimeError: |
|
865 | 864 | # |
|
866 | 865 | # try: |
|
867 | 866 | # for j in range(len(ySamples[1])): |
|
868 | 867 | # yMean2=numpy.append(yMean2,numpy.average([ySamples[1,j],ySamples[2,j]])) |
|
869 | 868 | # popt,pcov = curve_fit(gaus,xSamples,yMean2,p0=[1,meanGauss,sigma]) |
|
870 | 869 | # FitGauss=gaus(xSamples,*popt) |
|
871 | 870 | # print 'Verificador: Exepcion1', Height |
|
872 | 871 | # except RuntimeError: |
|
873 | 872 | # |
|
874 | 873 | # try: |
|
875 | 874 | # popt,pcov = curve_fit(gaus,xSamples,ySamples[1],p0=[1,meanGauss,sigma]) |
|
876 | 875 | # FitGauss=gaus(xSamples,*popt) |
|
877 | 876 | # print 'Verificador: Exepcion2', Height |
|
878 | 877 | # except RuntimeError: |
|
879 | 878 | # FitGauss=numpy.ones(len(xSamples))*numpy.mean(yMean) |
|
880 | 879 | # print 'Verificador: Exepcion3', Height |
|
881 | 880 | # else: |
|
882 | 881 | # FitGauss=numpy.ones(len(xSamples))*numpy.mean(yMean) |
|
883 | 882 | # #print 'Verificador: Fuera', Height |
|
884 | 883 | # |
|
885 | 884 | # |
|
886 | 885 | # |
|
887 | 886 | # Maximun=numpy.amax(yMean) |
|
888 | 887 | # eMinus1=Maximun*numpy.exp(-1) |
|
889 | 888 | # |
|
890 | 889 | # HWpos=Find(FitGauss,min(FitGauss, key=lambda value:abs(value-eMinus1))) |
|
891 | 890 | # HalfWidth= xFrec[HWpos] |
|
892 | 891 | # GCpos=Find(FitGauss, numpy.amax(FitGauss)) |
|
893 | 892 | # Vpos=Find(FactNorm, numpy.amax(FactNorm)) |
|
894 | 893 | # #Vpos=numpy.sum(FactNorm)/len(FactNorm) |
|
895 | 894 | # #Vpos=Find(FactNorm, min(FactNorm, key=lambda value:abs(value- numpy.mean(FactNorm) ))) |
|
896 | 895 | # #print 'GCpos',GCpos, numpy.amax(FitGauss), 'HWpos',HWpos |
|
897 | 896 | # '''****** Getting Fij ******''' |
|
898 | 897 | # |
|
899 | 898 | # GaussCenter=xFrec[GCpos] |
|
900 | 899 | # if (GaussCenter<0 and HalfWidth>0) or (GaussCenter>0 and HalfWidth<0): |
|
901 | 900 | # Fij=abs(GaussCenter)+abs(HalfWidth)+0.0000001 |
|
902 | 901 | # else: |
|
903 | 902 | # Fij=abs(GaussCenter-HalfWidth)+0.0000001 |
|
904 | 903 | # |
|
905 | 904 | # '''****** Getting Frecuency range of significant data ******''' |
|
906 | 905 | # |
|
907 | 906 | # Rangpos=Find(FitGauss,min(FitGauss, key=lambda value:abs(value-Maximun*0.10))) |
|
908 | 907 | # |
|
909 | 908 | # if Rangpos<GCpos: |
|
910 | 909 | # Range=numpy.array([Rangpos,2*GCpos-Rangpos]) |
|
911 | 910 | # else: |
|
912 | 911 | # Range=numpy.array([2*GCpos-Rangpos,Rangpos]) |
|
913 | 912 | # |
|
914 | 913 | # FrecRange=xFrec[Range[0]:Range[1]] |
|
915 | 914 | # |
|
916 | 915 | # #print 'FrecRange', FrecRange |
|
917 | 916 | # '''****** Getting SCPC Slope ******''' |
|
918 | 917 | # |
|
919 | 918 | # for i in range(self.nRdPairs): |
|
920 | 919 | # |
|
921 | 920 | # if len(FrecRange)>5 and len(FrecRange)<self.nProfiles*0.5: |
|
922 | 921 | # PhaseRange=moving_average(phase[i,Range[0]:Range[1]],N=3) |
|
923 | 922 | # |
|
924 | 923 | # slope, intercept, r_value, p_value, std_err = stats.linregress(FrecRange,PhaseRange) |
|
925 | 924 | # PhaseSlope[i]=slope |
|
926 | 925 | # PhaseInter[i]=intercept |
|
927 | 926 | # else: |
|
928 | 927 | # PhaseSlope[i]=0 |
|
929 | 928 | # PhaseInter[i]=0 |
|
930 | 929 | # |
|
931 | 930 | # # plt.figure(i+15) |
|
932 | 931 | # # plt.title('FASE ( CH%s*CH%s )' %(self.dataOut.pairsList[i][0],self.dataOut.pairsList[i][1])) |
|
933 | 932 | # # plt.xlabel('Frecuencia (KHz)') |
|
934 | 933 | # # plt.ylabel('Magnitud') |
|
935 | 934 | # # #plt.subplot(311+i) |
|
936 | 935 | # # plt.plot(FrecRange,PhaseRange,'b') |
|
937 | 936 | # # plt.plot(FrecRange,FrecRange*PhaseSlope[i]+PhaseInter[i],'r') |
|
938 | 937 | # |
|
939 | 938 | # #plt.axis([-0.6, 0.2, -3.2, 3.2]) |
|
940 | 939 | # |
|
941 | 940 | # |
|
942 | 941 | # '''Getting constant C''' |
|
943 | 942 | # cC=(Fij*numpy.pi)**2 |
|
944 | 943 | # |
|
945 | 944 | # # '''Getting Eij and Nij''' |
|
946 | 945 | # # (AntennaX0,AntennaY0)=pol2cart(rheader.AntennaCoord0, rheader.AntennaAngl0*numpy.pi/180) |
|
947 | 946 | # # (AntennaX1,AntennaY1)=pol2cart(rheader.AntennaCoord1, rheader.AntennaAngl1*numpy.pi/180) |
|
948 | 947 | # # (AntennaX2,AntennaY2)=pol2cart(rheader.AntennaCoord2, rheader.AntennaAngl2*numpy.pi/180) |
|
949 | 948 | # # |
|
950 | 949 | # # E01=AntennaX0-AntennaX1 |
|
951 | 950 | # # N01=AntennaY0-AntennaY1 |
|
952 | 951 | # # |
|
953 | 952 | # # E02=AntennaX0-AntennaX2 |
|
954 | 953 | # # N02=AntennaY0-AntennaY2 |
|
955 | 954 | # # |
|
956 | 955 | # # E12=AntennaX1-AntennaX2 |
|
957 | 956 | # # N12=AntennaY1-AntennaY2 |
|
958 | 957 | # |
|
959 | 958 | # '''****** Getting constants F and G ******''' |
|
960 | 959 | # MijEijNij=numpy.array([[E02,N02], [E12,N12]]) |
|
961 | 960 | # MijResult0=(-PhaseSlope[1]*cC) / (2*numpy.pi) |
|
962 | 961 | # MijResult1=(-PhaseSlope[2]*cC) / (2*numpy.pi) |
|
963 | 962 | # MijResults=numpy.array([MijResult0,MijResult1]) |
|
964 | 963 | # (cF,cG) = numpy.linalg.solve(MijEijNij, MijResults) |
|
965 | 964 | # |
|
966 | 965 | # '''****** Getting constants A, B and H ******''' |
|
967 | 966 | # W01=numpy.amax(coherence[0]) |
|
968 | 967 | # W02=numpy.amax(coherence[1]) |
|
969 | 968 | # W12=numpy.amax(coherence[2]) |
|
970 | 969 | # |
|
971 | 970 | # WijResult0=((cF*E01+cG*N01)**2)/cC - numpy.log(W01 / numpy.sqrt(numpy.pi/cC)) |
|
972 | 971 | # WijResult1=((cF*E02+cG*N02)**2)/cC - numpy.log(W02 / numpy.sqrt(numpy.pi/cC)) |
|
973 | 972 | # WijResult2=((cF*E12+cG*N12)**2)/cC - numpy.log(W12 / numpy.sqrt(numpy.pi/cC)) |
|
974 | 973 | # |
|
975 | 974 | # WijResults=numpy.array([WijResult0, WijResult1, WijResult2]) |
|
976 | 975 | # |
|
977 | 976 | # WijEijNij=numpy.array([ [E01**2, N01**2, 2*E01*N01] , [E02**2, N02**2, 2*E02*N02] , [E12**2, N12**2, 2*E12*N12] ]) |
|
978 | 977 | # (cA,cB,cH) = numpy.linalg.solve(WijEijNij, WijResults) |
|
979 | 978 | # |
|
980 | 979 | # VxVy=numpy.array([[cA,cH],[cH,cB]]) |
|
981 | 980 | # |
|
982 | 981 | # VxVyResults=numpy.array([-cF,-cG]) |
|
983 | 982 | # (Vx,Vy) = numpy.linalg.solve(VxVy, VxVyResults) |
|
984 | 983 | # Vzon = Vy |
|
985 | 984 | # Vmer = Vx |
|
986 | 985 | # Vmag=numpy.sqrt(Vzon**2+Vmer**2) |
|
987 | 986 | # Vang=numpy.arctan2(Vmer,Vzon) |
|
988 | 987 | # |
|
989 | 988 | # if abs(Vy)<100 and abs(Vy)> 0.: |
|
990 | 989 | # self.dataOut.velocityX=numpy.append(self.dataOut.velocityX, Vzon) #Vmag |
|
991 | 990 | # #print 'Vmag',Vmag |
|
992 | 991 | # else: |
|
993 | 992 | # self.dataOut.velocityX=numpy.append(self.dataOut.velocityX, NaN) |
|
994 | 993 | # |
|
995 | 994 | # if abs(Vx)<100 and abs(Vx) > 0.: |
|
996 | 995 | # self.dataOut.velocityY=numpy.append(self.dataOut.velocityY, Vmer) #Vang |
|
997 | 996 | # #print 'Vang',Vang |
|
998 | 997 | # else: |
|
999 | 998 | # self.dataOut.velocityY=numpy.append(self.dataOut.velocityY, NaN) |
|
1000 | 999 | # |
|
1001 | 1000 | # if abs(GaussCenter)<2: |
|
1002 | 1001 | # self.dataOut.velocityV=numpy.append(self.dataOut.velocityV, xFrec[Vpos]) |
|
1003 | 1002 | # |
|
1004 | 1003 | # else: |
|
1005 | 1004 | # self.dataOut.velocityV=numpy.append(self.dataOut.velocityV, NaN) |
|
1006 | 1005 | # |
|
1007 | 1006 | # |
|
1008 | 1007 | # # print '********************************************' |
|
1009 | 1008 | # # print 'HalfWidth ', HalfWidth |
|
1010 | 1009 | # # print 'Maximun ', Maximun |
|
1011 | 1010 | # # print 'eMinus1 ', eMinus1 |
|
1012 | 1011 | # # print 'Rangpos ', Rangpos |
|
1013 | 1012 | # # print 'GaussCenter ',GaussCenter |
|
1014 | 1013 | # # print 'E01 ',E01 |
|
1015 | 1014 | # # print 'N01 ',N01 |
|
1016 | 1015 | # # print 'E02 ',E02 |
|
1017 | 1016 | # # print 'N02 ',N02 |
|
1018 | 1017 | # # print 'E12 ',E12 |
|
1019 | 1018 | # # print 'N12 ',N12 |
|
1020 | 1019 | # #print 'self.dataOut.velocityX ', self.dataOut.velocityX |
|
1021 | 1020 | # # print 'Fij ', Fij |
|
1022 | 1021 | # # print 'cC ', cC |
|
1023 | 1022 | # # print 'cF ', cF |
|
1024 | 1023 | # # print 'cG ', cG |
|
1025 | 1024 | # # print 'cA ', cA |
|
1026 | 1025 | # # print 'cB ', cB |
|
1027 | 1026 | # # print 'cH ', cH |
|
1028 | 1027 | # # print 'Vx ', Vx |
|
1029 | 1028 | # # print 'Vy ', Vy |
|
1030 | 1029 | # # print 'Vmag ', Vmag |
|
1031 | 1030 | # # print 'Vang ', Vang*180/numpy.pi |
|
1032 | 1031 | # # print 'PhaseSlope ',PhaseSlope[0] |
|
1033 | 1032 | # # print 'PhaseSlope ',PhaseSlope[1] |
|
1034 | 1033 | # # print 'PhaseSlope ',PhaseSlope[2] |
|
1035 | 1034 | # # print '********************************************' |
|
1036 | 1035 | # #print 'data_output',shape(self.dataOut.velocityX), shape(self.dataOut.velocityY) |
|
1037 | 1036 | # |
|
1038 | 1037 | # #print 'self.dataOut.velocityX', len(self.dataOut.velocityX) |
|
1039 | 1038 | # #print 'self.dataOut.velocityY', len(self.dataOut.velocityY) |
|
1040 | 1039 | # #print 'self.dataOut.velocityV', self.dataOut.velocityV |
|
1041 | 1040 | # |
|
1042 | 1041 | # self.data_output[0]=numpy.array(self.dataOut.velocityX) |
|
1043 | 1042 | # self.data_output[1]=numpy.array(self.dataOut.velocityY) |
|
1044 | 1043 | # self.data_output[2]=numpy.array(self.dataOut.velocityV) |
|
1045 | 1044 | # |
|
1046 | 1045 | # prin= self.data_output[0][~numpy.isnan(self.data_output[0])] |
|
1047 | 1046 | # print ' ' |
|
1048 | 1047 | # print 'VmagAverage',numpy.mean(prin) |
|
1049 | 1048 | # print ' ' |
|
1050 | 1049 | # # plt.figure(5) |
|
1051 | 1050 | # # plt.subplot(211) |
|
1052 | 1051 | # # plt.plot(self.dataOut.velocityX,'yo:') |
|
1053 | 1052 | # # plt.subplot(212) |
|
1054 | 1053 | # # plt.plot(self.dataOut.velocityY,'yo:') |
|
1055 | 1054 | # |
|
1056 | 1055 | # # plt.figure(1) |
|
1057 | 1056 | # # # plt.subplot(121) |
|
1058 | 1057 | # # # plt.plot(xFrec,ySamples[0],'k',label='Ch0') |
|
1059 | 1058 | # # # plt.plot(xFrec,ySamples[1],'g',label='Ch1') |
|
1060 | 1059 | # # # plt.plot(xFrec,ySamples[2],'r',label='Ch2') |
|
1061 | 1060 | # # # plt.plot(xFrec,FitGauss,'yo:',label='fit') |
|
1062 | 1061 | # # # plt.legend() |
|
1063 | 1062 | # # plt.title('DATOS A ALTURA DE 2850 METROS') |
|
1064 | 1063 | # # |
|
1065 | 1064 | # # plt.xlabel('Frecuencia (KHz)') |
|
1066 | 1065 | # # plt.ylabel('Magnitud') |
|
1067 | 1066 | # # # plt.subplot(122) |
|
1068 | 1067 | # # # plt.title('Fit for Time Constant') |
|
1069 | 1068 | # # #plt.plot(xFrec,zline) |
|
1070 | 1069 | # # #plt.plot(xFrec,SmoothSPC,'g') |
|
1071 | 1070 | # # plt.plot(xFrec,FactNorm) |
|
1072 | 1071 | # # plt.axis([-4, 4, 0, 0.15]) |
|
1073 | 1072 | # # # plt.xlabel('SelfSpectra KHz') |
|
1074 | 1073 | # # |
|
1075 | 1074 | # # plt.figure(10) |
|
1076 | 1075 | # # # plt.subplot(121) |
|
1077 | 1076 | # # plt.plot(xFrec,ySamples[0],'b',label='Ch0') |
|
1078 | 1077 | # # plt.plot(xFrec,ySamples[1],'y',label='Ch1') |
|
1079 | 1078 | # # plt.plot(xFrec,ySamples[2],'r',label='Ch2') |
|
1080 | 1079 | # # # plt.plot(xFrec,FitGauss,'yo:',label='fit') |
|
1081 | 1080 | # # plt.legend() |
|
1082 | 1081 | # # plt.title('SELFSPECTRA EN CANALES') |
|
1083 | 1082 | # # |
|
1084 | 1083 | # # plt.xlabel('Frecuencia (KHz)') |
|
1085 | 1084 | # # plt.ylabel('Magnitud') |
|
1086 | 1085 | # # # plt.subplot(122) |
|
1087 | 1086 | # # # plt.title('Fit for Time Constant') |
|
1088 | 1087 | # # #plt.plot(xFrec,zline) |
|
1089 | 1088 | # # #plt.plot(xFrec,SmoothSPC,'g') |
|
1090 | 1089 | # # # plt.plot(xFrec,FactNorm) |
|
1091 | 1090 | # # # plt.axis([-4, 4, 0, 0.15]) |
|
1092 | 1091 | # # # plt.xlabel('SelfSpectra KHz') |
|
1093 | 1092 | # # |
|
1094 | 1093 | # # plt.figure(9) |
|
1095 | 1094 | # # |
|
1096 | 1095 | # # |
|
1097 | 1096 | # # plt.title('DATOS SUAVIZADOS') |
|
1098 | 1097 | # # plt.xlabel('Frecuencia (KHz)') |
|
1099 | 1098 | # # plt.ylabel('Magnitud') |
|
1100 | 1099 | # # plt.plot(xFrec,SmoothSPC,'g') |
|
1101 | 1100 | # # |
|
1102 | 1101 | # # #plt.plot(xFrec,FactNorm) |
|
1103 | 1102 | # # plt.axis([-4, 4, 0, 0.15]) |
|
1104 | 1103 | # # # plt.xlabel('SelfSpectra KHz') |
|
1105 | 1104 | # # # |
|
1106 | 1105 | # # plt.figure(2) |
|
1107 | 1106 | # # # #plt.subplot(121) |
|
1108 | 1107 | # # plt.plot(xFrec,yMean,'r',label='Mean SelfSpectra') |
|
1109 | 1108 | # # plt.plot(xFrec,FitGauss,'yo:',label='Ajuste Gaussiano') |
|
1110 | 1109 | # # # plt.plot(xFrec[Rangpos],FitGauss[Find(FitGauss,min(FitGauss, key=lambda value:abs(value-Maximun*0.1)))],'bo') |
|
1111 | 1110 | # # # #plt.plot(xFrec,phase) |
|
1112 | 1111 | # # # plt.xlabel('Suavizado, promediado KHz') |
|
1113 | 1112 | # # plt.title('SELFSPECTRA PROMEDIADO') |
|
1114 | 1113 | # # # #plt.subplot(122) |
|
1115 | 1114 | # # # #plt.plot(xSamples,zline) |
|
1116 | 1115 | # # plt.xlabel('Frecuencia (KHz)') |
|
1117 | 1116 | # # plt.ylabel('Magnitud') |
|
1118 | 1117 | # # plt.legend() |
|
1119 | 1118 | # # # |
|
1120 | 1119 | # # # plt.figure(3) |
|
1121 | 1120 | # # # plt.subplot(311) |
|
1122 | 1121 | # # # #plt.plot(xFrec,phase[0]) |
|
1123 | 1122 | # # # plt.plot(xFrec,phase[0],'g') |
|
1124 | 1123 | # # # plt.subplot(312) |
|
1125 | 1124 | # # # plt.plot(xFrec,phase[1],'g') |
|
1126 | 1125 | # # # plt.subplot(313) |
|
1127 | 1126 | # # # plt.plot(xFrec,phase[2],'g') |
|
1128 | 1127 | # # # #plt.plot(xFrec,phase[2]) |
|
1129 | 1128 | # # # |
|
1130 | 1129 | # # # plt.figure(4) |
|
1131 | 1130 | # # # |
|
1132 | 1131 | # # # plt.plot(xSamples,coherence[0],'b') |
|
1133 | 1132 | # # # plt.plot(xSamples,coherence[1],'r') |
|
1134 | 1133 | # # # plt.plot(xSamples,coherence[2],'g') |
|
1135 | 1134 | # # plt.show() |
|
1136 | 1135 | # # # |
|
1137 | 1136 | # # # plt.clf() |
|
1138 | 1137 | # # # plt.cla() |
|
1139 | 1138 | # # # plt.close() |
|
1140 | 1139 | # |
|
1141 | 1140 | # print ' ' |
|
1142 | 1141 | |
|
1143 | 1142 | |
|
1144 | 1143 | |
|
1145 | 1144 | self.BlockCounter+=2 |
|
1146 | 1145 | |
|
1147 | 1146 | else: |
|
1148 | 1147 | self.fileSelector+=1 |
|
1149 | 1148 | self.BlockCounter=0 |
|
1150 | 1149 | print "Next File" |
|
1151 | 1150 | |
|
1152 | 1151 | |
|
1153 | 1152 | |
|
1154 | class BLTRWriter(ProcessingUnit): | |
|
1155 | ''' | |
|
1156 | classdocs | |
|
1157 | ''' | |
|
1158 | ||
|
1159 | def __init__(self): | |
|
1160 | ''' | |
|
1161 | Constructor | |
|
1162 | ''' | |
|
1163 | self.dataOut = None | |
|
1164 | ||
|
1165 | self.isConfig = False | |
|
1166 | ||
|
1167 | def setup(self, dataIn, path, blocksPerFile, set=0, ext=None): | |
|
1168 | ''' | |
|
1169 | In this method we should set all initial parameters. | |
|
1170 | ||
|
1171 | Input: | |
|
1172 | dataIn : Input data, which will also be the output data |
|
1173 | ||
|
1174 | ''' | |
|
1175 | self.dataOut = dataIn | |
|
1176 | ||
|
1177 | self.isConfig = True | |
|
1178 | ||
|
1179 | return | |
|
1180 | ||
|
1181 | def run(self, dataIn, **kwargs): | |
|
1182 | ''' | |
|
1183 | This method will be called many times so here you should put all your code | |
|
1184 | ||
|
1185 | Inputs: | |
|
1186 | ||
|
1187 | dataIn : object with the data | |
|
1188 | ||
|
1189 | ''' | |
|
1190 | ||
|
1191 | if not self.isConfig: | |
|
1192 | self.setup(dataIn, **kwargs) | |
|
1153 | ||
|
1193 | 1154 |
|
@@ -1,804 +1,803 | |||
|
1 | 1 | import os, sys |
|
2 | 2 | import glob |
|
3 | 3 | import fnmatch |
|
4 | 4 | import datetime |
|
5 | 5 | import time |
|
6 | 6 | import re |
|
7 | 7 | import h5py |
|
8 | 8 | import numpy |
|
9 | 9 | import matplotlib.pyplot as plt |
|
10 | 10 | |
|
11 | 11 | import pylab as plb |
|
12 | 12 | from scipy.optimize import curve_fit |
|
13 | 13 | from scipy import asarray as ar,exp |
|
14 | 14 | from scipy import stats |
|
15 | 15 | |
|
16 | from duplicity.path import Path | |
|
17 | 16 | from numpy.ma.core import getdata |
|
18 | 17 | |
|
19 | 18 | SPEED_OF_LIGHT = 299792458 |
|
20 | 19 | SPEED_OF_LIGHT = 3e8 |
|
21 | 20 | |
|
22 | 21 | try: |
|
23 | 22 | from gevent import sleep |
|
24 | 23 | except: |
|
25 | 24 | from time import sleep |
|
26 | 25 | |
|
27 | 26 | from schainpy.model.data.jrodata import Spectra |
|
28 | 27 | #from schainpy.model.data.BLTRheaderIO import FileHeader, RecordHeader |
|
29 | 28 | from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation |
|
30 | 29 | #from schainpy.model.io.jroIO_bltr import BLTRReader |
|
31 | 30 | from numpy import imag, shape, NaN, empty |
|
32 | 31 | |
|
33 | 32 | |
|
34 | 33 | |
|
35 | 34 | class Header(object): |
|
36 | 35 | |
|
37 | 36 | def __init__(self): |
|
38 | 37 | raise NotImplementedError |
|
39 | 38 | |
|
40 | 39 | |
|
41 | 40 | def read(self): |
|
42 | 41 | |
|
43 | 42 | raise NotImplementedError |
|
44 | 43 | |
|
45 | 44 | def write(self): |
|
46 | 45 | |
|
47 | 46 | raise NotImplementedError |
|
48 | 47 | |
|
49 | 48 | def printInfo(self): |
|
50 | 49 | |
|
51 | 50 | message = "#"*50 + "\n" |
|
52 | 51 | message += self.__class__.__name__.upper() + "\n" |
|
53 | 52 | message += "#"*50 + "\n" |
|
54 | 53 | |
|
55 | 54 | keyList = self.__dict__.keys() |
|
56 | 55 | keyList.sort() |
|
57 | 56 | |
|
58 | 57 | for key in keyList: |
|
59 | 58 | message += "%s = %s" %(key, self.__dict__[key]) + "\n" |
|
60 | 59 | |
|
61 | 60 | if "size" not in keyList: |
|
62 | 61 | attr = getattr(self, "size") |
|
63 | 62 | |
|
64 | 63 | if attr: |
|
65 | 64 | message += "%s = %s" %("size", attr) + "\n" |
|
66 | 65 | |
|
67 | 66 | #print message |
|
68 | 67 | |
|
69 | 68 | |
|
70 | 69 | FILE_HEADER = numpy.dtype([ #HEADER 1024bytes |
|
71 | 70 | ('Hname','a32'), #Original file name |
|
72 | 71 | ('Htime',numpy.str_,32), #Date and time when the file was created |
|
73 | 72 | ('Hoper',numpy.str_,64), #Name of operator who created the file |
|
74 | 73 | ('Hplace',numpy.str_,128), #Place where the measurements was carried out |
|
75 | 74 | ('Hdescr',numpy.str_,256), #Description of measurements |
|
76 | 75 | ('Hdummy',numpy.str_,512), #Reserved space |
|
77 | 76 | #Main chunk 8bytes |
|
78 | 77 | ('Msign',numpy.str_,4), #Main chunk signature FZKF or NUIG |
|
79 | 78 | ('MsizeData','<i4'), #Size of data block main chunk |
|
80 | 79 | #Processing DSP parameters 36bytes |
|
81 | 80 | ('PPARsign',numpy.str_,4), #PPAR signature |
|
82 | 81 | ('PPARsize','<i4'), #PPAR size of block |
|
83 | 82 | ('PPARprf','<i4'), #Pulse repetition frequency |
|
84 | 83 | ('PPARpdr','<i4'), #Pulse duration |
|
85 | 84 | ('PPARsft','<i4'), #FFT length |
|
86 | 85 | ('PPARavc','<i4'), #Number of spectral (in-coherent) averages |
|
87 | 86 | ('PPARihp','<i4'), #Number of lowest range gate for moment estimation |
|
88 | 87 | ('PPARchg','<i4'), #Count for gates for moment estimation |
|
89 | 88 | ('PPARpol','<i4'), #switch on/off polarimetric measurements. Should be 1. |
|
90 | 89 | #Service DSP parameters 112bytes |
|
91 | 90 | ('SPARatt','<i4'), #STC attenuation on the lowest ranges on/off |
|
92 | 91 | ('SPARtx','<i4'), #OBSOLETE |
|
93 | 92 | ('SPARaddGain0','<f4'), #OBSOLETE |
|
94 | 93 | ('SPARaddGain1','<f4'), #OBSOLETE |
|
95 | 94 | ('SPARwnd','<i4'), #Debug only. It normal mode it is 0. |
|
96 | 95 | ('SPARpos','<i4'), #Delay between sync pulse and tx pulse for phase corr, ns |
|
97 | 96 | ('SPARadd','<i4'), #"add to pulse" to compensate for delay between the leading edge of driver pulse and envelope of the RF signal. |
|
98 | 97 | ('SPARlen','<i4'), #Time for measuring txn pulse phase. OBSOLETE |
|
99 | 98 | ('SPARcal','<i4'), #OBSOLETE |
|
100 | 99 | ('SPARnos','<i4'), #OBSOLETE |
|
101 | 100 | ('SPARof0','<i4'), #detection threshold |
|
102 | 101 | ('SPARof1','<i4'), #OBSOLETE |
|
103 | 102 | ('SPARswt','<i4'), #2nd moment estimation threshold |
|
104 | 103 | ('SPARsum','<i4'), #OBSOLETE |
|
105 | 104 | ('SPARosc','<i4'), #flag Oscillosgram mode |
|
106 | 105 | ('SPARtst','<i4'), #OBSOLETE |
|
107 | 106 | ('SPARcor','<i4'), #OBSOLETE |
|
108 | 107 | ('SPARofs','<i4'), #OBSOLETE |
|
109 | 108 | ('SPARhsn','<i4'), #Hildebrand div noise detection on noise gate |
|
110 | 109 | ('SPARhsa','<f4'), #Hildebrand div noise detection on all gates |
|
111 | 110 | ('SPARcalibPow_M','<f4'), #OBSOLETE |
|
112 | 111 | ('SPARcalibSNR_M','<f4'), #OBSOLETE |
|
113 | 112 | ('SPARcalibPow_S','<f4'), #OBSOLETE |
|
114 | 113 | ('SPARcalibSNR_S','<f4'), #OBSOLETE |
|
115 | 114 | ('SPARrawGate1','<i4'), #Lowest range gate for spectra saving Raw_Gate1 >=5 |
|
116 | 115 | ('SPARrawGate2','<i4'), #Number of range gates with atmospheric signal |
|
117 | 116 | ('SPARraw','<i4'), #flag - IQ or spectra saving on/off |
|
118 | 117 | ('SPARprc','<i4'),]) #flag - Moment estimation switched on/off |
|
119 | 118 | |
|
120 | 119 | |
|
121 | 120 | |
|
122 | 121 | class FileHeaderMIRA35c(Header): |
|
123 | 122 | |
|
124 | 123 | def __init__(self): |
|
125 | 124 | |
|
126 | 125 | self.Hname= None |
|
127 | 126 | self.Htime= None |
|
128 | 127 | self.Hoper= None |
|
129 | 128 | self.Hplace= None |
|
130 | 129 | self.Hdescr= None |
|
131 | 130 | self.Hdummy= None |
|
132 | 131 | |
|
133 | 132 | self.Msign=None |
|
134 | 133 | self.MsizeData=None |
|
135 | 134 | |
|
136 | 135 | self.PPARsign=None |
|
137 | 136 | self.PPARsize=None |
|
138 | 137 | self.PPARprf=None |
|
139 | 138 | self.PPARpdr=None |
|
140 | 139 | self.PPARsft=None |
|
141 | 140 | self.PPARavc=None |
|
142 | 141 | self.PPARihp=None |
|
143 | 142 | self.PPARchg=None |
|
144 | 143 | self.PPARpol=None |
|
145 | 144 | #Service DSP parameters |
|
146 | 145 | self.SPARatt=None |
|
147 | 146 | self.SPARtx=None |
|
148 | 147 | self.SPARaddGain0=None |
|
149 | 148 | self.SPARaddGain1=None |
|
150 | 149 | self.SPARwnd=None |
|
151 | 150 | self.SPARpos=None |
|
152 | 151 | self.SPARadd=None |
|
153 | 152 | self.SPARlen=None |
|
154 | 153 | self.SPARcal=None |
|
155 | 154 | self.SPARnos=None |
|
156 | 155 | self.SPARof0=None |
|
157 | 156 | self.SPARof1=None |
|
158 | 157 | self.SPARswt=None |
|
159 | 158 | self.SPARsum=None |
|
160 | 159 | self.SPARosc=None |
|
161 | 160 | self.SPARtst=None |
|
162 | 161 | self.SPARcor=None |
|
163 | 162 | self.SPARofs=None |
|
164 | 163 | self.SPARhsn=None |
|
165 | 164 | self.SPARhsa=None |
|
166 | 165 | self.SPARcalibPow_M=None |
|
167 | 166 | self.SPARcalibSNR_M=None |
|
168 | 167 | self.SPARcalibPow_S=None |
|
169 | 168 | self.SPARcalibSNR_S=None |
|
170 | 169 | self.SPARrawGate1=None |
|
171 | 170 | self.SPARrawGate2=None |
|
172 | 171 | self.SPARraw=None |
|
173 | 172 | self.SPARprc=None |
|
174 | 173 | |
|
175 | 174 | self.FHsize=1180 |
|
176 | 175 | |
|
177 | 176 | def FHread(self, fp): |
|
178 | 177 | |
|
179 | 178 | header = numpy.fromfile(fp, FILE_HEADER,1) |
|
180 | 179 | ''' numpy.fromfile(file, dtype, count, sep='') |
|
181 | 180 | file : file or str |
|
182 | 181 | Open file object or filename. |
|
183 | 182 | |
|
184 | 183 | dtype : data-type |
|
185 | 184 | Data type of the returned array. For binary files, it is used to determine |
|
186 | 185 | the size and byte-order of the items in the file. |
|
187 | 186 | |
|
188 | 187 | count : int |
|
189 | 188 | Number of items to read. -1 means all items (i.e., the complete file). |
|
190 | 189 | |
|
191 | 190 | sep : str |
|
192 | 191 | Separator between items if file is a text file. Empty ("") separator means |
|
193 | 192 | the file should be treated as binary. Spaces (" ") in the separator match zero |
|
194 | 193 | or more whitespace characters. A separator consisting only of spaces must match |
|
195 | 194 | at least one whitespace. |
|
196 | 195 | |
|
197 | 196 | ''' |
|
198 | 197 | |
|
199 | 198 | |
|
200 | 199 | self.Hname= str(header['Hname'][0]) |
|
201 | 200 | self.Htime= str(header['Htime'][0]) |
|
202 | 201 | self.Hoper= str(header['Hoper'][0]) |
|
203 | 202 | self.Hplace= str(header['Hplace'][0]) |
|
204 | 203 | self.Hdescr= str(header['Hdescr'][0]) |
|
205 | 204 | self.Hdummy= str(header['Hdummy'][0]) |
|
206 | 205 | #1024 |
|
207 | 206 | |
|
208 | 207 | self.Msign=str(header['Msign'][0]) |
|
209 | 208 | self.MsizeData=header['MsizeData'][0] |
|
210 | 209 | #8 |
|
211 | 210 | |
|
212 | 211 | self.PPARsign=str(header['PPARsign'][0]) |
|
213 | 212 | self.PPARsize=header['PPARsize'][0] |
|
214 | 213 | self.PPARprf=header['PPARprf'][0] |
|
215 | 214 | self.PPARpdr=header['PPARpdr'][0] |
|
216 | 215 | self.PPARsft=header['PPARsft'][0] |
|
217 | 216 | self.PPARavc=header['PPARavc'][0] |
|
218 | 217 | self.PPARihp=header['PPARihp'][0] |
|
219 | 218 | self.PPARchg=header['PPARchg'][0] |
|
220 | 219 | self.PPARpol=header['PPARpol'][0] |
|
221 | 220 | #Service DSP parameters |
|
222 | 221 | #36 |
|
223 | 222 | |
|
224 | 223 | self.SPARatt=header['SPARatt'][0] |
|
225 | 224 | self.SPARtx=header['SPARtx'][0] |
|
226 | 225 | self.SPARaddGain0=header['SPARaddGain0'][0] |
|
227 | 226 | self.SPARaddGain1=header['SPARaddGain1'][0] |
|
228 | 227 | self.SPARwnd=header['SPARwnd'][0] |
|
229 | 228 | self.SPARpos=header['SPARpos'][0] |
|
230 | 229 | self.SPARadd=header['SPARadd'][0] |
|
231 | 230 | self.SPARlen=header['SPARlen'][0] |
|
232 | 231 | self.SPARcal=header['SPARcal'][0] |
|
233 | 232 | self.SPARnos=header['SPARnos'][0] |
|
234 | 233 | self.SPARof0=header['SPARof0'][0] |
|
235 | 234 | self.SPARof1=header['SPARof1'][0] |
|
236 | 235 | self.SPARswt=header['SPARswt'][0] |
|
237 | 236 | self.SPARsum=header['SPARsum'][0] |
|
238 | 237 | self.SPARosc=header['SPARosc'][0] |
|
239 | 238 | self.SPARtst=header['SPARtst'][0] |
|
240 | 239 | self.SPARcor=header['SPARcor'][0] |
|
241 | 240 | self.SPARofs=header['SPARofs'][0] |
|
242 | 241 | self.SPARhsn=header['SPARhsn'][0] |
|
243 | 242 | self.SPARhsa=header['SPARhsa'][0] |
|
244 | 243 | self.SPARcalibPow_M=header['SPARcalibPow_M'][0] |
|
245 | 244 | self.SPARcalibSNR_M=header['SPARcalibSNR_M'][0] |
|
246 | 245 | self.SPARcalibPow_S=header['SPARcalibPow_S'][0] |
|
247 | 246 | self.SPARcalibSNR_S=header['SPARcalibSNR_S'][0] |
|
248 | 247 | self.SPARrawGate1=header['SPARrawGate1'][0] |
|
249 | 248 | self.SPARrawGate2=header['SPARrawGate2'][0] |
|
250 | 249 | self.SPARraw=header['SPARraw'][0] |
|
251 | 250 | self.SPARprc=header['SPARprc'][0] |
|
252 | 251 | #112 |
|
253 | 252 | #1180 |
|
254 | 253 | #print 'Pointer fp header', fp.tell() |
|
255 | 254 | #print ' ' |
|
256 | 255 | #print 'SPARrawGate' |
|
257 | 256 | #print self.SPARrawGate2 - self.SPARrawGate1 |
|
258 | 257 | |
|
259 | 258 | #print ' ' |
|
260 | 259 | #print 'Hname' |
|
261 | 260 | #print self.Hname |
|
262 | 261 | |
|
263 | 262 | #print ' ' |
|
264 | 263 | #print 'Msign' |
|
265 | 264 | #print self.Msign |
|
266 | 265 | |
|
267 | 266 | def write(self, fp): |
|
268 | 267 | |
|
269 | 268 | headerTuple = (self.Hname, |
|
270 | 269 | self.Htime, |
|
271 | 270 | self.Hoper, |
|
272 | 271 | self.Hplace, |
|
273 | 272 | self.Hdescr, |
|
274 | 273 | self.Hdummy) |
|
275 | 274 | |
|
276 | 275 | |
|
277 | 276 | header = numpy.array(headerTuple, FILE_HEADER) |
|
278 | 277 | # numpy.array(object, dtype=None, copy=True, order=None, subok=False, ndmin=0) |
|
279 | 278 | header.tofile(fp) |
|
280 | 279 | ''' ndarray.tofile(fid, sep, format) Write array to a file as text or binary (default). |
|
281 | 280 | |
|
282 | 281 | fid : file or str |
|
283 | 282 | An open file object, or a string containing a filename. |
|
284 | 283 | |
|
285 | 284 | sep : str |
|
286 | 285 | Separator between array items for text output. If "" (empty), a binary file is written, |
|
287 | 286 | equivalent to file.write(a.tobytes()). |
|
288 | 287 | |
|
289 | 288 | format : str |
|
290 | 289 | Format string for text file output. Each entry in the array is formatted to text by |
|
291 | 290 | first converting it to the closest Python type, and then using "format" % item. |
|
292 | 291 | |
|
293 | 292 | ''' |
|
294 | 293 | |
|
295 | 294 | return 1 |
|
296 | 295 | |
|
297 | 296 | SRVI_HEADER = numpy.dtype([ |
|
298 | 297 | ('SignatureSRVI1',numpy.str_,4),# |
|
299 | 298 | ('SizeOfDataBlock1','<i4'),# |
|
300 | 299 | ('DataBlockTitleSRVI1',numpy.str_,4),# |
|
301 | 300 | ('SizeOfSRVI1','<i4'),])# |
|
302 | 301 | |
|
303 | 302 | class SRVIHeader(Header): |
|
304 | 303 | def __init__(self, SignatureSRVI1=0, SizeOfDataBlock1=0, DataBlockTitleSRVI1=0, SizeOfSRVI1=0): |
|
305 | 304 | |
|
306 | 305 | self.SignatureSRVI1 = SignatureSRVI1 |
|
307 | 306 | self.SizeOfDataBlock1 = SizeOfDataBlock1 |
|
308 | 307 | self.DataBlockTitleSRVI1 = DataBlockTitleSRVI1 |
|
309 | 308 | self.SizeOfSRVI1 = SizeOfSRVI1 |
|
310 | 309 | |
|
311 | 310 | self.SRVIHsize=16 |
|
312 | 311 | |
|
313 | 312 | def SRVIread(self, fp): |
|
314 | 313 | |
|
315 | 314 | header = numpy.fromfile(fp, SRVI_HEADER,1) |
|
316 | 315 | |
|
317 | 316 | self.SignatureSRVI1 = str(header['SignatureSRVI1'][0]) |
|
318 | 317 | self.SizeOfDataBlock1 = header['SizeOfDataBlock1'][0] |
|
319 | 318 | self.DataBlockTitleSRVI1 = str(header['DataBlockTitleSRVI1'][0]) |
|
320 | 319 | self.SizeOfSRVI1 = header['SizeOfSRVI1'][0] |
|
321 | 320 | #16 |
|
322 | 321 | print 'Pointer fp SRVIheader', fp.tell() |
|
323 | 322 | |
|
324 | 323 | |
|
325 | 324 | SRVI_STRUCTURE = numpy.dtype([ |
|
326 | 325 | ('frame_cnt','<u4'),# |
|
327 | 326 | ('time_t','<u4'), # |
|
328 | 327 | ('tpow','<f4'), # |
|
329 | 328 | ('npw1','<f4'), # |
|
330 | 329 | ('npw2','<f4'), # |
|
331 | 330 | ('cpw1','<f4'), # |
|
332 | 331 | ('pcw2','<f4'), # |
|
333 | 332 | ('ps_err','<u4'), # |
|
334 | 333 | ('te_err','<u4'), # |
|
335 | 334 | ('rc_err','<u4'), # |
|
336 | 335 | ('grs1','<u4'), # |
|
337 | 336 | ('grs2','<u4'), # |
|
338 | 337 | ('azipos','<f4'), # |
|
339 | 338 | ('azivel','<f4'), # |
|
340 | 339 | ('elvpos','<f4'), # |
|
341 | 340 | ('elvvel','<f4'), # |
|
342 | 341 | ('northAngle','<f4'), # |
|
343 | 342 | ('microsec','<u4'), # |
|
344 | 343 | ('azisetvel','<f4'), # |
|
345 | 344 | ('elvsetpos','<f4'), # |
|
346 | 345 | ('RadarConst','<f4'),]) # |
|
347 | 346 | |
|
348 | 347 | |
|
349 | 348 | |
|
350 | 349 | |
|
351 | 350 | class RecordHeader(Header): |
|
352 | 351 | |
|
353 | 352 | |
|
354 | 353 | def __init__(self, frame_cnt=0, time_t= 0, tpow=0, npw1=0, npw2=0, |
|
355 | 354 | cpw1=0, pcw2=0, ps_err=0, te_err=0, rc_err=0, grs1=0, |
|
356 | 355 | grs2=0, azipos=0, azivel=0, elvpos=0, elvvel=0, northangle=0, |
|
357 | 356 | microsec=0, azisetvel=0, elvsetpos=0, RadarConst=0 , RecCounter=0, Off2StartNxtRec=0): |
|
358 | 357 | |
|
359 | 358 | |
|
360 | 359 | self.frame_cnt = frame_cnt |
|
361 | 360 | self.dwell = time_t |
|
362 | 361 | self.tpow = tpow |
|
363 | 362 | self.npw1 = npw1 |
|
364 | 363 | self.npw2 = npw2 |
|
365 | 364 | self.cpw1 = cpw1 |
|
366 | 365 | self.pcw2 = pcw2 |
|
367 | 366 | self.ps_err = ps_err |
|
368 | 367 | self.te_err = te_err |
|
369 | 368 | self.rc_err = rc_err |
|
370 | 369 | self.grs1 = grs1 |
|
371 | 370 | self.grs2 = grs2 |
|
372 | 371 | self.azipos = azipos |
|
373 | 372 | self.azivel = azivel |
|
374 | 373 | self.elvpos = elvpos |
|
375 | 374 | self.elvvel = elvvel |
|
376 | 375 | self.northAngle = northangle |
|
377 | 376 | self.microsec = microsec |
|
378 | 377 | self.azisetvel = azisetvel |
|
379 | 378 | self.elvsetpos = elvsetpos |
|
380 | 379 | self.RadarConst = RadarConst |
|
381 | 380 | self.RHsize=84 |
|
382 | 381 | self.RecCounter = RecCounter |
|
383 | 382 | self.Off2StartNxtRec=Off2StartNxtRec |
|
384 | 383 | |
|
385 | 384 | def RHread(self, fp): |
|
386 | 385 | |
|
387 | 386 | #startFp = open(fp,"rb") #The method tell() returns the current position of the file read/write pointer within the file. |
|
388 | 387 | |
|
389 | 388 | #OffRHeader= 1180 + self.RecCounter*(self.Off2StartNxtRec) |
|
390 | 389 | #startFp.seek(OffRHeader, os.SEEK_SET) |
|
391 | 390 | |
|
392 | 391 | #print 'Posicion del bloque: ',OffRHeader |
|
393 | 392 | |
|
394 | 393 | header = numpy.fromfile(fp,SRVI_STRUCTURE,1) |
|
395 | 394 | |
|
396 | 395 | self.frame_cnt = header['frame_cnt'][0]# |
|
397 | 396 | self.time_t = header['time_t'][0] # |
|
398 | 397 | self.tpow = header['tpow'][0] # |
|
399 | 398 | self.npw1 = header['npw1'][0] # |
|
400 | 399 | self.npw2 = header['npw2'][0] # |
|
401 | 400 | self.cpw1 = header['cpw1'][0] # |
|
402 | 401 | self.pcw2 = header['pcw2'][0] # |
|
403 | 402 | self.ps_err = header['ps_err'][0] # |
|
404 | 403 | self.te_err = header['te_err'][0] # |
|
405 | 404 | self.rc_err = header['rc_err'][0] # |
|
406 | 405 | self.grs1 = header['grs1'][0] # |
|
407 | 406 | self.grs2 = header['grs2'][0] # |
|
408 | 407 | self.azipos = header['azipos'][0] # |
|
409 | 408 | self.azivel = header['azivel'][0] # |
|
410 | 409 | self.elvpos = header['elvpos'][0] # |
|
411 | 410 | self.elvvel = header['elvvel'][0] # |
|
412 | 411 | self.northAngle = header['northAngle'][0] # |
|
413 | 412 | self.microsec = header['microsec'][0] # |
|
414 | 413 | self.azisetvel = header['azisetvel'][0] # |
|
415 | 414 | self.elvsetpos = header['elvsetpos'][0] # |
|
416 | 415 | self.RadarConst = header['RadarConst'][0] # |
|
417 | 416 | #84 |
|
418 | 417 | |
|
419 | 418 | #print 'Pointer fp RECheader', fp.tell() |
|
420 | 419 | |
|
421 | 420 | #self.ipp= 0.5*(SPEED_OF_LIGHT/self.PRFhz) |
|
422 | 421 | |
|
423 | 422 | #self.RHsize = 180+20*self.nChannels |
|
424 | 423 | #self.Datasize= self.nProfiles*self.nChannels*self.nHeights*2*4 |
|
425 | 424 | #print 'Datasize',self.Datasize |
|
426 | 425 | #endFp = self.OffsetStartHeader + self.RecCounter*self.Off2StartNxtRec |
|
427 | 426 | |
|
428 | 427 | print '==============================================' |
|
429 | 428 | |
|
430 | 429 | print '==============================================' |
|
431 | 430 | |
|
432 | 431 | |
|
433 | 432 | return 1 |
|
434 | 433 | |
|
435 | 434 | class MIRA35CReader (ProcessingUnit,FileHeaderMIRA35c,SRVIHeader,RecordHeader): |
|
436 | 435 | |
|
437 | 436 | path = None |
|
438 | 437 | startDate = None |
|
439 | 438 | endDate = None |
|
440 | 439 | startTime = None |
|
441 | 440 | endTime = None |
|
442 | 441 | walk = None |
|
443 | 442 | isConfig = False |
|
444 | 443 | |
|
445 | 444 | |
|
446 | 445 | fileList= None |
|
447 | 446 | |
|
448 | 447 | #metadata |
|
449 | 448 | TimeZone= None |
|
450 | 449 | Interval= None |
|
451 | 450 | heightList= None |
|
452 | 451 | |
|
453 | 452 | #data |
|
454 | 453 | data= None |
|
455 | 454 | utctime= None |
|
456 | 455 | |
|
457 | 456 | |
|
458 | 457 | |
|
459 | 458 | def __init__(self, **kwargs): |
|
460 | 459 | |
|
461 | 460 | # Remove the inheritance from the base class |
|
462 | 461 | ProcessingUnit.__init__(self, **kwargs) |
|
463 | 462 | self.PointerReader = 0 |
|
464 | 463 | self.FileHeaderFlag = False |
|
465 | 464 | self.utc = None |
|
466 | 465 | self.ext = ".zspca" |
|
467 | 466 | self.optchar = "P" |
|
468 | 467 | self.fpFile=None |
|
469 | 468 | self.fp = None |
|
470 | 469 | self.BlockCounter=0 |
|
471 | 470 | self.dtype = None |
|
472 | 471 | self.fileSizeByHeader = None |
|
473 | 472 | self.filenameList = [] |
|
474 | 473 | self.fileSelector = 0 |
|
475 | 474 | self.Off2StartNxtRec=0 |
|
476 | 475 | self.RecCounter=0 |
|
477 | 476 | self.flagNoMoreFiles = 0 |
|
478 | 477 | self.data_spc=None |
|
479 | 478 | #self.data_cspc=None |
|
480 | 479 | self.data_output=None |
|
481 | 480 | self.path = None |
|
482 | 481 | self.OffsetStartHeader=0 |
|
483 | 482 | self.Off2StartData=0 |
|
484 | 483 | self.ipp = 0 |
|
485 | 484 | self.nFDTdataRecors=0 |
|
486 | 485 | self.blocksize = 0 |
|
487 | 486 | self.dataOut = Spectra() |
|
488 | 487 | self.profileIndex = 1 #Always |
|
489 | 488 | self.dataOut.flagNoData=False |
|
490 | 489 | self.dataOut.nRdPairs = 0 |
|
491 | 490 | self.dataOut.pairsList = [] |
|
492 | 491 | self.dataOut.data_spc=None |
|
493 | 492 | |
|
494 | 493 | self.dataOut.normFactor=1 |
|
495 | 494 | self.nextfileflag = True |
|
496 | 495 | self.dataOut.RadarConst = 0 |
|
497 | 496 | self.dataOut.HSDV = [] |
|
498 | 497 | self.dataOut.NPW = [] |
|
499 | 498 | self.dataOut.COFA = [] |
|
500 | 499 | self.dataOut.noise = 0 |
|
501 | 500 | |
|
502 | 501 | |
|
503 | 502 | def Files2Read(self, fp): |
|
504 | 503 | ''' |
|
505 | 504 | Counts the data files (.zspca, excluding .gz) found in the folder to be read. |

506 | 505 | It also builds a sorted list with the names of the files to read. |
|
507 | 506 | ''' |
|
508 | 507 | #self.__checkPath() |
|
509 | 508 | |
|
510 | 509 | ListaData=os.listdir(fp) # Get the list of files inside the fp folder |

511 | 510 | ListaData=sorted(ListaData) # Sort the file list alphabetically |

512 | 511 | nFiles=0 # File counter |

513 | 512 | FileList=[] # List that will hold the .zspca files to read |
|
514 | 513 | for IndexFile in ListaData : |
|
515 | 514 | if '.zspca' in IndexFile and '.gz' not in IndexFile: |
|
516 | 515 | FileList.append(IndexFile) |
|
517 | 516 | nFiles+=1 |
|
518 | 517 | |
|
519 | 518 | #print 'Files2Read' |
|
520 | 519 | #print 'There are '+str(nFiles)+' .fdt files' |
|
521 | 520 | |
|
522 | 521 | self.filenameList=FileList #List of files from least to largest by names |
|
523 | 522 | |
|
524 | 523 | |
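A hedged, standalone equivalent of this directory scan, using glob instead of os.listdir (the function name and the example path are placeholders, not part of the reader):

import glob
import os

def list_zspca_files(folder):
    # Sorted .zspca file names, skipping gzipped ones, as Files2Read does
    return sorted(os.path.basename(f)
                  for f in glob.glob(os.path.join(folder, '*.zspca'))
                  if not f.endswith('.gz'))

# Example call with a hypothetical path:
# print(list_zspca_files('/data/mira35c/20161109'))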
|
525 | 524 | def run(self, **kwargs): |
|
526 | 525 | ''' |
|
527 | 526 | Entry point of the unit; the framework calls it repeatedly. |

528 | 527 | On the first call it configures the reader through setup(**kwargs), and |

529 | 528 | afterwards it only acquires the data to be processed with getData(). |
|
530 | 529 | ''' |
|
531 | 530 | if not self.isConfig: |
|
532 | 531 | self.setup(**kwargs) |
|
533 | 532 | self.isConfig = True |
|
534 | 533 | |
|
535 | 534 | self.getData() |
|
536 | 535 | |
|
537 | 536 | |
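run() follows the usual configure-once-then-read pattern of a ProcessingUnit; a minimal standalone sketch of that pattern (class name and method bodies are illustrative only):

class LazyReader(object):
    def __init__(self):
        self.isConfig = False

    def setup(self, path=None):
        self.path = path

    def getData(self):
        return 'block from %s' % self.path

    def run(self, **kwargs):
        # Configure only on the first call, then keep acquiring data
        if not self.isConfig:
            self.setup(**kwargs)
            self.isConfig = True
        return self.getData()

reader = LazyReader()
print(reader.run(path='/data'))   # setup() happens on this first call
print(reader.run())               # later calls only acquire data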
|
538 | 537 | def setup(self, path=None, |
|
539 | 538 | startDate=None, |
|
540 | 539 | endDate=None, |
|
541 | 540 | startTime=None, |
|
542 | 541 | endTime=None, |
|
543 | 542 | walk=True, |
|
544 | 543 | timezone='utc', |
|
545 | 544 | code = None, |
|
546 | 545 | online=False, |
|
547 | 546 | ReadMode=None, **kwargs): |
|
548 | 547 | |
|
549 | 548 | self.isConfig = True |
|
550 | 549 | |
|
551 | 550 | self.path=path |
|
552 | 551 | self.startDate=startDate |
|
553 | 552 | self.endDate=endDate |
|
554 | 553 | self.startTime=startTime |
|
555 | 554 | self.endTime=endTime |
|
556 | 555 | self.walk=walk |
|
557 | 556 | #self.ReadMode=int(ReadMode) |
|
558 | 557 | |
|
559 | 558 | pass |
|
560 | 559 | |
|
561 | 560 | |
|
562 | 561 | def getData(self): |
|
563 | 562 | ''' |
|
564 | 563 | Before starting this method, check that there is still an unread file and |

565 | 564 | that there are still blocks to read (i.e. the data block is not empty). |

566 | 565 | |

567 | 566 | It then reads the current file by calling readFile(). |
|
568 | 567 | |
|
569 | 568 | ''' |
|
570 | 569 | |
|
571 | 570 | if self.flagNoMoreFiles: |
|
572 | 571 | self.dataOut.flagNoData = True |
|
573 | 572 | print 'flagNoData becomes True' |
|
574 | 573 | return 0 |
|
575 | 574 | |
|
576 | 575 | self.fp=self.path |
|
577 | 576 | self.Files2Read(self.fp) |
|
578 | 577 | self.readFile(self.fp) |
|
579 | 578 | |
|
580 | 579 | self.dataOut.data_spc = self.dataOut_spc#self.data_spc.copy() |
|
581 | 580 | self.dataOut.RadarConst = self.RadarConst |
|
582 | 581 | self.dataOut.data_output=self.data_output |
|
583 | 582 | self.dataOut.noise = self.dataOut.getNoise() |
|
584 | 583 | #print 'HERE', self.dataOut.noise |
|
585 | 584 | self.dataOut.data_spc = self.dataOut.data_spc+self.dataOut.noise |
|
586 | 585 | #print 'self.dataOut.noise',self.dataOut.noise |
|
587 | 586 | |
|
588 | 587 | |
|
589 | 588 | return self.dataOut.data_spc |
|
590 | 589 | |
|
591 | 590 | |
|
592 | 591 | def readFile(self,fp): |
|
593 | 592 | ''' |
|
594 | 593 | Indicate whether the reading is done in Online or Offline mode and load |

595 | 594 | the parameters for that reading mode. |

596 | 595 | |

597 | 596 | Then two actions are performed: |

598 | 597 | |

599 | 598 | 1. Read the MIRA35c FileHeader. |

600 | 599 | 2. Start reading the first block. |
|
601 | 600 | ''' |
|
602 | 601 | |
|
603 | 602 | # Build the full path of the next data file to be read from the folder path |
|
604 | 603 | print "File: ",self.fileSelector+1 |
|
605 | 604 | |
|
606 | 605 | if self.fileSelector < len(self.filenameList): |
|
607 | 606 | |
|
608 | 607 | self.fpFile=str(fp)+'/'+str(self.filenameList[self.fileSelector]) |
|
609 | 608 | |
|
610 | 609 | if self.nextfileflag==True: |
|
611 | 610 | self.fp = open(self.fpFile,"rb") |
|
612 | 611 | self.nextfileflag = False |
|
613 | 612 | |
|
614 | 613 | '''HERE STARTING THE FILE READING''' |
|
615 | 614 | |
|
616 | 615 | |
|
617 | 616 | self.fheader = FileHeaderMIRA35c() |
|
618 | 617 | self.fheader.FHread(self.fp) # MIRA35c FileHeader reading |
|
619 | 618 | |
|
620 | 619 | |
|
621 | 620 | self.SPARrawGate1 = self.fheader.SPARrawGate1 |
|
622 | 621 | self.SPARrawGate2 = self.fheader.SPARrawGate2 |
|
623 | 622 | self.Num_Hei = self.SPARrawGate2 - self.SPARrawGate1 |
|
624 | 623 | self.Num_Bins = self.fheader.PPARsft |
|
625 | 624 | self.dataOut.nFFTPoints = self.fheader.PPARsft |
|
626 | 625 | |
|
627 | 626 | |
|
628 | 627 | self.Num_inCoh = self.fheader.PPARavc |
|
629 | 628 | self.dataOut.PRF = self.fheader.PPARprf |
|
630 | 629 | self.dataOut.frequency = 34.85*10**9 |
|
631 | 630 | self.Lambda = SPEED_OF_LIGHT/self.dataOut.frequency |
|
632 | 631 | self.dataOut.ippSeconds= 1./float(self.dataOut.PRF) |
|
633 | 632 | |
|
634 | 633 | pulse_width = self.fheader.PPARpdr * 10**-9 |
|
635 | 634 | self.__deltaHeigth = 0.5 * SPEED_OF_LIGHT * pulse_width |
|
636 | 635 | |
|
637 | 636 | self.data_spc = numpy.zeros((self.Num_Hei, self.Num_Bins,2))# |
|
638 | 637 | self.dataOut.HSDV = numpy.zeros((self.Num_Hei, 2)) |
|
639 | 638 | |
|
640 | 639 | self.Ze = numpy.zeros(self.Num_Hei) |
|
641 | 640 | self.ETA = numpy.zeros(([2,self.Num_Hei])) |
|
642 | 641 | |
|
643 | 642 | |
|
644 | 643 | |
|
645 | 644 | self.readBlock() #Block reading |
|
646 | 645 | |
|
647 | 646 | else: |
|
648 | 647 | print 'readFile FlagNoData becomes true' |
|
649 | 648 | self.flagNoMoreFiles=True |
|
650 | 649 | self.dataOut.flagNoData = True |
|
651 | 650 | self.FileHeaderFlag = True |
|
652 | 651 | return 0 |
|
653 | 652 | |
|
654 | 653 | |
|
655 | 654 | |
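readFile derives the range resolution from the transmitted pulse width (deltaH = 0.5 * c * pulse_width) and builds the height list from the raw gate indices; a small numeric sketch of that arithmetic (pulse width and gate numbers are example values, not taken from a real header):

import numpy

SPEED_OF_LIGHT = 3e8         # m/s, as assumed by the reader
pulse_width = 208e-9         # s, example PPARpdr value (208 ns)
gate1, gate2 = 10, 60        # example SPARrawGate1 / SPARrawGate2

delta_h = 0.5 * SPEED_OF_LIGHT * pulse_width           # 31.2 m per gate
n_hei = gate2 - gate1
height_list = gate1 * delta_h + numpy.arange(n_hei) * delta_h

print(delta_h)               # 31.2
print(height_list[:3])       # [ 312.   343.2  374.4]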
|
656 | 655 | def readBlock(self): |
|
657 | 656 | ''' |
|
658 | 657 | Check whether the block contains data; if it does not, move on to the next file. |
|
659 | 658 | |
|
660 | 659 | Then the following is done: |
|
661 | 660 | |
|
662 | 661 | 1. Read the RecordHeader |
|
663 | 662 | 2. Fill the buffer with the current block number. |
|
664 | 663 | |
|
665 | 664 | ''' |
|
666 | 665 | |
|
667 | 666 | if self.PointerReader > 1180: |
|
668 | 667 | self.fp.seek(self.PointerReader , os.SEEK_SET) |
|
669 | 668 | self.FirstPoint = self.PointerReader |
|
670 | 669 | |
|
671 | 670 | else : |
|
672 | 671 | self.FirstPoint = 1180 |
|
673 | 672 | |
|
674 | 673 | |
|
675 | 674 | |
|
676 | 675 | self.srviHeader = SRVIHeader() |
|
677 | 676 | |
|
678 | 677 | self.srviHeader.SRVIread(self.fp) # Read the SRVI header |
|
679 | 678 | |
|
680 | 679 | self.blocksize = self.srviHeader.SizeOfDataBlock1 # Get the data block size |
|
681 | 680 | |
|
682 | 681 | if self.blocksize == 148: |
|
683 | 682 | print 'blocksize == 148 bug' |
|
684 | 683 | jump = numpy.fromfile(self.fp,[('jump',numpy.str_,140)] ,1) |
|
685 | 684 | |
|
686 | 685 | self.srviHeader.SRVIread(self.fp) # Read the SRVI header |
|
687 | 686 | |
|
688 | 687 | if not self.srviHeader.SizeOfSRVI1: |
|
689 | 688 | self.fileSelector+=1 |
|
690 | 689 | self.nextfileflag = True |

691 | 690 | self.FileHeaderFlag = True |
|
692 | 691 | |
|
693 | 692 | self.recordheader = RecordHeader() |
|
694 | 693 | self.recordheader.RHread(self.fp) |
|
695 | 694 | self.RadarConst = self.recordheader.RadarConst |
|
696 | 695 | dwell = self.recordheader.time_t |
|
697 | 696 | npw1 = self.recordheader.npw1 |
|
698 | 697 | npw2 = self.recordheader.npw2 |
|
699 | 698 | |
|
700 | 699 | |
|
701 | 700 | self.dataOut.channelList = range(1) |
|
702 | 701 | self.dataOut.nIncohInt = self.Num_inCoh |
|
703 | 702 | self.dataOut.nProfiles = self.Num_Bins |
|
704 | 703 | self.dataOut.nCohInt = 1 |
|
705 | 704 | self.dataOut.windowOfFilter = 1 |
|
706 | 705 | self.dataOut.utctime = dwell |
|
707 | 706 | self.dataOut.timeZone=0 |
|
708 | 707 | |
|
709 | 708 | self.dataOut.outputInterval = self.dataOut.getTimeInterval() |
|
710 | 709 | self.dataOut.heightList = self.SPARrawGate1*self.__deltaHeigth + numpy.array(range(self.Num_Hei))*self.__deltaHeigth |
|
711 | 710 | |
|
712 | 711 | |
|
713 | 712 | |
|
714 | 713 | self.HSDVsign = numpy.fromfile( self.fp, [('HSDV',numpy.str_,4)],1) |
|
715 | 714 | self.SizeHSDV = numpy.fromfile( self.fp, [('SizeHSDV','<i4')],1) |
|
716 | 715 | self.HSDV_Co = numpy.fromfile( self.fp, [('HSDV_Co','<f4')],self.Num_Hei) |
|
717 | 716 | self.HSDV_Cx = numpy.fromfile( self.fp, [('HSDV_Cx','<f4')],self.Num_Hei) |
|
718 | 717 | |
|
719 | 718 | self.COFAsign = numpy.fromfile( self.fp, [('COFA',numpy.str_,4)],1) |
|
720 | 719 | self.SizeCOFA = numpy.fromfile( self.fp, [('SizeCOFA','<i4')],1) |
|
721 | 720 | self.COFA_Co = numpy.fromfile( self.fp, [('COFA_Co','<f4')],self.Num_Hei) |
|
722 | 721 | self.COFA_Cx = numpy.fromfile( self.fp, [('COFA_Cx','<f4')],self.Num_Hei) |
|
723 | 722 | |
|
724 | 723 | self.ZSPCsign = numpy.fromfile(self.fp, [('ZSPCsign',numpy.str_,4)],1) |
|
725 | 724 | self.SizeZSPC = numpy.fromfile(self.fp, [('SizeZSPC','<i4')],1) |
|
726 | 725 | |
|
727 | 726 | self.dataOut.HSDV[0]=self.HSDV_Co[:][0] |
|
728 | 727 | self.dataOut.HSDV[1]=self.HSDV_Cx[:][0] |
|
729 | 728 | |
|
730 | 729 | for irg in range(self.Num_Hei): |
|
731 | 730 | nspc = numpy.fromfile(self.fp, [('nspc','int16')],1)[0][0] # Number of spectral sub pieces containing significant power |
|
732 | 731 | |
|
733 | 732 | for k in range(nspc): |
|
734 | 733 | binIndex = numpy.fromfile(self.fp, [('binIndex','int16')],1)[0][0] # Index of the spectral bin where the piece begins |
|
735 | 734 | nbins = numpy.fromfile(self.fp, [('nbins','int16')],1)[0][0] # Number of bins of the piece |
|
736 | 735 | |
|
737 | 736 | #Co_Channel |
|
738 | 737 | jbin = numpy.fromfile(self.fp, [('jbin','uint16')],nbins)[0][0] # Spectrum piece to be normalized |

739 | 738 | jmax = numpy.fromfile(self.fp, [('jmax','float32')],1)[0][0] # Maximum value used to normalize the piece |
|
740 | 739 | |
|
741 | 740 | |
|
742 | 741 | self.data_spc[irg,binIndex:binIndex+nbins,0] = self.data_spc[irg,binIndex:binIndex+nbins,0]+jbin/65530.*jmax |
|
743 | 742 | |
|
744 | 743 | #Cx_Channel |
|
745 | 744 | jbin = numpy.fromfile(self.fp, [('jbin','uint16')],nbins)[0][0] |
|
746 | 745 | jmax = numpy.fromfile(self.fp, [('jmax','float32')],1)[0][0] |
|
747 | 746 | |
|
748 | 747 | |
|
749 | 748 | self.data_spc[irg,binIndex:binIndex+nbins,1] = self.data_spc[irg,binIndex:binIndex+nbins,1]+jbin/65530.*jmax |
|
750 | 749 | |
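Each spectrum in the block is stored as a list of significant pieces: a start bin, a piece length, nbins scaled uint16 samples and the peak value jmax used to rescale them, so that sample = jbin / 65530 * jmax. A standalone sketch of that decompression on one synthetic piece, keeping the whole jbin array (variable names follow the comments above):

import numpy

num_bins = 16
spectrum = numpy.zeros(num_bins, dtype='float32')

# One synthetic piece: starts at bin 4, 3 samples, peak value jmax
bin_index, nbins, jmax = 4, 3, 2.0
jbin = numpy.array([65530, 32765, 0], dtype='uint16')   # encoded samples

# Decode: each sample is jbin/65530 of the piece maximum
spectrum[bin_index:bin_index + nbins] += jbin / 65530. * jmax
print(spectrum[4:7])   # [ 2.  1.  0.]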
|
751 | 750 | for bin in range(self.Num_Bins): |
|
752 | 751 | |
|
753 | 752 | self.data_spc[:,bin,0] = self.data_spc[:,bin,0] - self.dataOut.HSDV[:,0] |
|
754 | 753 | |
|
755 | 754 | self.data_spc[:,bin,1] = self.data_spc[:,bin,1] - self.dataOut.HSDV[:,1] |
|
756 | 755 | |
|
757 | 756 | |
|
758 | 757 | numpy.set_printoptions(threshold='nan') |
|
759 | 758 | |
|
760 | 759 | self.data_spc = numpy.where(self.data_spc > 0. , self.data_spc, 0) |
|
761 | 760 | |
|
762 | 761 | self.dataOut.COFA = numpy.array([self.COFA_Co , self.COFA_Cx]) |
|
763 | 762 | |
|
764 | 763 | print ' ' |
|
765 | 764 | print 'SPC',numpy.shape(self.dataOut.data_spc) |
|
766 | 765 | #print 'SPC',self.dataOut.data_spc |
|
767 | 766 | |
|
768 | 767 | noinor1 = 713031680 |
|
769 | 768 | noinor2 = 30 |
|
770 | 769 | |
|
771 | 770 | npw1 = 1#0**(npw1/10) * noinor1 * noinor2 |
|
772 | 771 | npw2 = 1#0**(npw2/10) * noinor1 * noinor2 |
|
773 | 772 | self.dataOut.NPW = numpy.array([npw1, npw2]) |
|
774 | 773 | |
|
775 | 774 | print ' ' |
|
776 | 775 | |
|
777 | 776 | self.data_spc = numpy.transpose(self.data_spc, (2,1,0)) |
|
778 | 777 | self.data_spc = numpy.fft.fftshift(self.data_spc, axes = 1) |
|
779 | 778 | |
|
780 | 779 | self.data_spc = numpy.fliplr(self.data_spc) |
|
781 | 780 | |
|
782 | 781 | self.data_spc = numpy.where(self.data_spc > 0. , self.data_spc, 0) |
|
783 | 782 | self.dataOut_spc= numpy.ones([1, self.Num_Bins , self.Num_Hei]) |
|
784 | 783 | self.dataOut_spc[0,:,:] = self.data_spc[0,:,:] |
|
785 | 784 | #print 'SHAPE', self.dataOut_spc.shape |
|
786 | 785 | #For nyquist correction: |
|
787 | 786 | #fix = 20 # ~3m/s |
|
788 | 787 | #shift = self.Num_Bins/2 + fix |
|
789 | 788 | #self.data_spc = numpy.array([ self.data_spc[: , self.Num_Bins-shift+1: , :] , self.data_spc[: , 0:self.Num_Bins-shift , :]]) |
|
790 | 789 | |
|
791 | 790 | |
|
792 | 791 | |
|
793 | 792 | '''Block Reading, the Block Data is received and Reshape is used to give it |
|
794 | 793 | shape. |
|
795 | 794 | ''' |
|
796 | 795 | |
|
797 | 796 | self.PointerReader = self.fp.tell() |
|
798 | 797 | |
|
799 | 798 | |
|
800 | 799 | |
|
801 | 800 | |
|
802 | 801 | |
|
803 | 802 | |
|
804 | 803 | No newline at end of file |
|
@@ -1,576 +1,564 | |||
|
1 | 1 | ''' |
|
2 | 2 | Created on Oct 24, 2016 |
|
3 | 3 | |
|
4 | 4 | @author: roj- LouVD |
|
5 | 5 | ''' |
|
6 | 6 | |
|
7 | 7 | import numpy |
|
8 | 8 | import copy |
|
9 | 9 | import datetime |
|
10 | 10 | import time |
|
11 | 11 | from time import gmtime |
|
12 | 12 | |
|
13 | 13 | from jroproc_base import ProcessingUnit |
|
14 | 14 | from schainpy.model.data.jrodata import Parameters |
|
15 | 15 | from numpy import transpose |
|
16 | 16 | |
|
17 | 17 | from matplotlib import cm |
|
18 | 18 | import matplotlib.pyplot as plt |
|
19 | 19 | from matplotlib.mlab import griddata |
|
20 | 20 | |
|
21 | 21 | |
|
22 | ||
|
23 | ||
|
24 | class BLTRProcess(ProcessingUnit): | |
|
25 | isConfig = False | |
|
22 | class BLTRParametersProc(ProcessingUnit): | |
|
26 | 23 | ''' |
|
27 | Processing unit for BLTR | |

28 | ||
|
24 | Processing unit for BLTR parameters data (winds) | |
|
25 | ||
|
29 | 26 | Inputs: |
|
30 | 27 | self.dataOut.nmodes - Number of operation modes |
|
31 | 28 | self.dataOut.nchannels - Number of channels |
|
32 | 29 | self.dataOut.nranges - Number of ranges |
|
33 | ||
|
30 | ||
|
34 | 31 | self.dataOut.data_SNR - SNR array |
|
35 | 32 | self.dataOut.data_output - Zonal, Vertical and Meridional velocity array |
|
36 | 33 | self.dataOut.height - Height array (km) |
|
37 | 34 | self.dataOut.time - Time array (seconds) |
|
38 | ||
|
35 | ||
|
39 | 36 | self.dataOut.fileIndex -Index of the file currently read |
|
40 | 37 | self.dataOut.lat - Latitude coordinate of BLTR location |
|
41 | ||
|
38 | ||
|
42 | 39 | self.dataOut.doy - Experiment doy (number of the day in the current year) |
|
43 | 40 | self.dataOut.month - Experiment month |
|
44 | 41 | self.dataOut.day - Experiment day |
|
45 | 42 | self.dataOut.year - Experiment year |
|
46 | 43 | ''' |
|
47 | ||
|
48 | def __init__(self, **kwargs): | |

44 | ||
|
45 | def __init__(self, **kwargs): | |
|
49 | 46 | ''' |
|
50 | Inputs: None | |
|
51 | ||
|
47 | Inputs: None | |
|
52 | 48 | ''' |
|
53 | 49 | ProcessingUnit.__init__(self, **kwargs) |
|
54 | 50 | self.dataOut = Parameters() |
|
55 | 51 | |
|
56 | # Filters | |
|
57 | snr_val = None | |
|
58 | value = None | |
|
59 | svalue2 = None | |
|
60 | method = None | |
|
61 | factor = None | |
|
62 | filter = None | |
|
63 | npoints = None | |
|
64 | status_value = None | |
|
65 | width = None | |
|
66 | self.flagfirstmode = 0 | |
|
67 | ||
|
68 | def run (self): | |
|
52 | def run (self, mode): | |
|
53 | ''' | |
|
54 | ''' | |
|
69 | 55 | if self.dataIn.type == "Parameters": |
|
70 | 56 | self.dataOut.copy(self.dataIn) |
|
71 | 57 | |
|
58 | self.dataOut.data_output = self.dataOut.data_output[mode] | |
|
59 | self.dataOut.heightList = self.dataOut.height[mode] | |
|
72 | 60 | |
|
73 | 61 | def TimeSelect(self): |
|
74 | 62 | ''' |
|
75 | 63 | Selecting the time array according to the day of the experiment with a duration of 24 hours |
|
76 | 64 | ''' |
|
77 | 65 | |
|
78 | 66 | k1 = datetime.datetime(self.dataOut.year, self.dataOut.month, self.dataOut.day) - datetime.timedelta(hours=5) |
|
79 | 67 | k2 = datetime.datetime(self.dataOut.year, self.dataOut.month, self.dataOut.day) + datetime.timedelta(hours=25) - datetime.timedelta(hours=5) |
|
80 | 68 | limit_sec1 = time.mktime(k1.timetuple()) |
|
81 | 69 | limit_sec2 = time.mktime(k2.timetuple()) |
|
82 | 70 | valid_data = 0 |
|
83 | 71 | |
|
84 | 72 | doy = self.dataOut.doy |
|
85 | 73 | t1 = numpy.where(self.dataOut.time[0, :] >= limit_sec1) |
|
86 | 74 | t2 = numpy.where(self.dataOut.time[0, :] < limit_sec2) |
|
87 | 75 | time_select = [] |
|
88 | 76 | for val_sec in t1[0]: |
|
89 | 77 | if val_sec in t2[0]: |
|
90 | 78 | time_select.append(val_sec) |
|
91 | 79 | |
|
92 | 80 | time_select = numpy.array(time_select, dtype='int') |
|
93 | 81 | valid_data = valid_data + len(time_select) |
|
94 | 82 | |
|
95 | 83 | |
|
96 | 84 | if len(time_select) > 0: |
|
97 | 85 | self.f_timesec = self.dataOut.time[:, time_select] |
|
98 | 86 | snr = self.dataOut.data_SNR[time_select, :, :, :] |
|
99 | 87 | zon = self.dataOut.data_output[0][time_select, :, :] |
|
100 | 88 | mer = self.dataOut.data_output[1][time_select, :, :] |
|
101 | 89 | ver = self.dataOut.data_output[2][time_select, :, :] |
|
102 | 90 | |
|
103 | 91 | if valid_data > 0: |
|
104 | 92 | self.timesec1 = self.f_timesec[0, :] |
|
105 | 93 | self.f_height = self.dataOut.height |
|
106 | 94 | self.f_zon = zon |
|
107 | 95 | self.f_mer = mer |
|
108 | 96 | self.f_ver = ver |
|
109 | 97 | self.f_snr = snr |
|
110 | 98 | self.f_timedate = [] |
|
111 | 99 | self.f_time = [] |
|
112 | 100 | |
|
113 | 101 | for valuet in self.timesec1: |
|
114 | 102 | time_t = time.gmtime(valuet) |
|
115 | 103 | year = time_t.tm_year |
|
116 | 104 | month = time_t.tm_mon |
|
117 | 105 | day = time_t.tm_mday |
|
118 | 106 | hour = time_t.tm_hour |
|
119 | 107 | minute = time_t.tm_min |
|
120 | 108 | second = time_t.tm_sec |
|
121 | 109 | f_timedate_0 = datetime.datetime(year, month, day, hour, minute, second) |
|
122 | 110 | self.f_timedate.append(f_timedate_0) |
|
123 | 111 | |
|
124 | 112 | return self.f_timedate, self.f_timesec, self.f_height, self.f_zon, self.f_mer, self.f_ver, self.f_snr |
|
125 | 113 | |
|
126 | 114 | else: |
|
127 | 115 | self.f_timesec = None |
|
128 | 116 | self.f_timedate = None |
|
129 | 117 | self.f_height = None |
|
130 | 118 | self.f_zon = None |
|
131 | 119 | self.f_mer = None |
|
132 | 120 | self.f_ver = None |
|
133 | 121 | self.f_snr = None |
|
134 | 122 | print 'Invalid time' |
|
135 | 123 | |
|
136 | 124 | return self.f_timedate, self.f_height, self.f_zon, self.f_mer, self.f_ver, self.f_snr |
|
137 | 125 | |
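TimeSelect keeps only the samples whose epoch time falls inside the experiment-day window (a 5-hour offset and a 25-hour span, as above); the nested membership test can be written as one boolean mask, sketched here on a made-up time axis:

import datetime
import time
import numpy

year, month, day = 2016, 11, 9
k1 = datetime.datetime(year, month, day) - datetime.timedelta(hours=5)
k2 = datetime.datetime(year, month, day) + datetime.timedelta(hours=25) - datetime.timedelta(hours=5)
limit_sec1 = time.mktime(k1.timetuple())
limit_sec2 = time.mktime(k2.timetuple())

# Fake epoch-time axis: one sample every 10 minutes, starting one hour early
t = limit_sec1 - 3600. + numpy.arange(200) * 600.

time_select = numpy.where((t >= limit_sec1) & (t < limit_sec2))[0]
print(len(time_select))   # number of samples inside the window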
|
138 | 126 | def SnrFilter(self, snr_val,modetofilter): |
|
139 | 127 | ''' |
|
140 | 128 | Inputs: snr_val - Threshold value |
|
141 | 129 | |
|
142 | 130 | ''' |
|
143 | 131 | if modetofilter!=2 and modetofilter!=1 : |
|
144 | 132 | raise ValueError,'Mode to filter should be "1" or "2". {} is not valid, check "Modetofilter" value.'.format(modetofilter) |
|
145 | 133 | m = modetofilter-1 |
|
146 | 134 | |
|
147 | 135 | print ' SNR filter [mode {}]: SNR <= {}: data_output = NA'.format(modetofilter,snr_val) |
|
148 | 136 | for k in range(self.dataOut.nchannels): |
|
149 | 137 | for r in range(self.dataOut.nranges): |
|
150 | 138 | if self.dataOut.data_SNR[r,k,m] <= snr_val: |
|
151 | 139 | self.dataOut.data_output[2][r,m] = numpy.nan |
|
152 | 140 | self.dataOut.data_output[1][r,m] = numpy.nan |
|
153 | 141 | self.dataOut.data_output[0][r,m] = numpy.nan |
|
154 | 142 | |
|
155 | 143 | |
|
156 | 144 | |
|
157 | 145 | def OutliersFilter(self,modetofilter,svalue,svalue2,method,factor,filter,npoints): |
|
158 | 146 | ''' |
|
159 | 147 | Inputs: |
|
160 | 148 | svalue - string selecting the velocity array: 'zonal', 'meridional' or 'vertical' |

161 | 149 | svalue2 - string choosing the filtering axis: 'inTime' or 'inHeight' |
|
162 | 150 | method - 0 for SMOOTH or 1 for MEDIAN |
|
163 | 151 | factor - number used to set threshold |
|
164 | 152 | filter - 1 to filter the data using the standard-deviation criterion, else 0 |
|
165 | 153 | npoints - number of points for mask filter |
|
166 | 154 | |
|
167 | 155 | ''' |
|
168 | 156 | if modetofilter!=2 and modetofilter!=1 : |
|
169 | 157 | raise ValueError,'Mode to filter should be "1" or "2". {} is not valid, check "Modetofilter" value.'.format(modetofilter) |
|
170 | 158 | |
|
171 | 159 | m = modetofilter-1 |
|
172 | 160 | |
|
173 | 161 | print ' Outliers Filter [mode {}]: {} {} / threshold = {}'.format(modetofilter,svalue,svalue,factor) |
|
174 | 162 | |
|
175 | 163 | npoints = 9 |
|
176 | 164 | novalid = 0.1 |
|
177 | 165 | if svalue == 'zonal': |
|
178 | 166 | value = self.dataOut.data_output[0] |
|
179 | 167 | |
|
180 | 168 | elif svalue == 'meridional': |
|
181 | 169 | value = self.dataOut.data_output[1] |
|
182 | 170 | |
|
183 | 171 | elif svalue == 'vertical': |
|
184 | 172 | value = self.dataOut.data_output[2] |
|
185 | 173 | |
|
186 | 174 | else: |
|
187 | 175 | print 'value is not defined' |
|
188 | 176 | return |
|
189 | 177 | |
|
190 | 178 | if svalue2 == 'inTime': |
|
191 | 179 | yaxis = self.dataOut.height |
|
192 | 180 | xaxis = numpy.array([[self.dataOut.time1],[self.dataOut.time1]]) |
|
193 | 181 | |
|
194 | 182 | elif svalue2 == 'inHeight': |
|
195 | 183 | yaxis = numpy.array([[self.dataOut.time1],[self.dataOut.time1]]) |
|
196 | 184 | xaxis = self.dataOut.height |
|
197 | 185 | |
|
198 | 186 | else: |
|
199 | 187 | print 'svalue2 is required, either inHeight or inTime' |
|
200 | 188 | return |
|
201 | 189 | |
|
202 | 190 | output_array = value |
|
203 | 191 | |
|
204 | 192 | value_temp = value[:,m] |
|
205 | 193 | error = numpy.zeros(len(self.dataOut.time[m,:])) |
|
206 | 194 | if svalue2 == 'inHeight': |
|
207 | 195 | value_temp = numpy.transpose(value_temp) |
|
208 | 196 | error = numpy.zeros(len(self.dataOut.height)) |
|
209 | 197 | |
|
210 | 198 | htemp = yaxis[m,:] |
|
211 | 199 | std = value_temp |
|
212 | 200 | for h in range(len(htemp)): |
|
213 | 201 | if filter: #standard deviation filtering |
|
214 | 202 | std[h] = numpy.std(value_temp[h],ddof = npoints) |
|
215 | 203 | value_temp[numpy.where(std[h] > 5),h] = numpy.nan |
|
216 | 204 | error[numpy.where(std[h] > 5)] = error[numpy.where(std[h] > 5)] + 1 |
|
217 | 205 | |
|
218 | 206 | |
|
219 | 207 | nvalues_valid = len(numpy.where(numpy.isfinite(value_temp[h]))[0]) |
|
220 | 208 | minvalid = novalid*len(xaxis[m,:]) |
|
221 | 209 | if minvalid <= npoints: |
|
222 | 210 | minvalid = npoints |
|
223 | 211 | |
|
224 | 212 | #only if valid values greater than the minimum required (10%) |
|
225 | 213 | if nvalues_valid > minvalid: |
|
226 | 214 | |
|
227 | 215 | if method == 0: |
|
228 | 216 | #SMOOTH |
|
229 | 217 | w = value_temp[h] - self.Smooth(input=value_temp[h], width=npoints, edge_truncate=1) |
|
230 | 218 | |
|
231 | 219 | |
|
232 | 220 | if method == 1: |
|
233 | 221 | #MEDIAN |
|
234 | 222 | w = value_temp[h] - self.Median(input=value_temp[h], width = npoints) |
|
235 | 223 | |
|
236 | 224 | dw = numpy.std(w[numpy.where(numpy.isfinite(w))],ddof = 1) |
|
237 | 225 | |
|
238 | 226 | threshold = dw*factor |
|
239 | 227 | value_temp[numpy.where(w > threshold),h] = numpy.nan |
|
240 | 228 | value_temp[numpy.where(w < -1*threshold),h] = numpy.nan |
|
241 | 229 | |
|
242 | 230 | |
|
243 | 231 | #At the end |
|
244 | 232 | if svalue2 == 'inHeight': |
|
245 | 233 | value_temp = numpy.transpose(value_temp) |
|
246 | 234 | output_array[:,m] = value_temp |
|
247 | 235 | |
|
248 | 236 | if svalue == 'zonal': |
|
249 | 237 | self.dataOut.data_output[0] = output_array |
|
250 | 238 | |
|
251 | 239 | elif svalue == 'meridional': |
|
252 | 240 | self.dataOut.data_output[1] = output_array |
|
253 | 241 | |
|
254 | 242 | elif svalue == 'vertical': |
|
255 | 243 | self.dataOut.data_output[2] = output_array |
|
256 | 244 | |
|
257 | 245 | return self.dataOut.data_output |
|
258 | 246 | |
|
259 | 247 | |
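The core of OutliersFilter is a residual test: the profile is smoothed or median-filtered, the residual w = x - background is formed, and points with |w| larger than factor times the residual standard deviation are discarded. A compact standalone sketch of that criterion on a single 1-D series (function name, width and factor are illustrative):

import numpy

def reject_outliers(x, width=9, factor=2.0):
    # Running-median background; edge points keep their original values
    half = width // 2
    background = x.copy()
    for i in range(half, len(x) - half):
        window = x[i - half:i + half + 1]
        background[i] = numpy.median(window[numpy.isfinite(window)])

    w = x - background
    dw = numpy.std(w[numpy.isfinite(w)], ddof=1)
    cleaned = x.copy()
    cleaned[numpy.abs(w) > factor * dw] = numpy.nan
    return cleaned

x = numpy.array([1., 1.2, 0.9, 1.1, 1.0, 25., 0.8, 1.3, 1.1, 0.9, 1.0])
print(reject_outliers(x))   # only the 25. spike becomes nan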
|
260 | 248 | def Median(self,input,width): |
|
261 | 249 | ''' |
|
262 | 250 | Inputs: |
|
263 | 251 | input - Velocity array |
|
264 | 252 | width - Number of points for mask filter |
|
265 | 253 | |
|
266 | 254 | ''' |
|
267 | 255 | |
|
268 | 256 | if numpy.mod(width,2) == 1: |
|
269 | 257 | pc = int((width - 1) / 2) |
|
270 | 258 | cont = 0 |
|
271 | 259 | output = [] |
|
272 | 260 | |
|
273 | 261 | for i in range(len(input)): |
|
274 | 262 | if i >= pc and i < len(input) - pc: |
|
275 | 263 | new2 = input[i-pc:i+pc+1] |
|
276 | 264 | temp = numpy.where(numpy.isfinite(new2)) |
|
277 | 265 | new = new2[temp] |
|
278 | 266 | value = numpy.median(new) |
|
279 | 267 | output.append(value) |
|
280 | 268 | |
|
281 | 269 | output = numpy.array(output) |
|
282 | 270 | output = numpy.hstack((input[0:pc],output)) |
|
283 | 271 | output = numpy.hstack((output,input[-pc:len(input)])) |
|
284 | 272 | |
|
285 | 273 | return output |
|
286 | 274 | |
|
287 | 275 | def Smooth(self,input,width,edge_truncate = None): |
|
288 | 276 | ''' |
|
289 | 277 | Inputs: |
|
290 | 278 | input - Velocity array |
|
291 | 279 | width - Number of points for mask filter |
|
292 | 280 | edge_truncate - 1 to truncate the convolution product at the edges, else 0 or None |
|
293 | 281 | |
|
294 | 282 | ''' |
|
295 | 283 | |
|
296 | 284 | if numpy.mod(width,2) == 0: |
|
297 | 285 | real_width = width + 1 |
|
298 | 286 | nzeros = width / 2 |
|
299 | 287 | else: |
|
300 | 288 | real_width = width |
|
301 | 289 | nzeros = (width - 1) / 2 |
|
302 | 290 | |
|
303 | 291 | half_width = int(real_width)/2 |
|
304 | 292 | length = len(input) |
|
305 | 293 | |
|
306 | 294 | gate = numpy.ones(real_width,dtype='float') |
|
307 | 295 | norm_of_gate = numpy.sum(gate) |
|
308 | 296 | |
|
309 | 297 | nan_process = 0 |
|
310 | 298 | nan_id = numpy.where(numpy.isnan(input)) |
|
311 | 299 | if len(nan_id[0]) > 0: |
|
312 | 300 | nan_process = 1 |
|
313 | 301 | pb = numpy.zeros(len(input)) |
|
314 | 302 | pb[nan_id] = 1. |
|
315 | 303 | input[nan_id] = 0. |
|
316 | 304 | |
|
317 | 305 | if edge_truncate == True: |
|
318 | 306 | output = numpy.convolve(input/norm_of_gate,gate,mode='same') |
|
319 | 307 | elif edge_truncate == False or edge_truncate == None: |
|
320 | 308 | output = numpy.convolve(input/norm_of_gate,gate,mode='valid') |
|
321 | 309 | output = numpy.hstack((input[0:half_width],output)) |
|
322 | 310 | output = numpy.hstack((output,input[len(input)-half_width:len(input)])) |
|
323 | 311 | |
|
324 | 312 | if nan_process: |
|
325 | 313 | pb = numpy.convolve(pb/norm_of_gate,gate,mode='valid') |
|
326 | 314 | pb = numpy.hstack((numpy.zeros(half_width),pb)) |
|
327 | 315 | pb = numpy.hstack((pb,numpy.zeros(half_width))) |
|
328 | 316 | output[numpy.where(pb > 0.9999)] = numpy.nan |
|
329 | 317 | input[nan_id] = numpy.nan |
|
330 | 318 | return output |
|
331 | 319 | |
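Smooth implements a boxcar average by convolution and handles NaNs by convolving a 0/1 NaN mask with the same kernel and blanking any output point whose whole window was NaN. The masking idea in isolation (kernel width and data are arbitrary):

import numpy

x = numpy.array([1., 2., numpy.nan, 4., 5., 6., 7.])
width = 3
gate = numpy.ones(width) / width

nan_mask = numpy.isnan(x).astype(float)
x_filled = numpy.where(numpy.isnan(x), 0., x)

smoothed = numpy.convolve(x_filled, gate, mode='same')
coverage = numpy.convolve(nan_mask, gate, mode='same')

# Blank points whose whole window was NaN (Smooth uses a 0.9999 cut-off)
smoothed[coverage > 0.9999] = numpy.nan
print(smoothed)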
|
332 | 320 | def Average(self,aver=0,nhaver=1): |
|
333 | 321 | ''' |
|
334 | 322 | Inputs: |
|
335 | 323 | aver - Indicates the time period over which is averaged or consensus data |
|
336 | 324 | nhaver - Indicates the decimation factor in heights |
|
337 | 325 | |
|
338 | 326 | ''' |
|
339 | 327 | nhpoints = 48 |
|
340 | 328 | |
|
341 | 329 | lat_piura = -5.17 |
|
342 | 330 | lat_huancayo = -12.04 |
|
343 | 331 | lat_porcuya = -5.8 |
|
344 | 332 | |
|
345 | 333 | if '%2.2f'%self.dataOut.lat == '%2.2f'%lat_piura: |
|
346 | 334 | hcm = 3. |
|
347 | 335 | if self.dataOut.year == 2003 : |
|
348 | 336 | if self.dataOut.doy >= 25 and self.dataOut.doy < 64: |
|
349 | 337 | nhpoints = 12 |
|
350 | 338 | |
|
351 | 339 | elif '%2.2f'%self.dataOut.lat == '%2.2f'%lat_huancayo: |
|
352 | 340 | hcm = 3. |
|
353 | 341 | if self.dataOut.year == 2003 : |
|
354 | 342 | if self.dataOut.doy >= 25 and self.dataOut.doy < 64: |
|
355 | 343 | nhpoints = 12 |
|
356 | 344 | |
|
357 | 345 | |
|
358 | 346 | elif '%2.2f'%self.dataOut.lat == '%2.2f'%lat_porcuya: |
|
359 | 347 | hcm = 5.#2 |
|
360 | 348 | |
|
361 | 349 | pdata = 0.2 |
|
362 | 350 | taver = [1,2,3,4,6,8,12,24] |
|
363 | 351 | t0 = 0 |
|
364 | 352 | tf = 24 |
|
365 | 353 | ntime =(tf-t0)/taver[aver] |
|
366 | 354 | ti = numpy.arange(ntime) |
|
367 | 355 | tf = numpy.arange(ntime) + taver[aver] |
|
368 | 356 | |
|
369 | 357 | |
|
370 | 358 | old_height = self.dataOut.heightList |
|
371 | 359 | |
|
372 | 360 | if nhaver > 1: |
|
373 | 361 | num_hei = len(self.dataOut.heightList)/nhaver/self.dataOut.nmodes |
|
374 | 362 | deltha = 0.05*nhaver |
|
375 | 363 | minhvalid = pdata*nhaver |
|
376 | 364 | for im in range(self.dataOut.nmodes): |
|
377 | 365 | new_height = numpy.arange(num_hei)*deltha + self.dataOut.height[im,0] + deltha/2. |
|
378 | 366 | |
|
379 | 367 | |
|
380 | 368 | data_fHeigths_List = [] |
|
381 | 369 | data_fZonal_List = [] |
|
382 | 370 | data_fMeridional_List = [] |
|
383 | 371 | data_fVertical_List = [] |
|
384 | 372 | startDTList = [] |
|
385 | 373 | |
|
386 | 374 | |
|
387 | 375 | for i in range(ntime): |
|
388 | 376 | height = old_height |
|
389 | 377 | |
|
390 | 378 | start = datetime.datetime(self.dataOut.year,self.dataOut.month,self.dataOut.day) + datetime.timedelta(hours = int(ti[i])) - datetime.timedelta(hours = 5) |
|
391 | 379 | stop = datetime.datetime(self.dataOut.year,self.dataOut.month,self.dataOut.day) + datetime.timedelta(hours = int(tf[i])) - datetime.timedelta(hours = 5) |
|
392 | 380 | |
|
393 | 381 | |
|
394 | 382 | limit_sec1 = time.mktime(start.timetuple()) |
|
395 | 383 | limit_sec2 = time.mktime(stop.timetuple()) |
|
396 | 384 | |
|
397 | 385 | t1 = numpy.where(self.f_timesec >= limit_sec1) |
|
398 | 386 | t2 = numpy.where(self.f_timesec < limit_sec2) |
|
399 | 387 | time_select = [] |
|
400 | 388 | for val_sec in t1[0]: |
|
401 | 389 | if val_sec in t2[0]: |
|
402 | 390 | time_select.append(val_sec) |
|
403 | 391 | |
|
404 | 392 | |
|
405 | 393 | time_select = numpy.array(time_select,dtype = 'int') |
|
406 | 394 | minvalid = numpy.ceil(pdata*nhpoints) |
|
407 | 395 | |
|
408 | 396 | zon_aver = numpy.zeros([self.dataOut.nranges,self.dataOut.nmodes],dtype='f4') + numpy.nan |
|
409 | 397 | mer_aver = numpy.zeros([self.dataOut.nranges,self.dataOut.nmodes],dtype='f4') + numpy.nan |
|
410 | 398 | ver_aver = numpy.zeros([self.dataOut.nranges,self.dataOut.nmodes],dtype='f4') + numpy.nan |
|
411 | 399 | |
|
412 | 400 | if nhaver > 1: |
|
413 | 401 | new_zon_aver = numpy.zeros([num_hei,self.dataOut.nmodes],dtype='f4') + numpy.nan |
|
414 | 402 | new_mer_aver = numpy.zeros([num_hei,self.dataOut.nmodes],dtype='f4') + numpy.nan |
|
415 | 403 | new_ver_aver = numpy.zeros([num_hei,self.dataOut.nmodes],dtype='f4') + numpy.nan |
|
416 | 404 | |
|
417 | 405 | if len(time_select) > minvalid: |
|
418 | 406 | time_average = self.f_timesec[time_select] |
|
419 | 407 | |
|
420 | 408 | for im in range(self.dataOut.nmodes): |
|
421 | 409 | |
|
422 | 410 | for ih in range(self.dataOut.nranges): |
|
423 | 411 | if numpy.sum(numpy.isfinite(self.f_zon[time_select,ih,im])) >= minvalid: |
|
424 | 412 | zon_aver[ih,im] = numpy.nansum(self.f_zon[time_select,ih,im]) / numpy.sum(numpy.isfinite(self.f_zon[time_select,ih,im])) |
|
425 | 413 | |
|
426 | 414 | if numpy.sum(numpy.isfinite(self.f_mer[time_select,ih,im])) >= minvalid: |
|
427 | 415 | mer_aver[ih,im] = numpy.nansum(self.f_mer[time_select,ih,im]) / numpy.sum(numpy.isfinite(self.f_mer[time_select,ih,im])) |
|
428 | 416 | |
|
429 | 417 | if numpy.sum(numpy.isfinite(self.f_ver[time_select,ih,im])) >= minvalid: |
|
430 | 418 | ver_aver[ih,im] = numpy.nansum(self.f_ver[time_select,ih,im]) / numpy.sum(numpy.isfinite(self.f_ver[time_select,ih,im])) |
|
431 | 419 | |
|
432 | 420 | if nhaver > 1: |
|
433 | 421 | for ih in range(num_hei): |
|
434 | 422 | hvalid = numpy.arange(nhaver) + nhaver*ih |
|
435 | 423 | |
|
436 | 424 | if numpy.sum(numpy.isfinite(zon_aver[hvalid,im])) >= minvalid: |
|
437 | 425 | new_zon_aver[ih,im] = numpy.nansum(zon_aver[hvalid,im]) / numpy.sum(numpy.isfinite(zon_aver[hvalid,im])) |
|
438 | 426 | |
|
439 | 427 | if numpy.sum(numpy.isfinite(mer_aver[hvalid,im])) >= minvalid: |
|
440 | 428 | new_mer_aver[ih,im] = numpy.nansum(mer_aver[hvalid,im]) / numpy.sum(numpy.isfinite(mer_aver[hvalid,im])) |
|
441 | 429 | |
|
442 | 430 | if numpy.sum(numpy.isfinite(ver_aver[hvalid,im])) >= minvalid: |
|
443 | 431 | new_ver_aver[ih,im] = numpy.nansum(ver_aver[hvalid,im]) / numpy.sum(numpy.isfinite(ver_aver[hvalid,im])) |
|
444 | 432 | if nhaver > 1: |
|
445 | 433 | zon_aver = new_zon_aver |
|
446 | 434 | mer_aver = new_mer_aver |
|
447 | 435 | ver_aver = new_ver_aver |
|
448 | 436 | height = new_height |
|
449 | 437 | |
|
450 | 438 | |
|
451 | 439 | tstart = time_average[0] |
|
452 | 440 | tend = time_average[-1] |
|
453 | 441 | startTime = time.gmtime(tstart) |
|
454 | 442 | |
|
455 | 443 | year = startTime.tm_year |
|
456 | 444 | month = startTime.tm_mon |
|
457 | 445 | day = startTime.tm_mday |
|
458 | 446 | hour = startTime.tm_hour |
|
459 | 447 | minute = startTime.tm_min |
|
460 | 448 | second = startTime.tm_sec |
|
461 | 449 | |
|
462 | 450 | startDTList.append(datetime.datetime(year,month,day,hour,minute,second)) |
|
463 | 451 | |
|
464 | 452 | |
|
465 | 453 | o_height = numpy.array([]) |
|
466 | 454 | o_zon_aver = numpy.array([]) |
|
467 | 455 | o_mer_aver = numpy.array([]) |
|
468 | 456 | o_ver_aver = numpy.array([]) |
|
469 | 457 | if self.dataOut.nmodes > 1: |
|
470 | 458 | for im in range(self.dataOut.nmodes): |
|
471 | 459 | |
|
472 | 460 | if im == 0: |
|
473 | 461 | h_select = numpy.where(numpy.bitwise_and(height[0,:] >=0,height[0,:] <= hcm,numpy.isfinite(height[0,:]))) |
|
474 | 462 | else: |
|
475 | 463 | h_select = numpy.where(numpy.bitwise_and(height[1,:] > hcm,height[1,:] < 20,numpy.isfinite(height[1,:]))) |
|
476 | 464 | |
|
477 | 465 | |
|
478 | 466 | ht = h_select[0] |
|
479 | 467 | |
|
480 | 468 | o_height = numpy.hstack((o_height,height[im,ht])) |
|
481 | 469 | o_zon_aver = numpy.hstack((o_zon_aver,zon_aver[ht,im])) |
|
482 | 470 | o_mer_aver = numpy.hstack((o_mer_aver,mer_aver[ht,im])) |
|
483 | 471 | o_ver_aver = numpy.hstack((o_ver_aver,ver_aver[ht,im])) |
|
484 | 472 | |
|
485 | 473 | data_fHeigths_List.append(o_height) |
|
486 | 474 | data_fZonal_List.append(o_zon_aver) |
|
487 | 475 | data_fMeridional_List.append(o_mer_aver) |
|
488 | 476 | data_fVertical_List.append(o_ver_aver) |
|
489 | 477 | |
|
490 | 478 | |
|
491 | 479 | else: |
|
492 | 480 | h_select = numpy.where(numpy.bitwise_and(height[0,:] <= hcm,numpy.isfinite(height[0,:]))) |
|
493 | 481 | ht = h_select[0] |
|
494 | 482 | o_height = numpy.hstack((o_height,height[im,ht])) |
|
495 | 483 | o_zon_aver = numpy.hstack((o_zon_aver,zon_aver[ht,im])) |
|
496 | 484 | o_mer_aver = numpy.hstack((o_mer_aver,mer_aver[ht,im])) |
|
497 | 485 | o_ver_aver = numpy.hstack((o_ver_aver,ver_aver[ht,im])) |
|
498 | 486 | |
|
499 | 487 | data_fHeigths_List.append(o_height) |
|
500 | 488 | data_fZonal_List.append(o_zon_aver) |
|
501 | 489 | data_fMeridional_List.append(o_mer_aver) |
|
502 | 490 | data_fVertical_List.append(o_ver_aver) |
|
503 | 491 | |
|
504 | 492 | |
|
505 | 493 | return startDTList, data_fHeigths_List, data_fZonal_List, data_fMeridional_List, data_fVertical_List |
|
506 | 494 | |
|
507 | 495 | |
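The per-height consensus inside Average is a NaN-aware mean that is accepted only when enough samples are finite; the same rule in a compact, hedged form (the helper below is illustrative, not part of the class):

import numpy

def consensus_mean(values, min_valid):
    # NaN-aware mean, rejected when too few finite samples contribute
    nvalid = numpy.sum(numpy.isfinite(values))
    if nvalid >= min_valid:
        return numpy.nansum(values) / nvalid
    return numpy.nan

samples = numpy.array([2.0, numpy.nan, 3.0, numpy.nan, 4.0])
print(consensus_mean(samples, min_valid=3))   # 3.0
print(consensus_mean(samples, min_valid=4))   # nan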
|
508 | 496 | def prePlot(self,modeselect=None): |
|
509 | 497 | |
|
510 | 498 | ''' |
|
511 | 499 | Inputs: |
|
512 | 500 | |
|
513 | 501 | self.dataOut.data_output - Zonal, Meridional and Vertical velocity array |
|
514 | 502 | self.dataOut.height - height array |
|
515 | 503 | self.dataOut.time - Time array (seconds) |
|
516 | 504 | self.dataOut.data_SNR - SNR array |
|
517 | 505 | |
|
518 | 506 | ''' |
|
519 | 507 | |
|
520 | 508 | m = modeselect -1 |
|
521 | 509 | |
|
522 | 510 | print ' [Plotting mode {}]'.format(modeselect) |
|
523 | 511 | if not (m ==1 or m==0): |
|
524 | 512 | raise IndexError("'Mode' must be equal to 1 or 2") |
|
525 | 513 | # |
|
526 | 514 | if self.flagfirstmode==0: |
|
527 | 515 | #copy of the data |
|
528 | 516 | self.data_output_copy = self.dataOut.data_output.copy() |
|
529 | 517 | self.data_height_copy = self.dataOut.height.copy() |
|
530 | 518 | self.data_time_copy = self.dataOut.time.copy() |
|
531 | 519 | self.data_SNR_copy = self.dataOut.data_SNR.copy() |
|
532 | 520 | self.flagfirstmode = 1 |
|
533 | 521 | |
|
534 | 522 | else: |
|
535 | 523 | self.dataOut.data_output = self.data_output_copy |
|
536 | 524 | self.dataOut.height = self.data_height_copy |
|
537 | 525 | self.dataOut.time = self.data_time_copy |
|
538 | 526 | self.dataOut.data_SNR = self.data_SNR_copy |
|
539 | 527 | self.flagfirstmode = 0 |
|
540 | 528 | |
|
541 | 529 | |
|
542 | 530 | #select data for mode m |
|
543 | 531 | #self.dataOut.data_output = self.dataOut.data_output[:,:,m] |
|
544 | 532 | self.dataOut.heightList = self.dataOut.height[0,:] |
|
545 | 533 | |
|
546 | 534 | data_SNR = self.dataOut.data_SNR[:,:,m] |
|
547 | 535 | self.dataOut.data_SNR= transpose(data_SNR) |
|
548 | 536 | |
|
549 | 537 | if m==1 and self.dataOut.counter_records%2==0: |
|
550 | 538 | print '*********' |
|
551 | 539 | print 'MODE 2' |
|
552 | 540 | #print 'Zonal', self.dataOut.data_output[0] |
|
553 | 541 | #print 'Meridional', self.dataOut.data_output[1] |
|
554 | 542 | #print 'Vertical', self.dataOut.data_output[2] |
|
555 | 543 | |
|
556 | 544 | print '*********' |
|
557 | 545 | |
|
558 | 546 | Vx=self.dataOut.data_output[0,:,m] |
|
559 | 547 | Vy=self.dataOut.data_output[1,:,m] |
|
560 | 548 | |
|
561 | 549 | Vmag=numpy.sqrt(Vx**2+Vy**2) |
|
562 | 550 | Vang=numpy.arctan2(Vy,Vx) |
|
563 | 551 | #print 'Vmag', Vmag |
|
564 | 552 | #print 'Vang', Vang |
|
565 | 553 | |
|
566 | 554 | self.dataOut.data_output[0,:,m]=Vmag |
|
567 | 555 | self.dataOut.data_output[1,:,m]=Vang |
|
568 | 556 | |
|
569 | 557 | prin= self.dataOut.data_output[0,:,m][~numpy.isnan(self.dataOut.data_output[0,:,m])] |
|
570 | 558 | print ' ' |
|
571 | 559 | print 'VmagAverage',numpy.mean(prin) |
|
572 | 560 | print ' ' |
|
573 | 561 | self.dataOut.data_output = self.dataOut.data_output[:,:,m] |
|
574 | 562 | |
|
575 | 563 | |
|
576 | 564 | No newline at end of file |
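For reference, the speed/direction conversion applied to mode 2 in prePlot reduces to Vmag = sqrt(Vx**2 + Vy**2) and Vang = arctan2(Vy, Vx); a tiny worked example with arbitrary values:

import numpy

Vx = numpy.array([3.0, 0.0, -5.0])     # zonal (m/s)
Vy = numpy.array([4.0, 2.0,  0.0])     # meridional (m/s)

Vmag = numpy.sqrt(Vx**2 + Vy**2)       # wind speed
Vang = numpy.arctan2(Vy, Vx)           # angle in radians, counter-clockwise from east

print(Vmag)                  # [ 5.  2.  5.]
print(numpy.degrees(Vang))   # [  53.13   90.  180.]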
|