@@ -0,0 +1,362 | |||||
|
1 | ''' | |||
|
2 | Created on Nov 9, 2016 | |||
|
3 | ||||
|
4 | @author: roj- LouVD | |||
|
5 | ''' | |||
|
6 | ||||
|
7 | ||||
|
8 | import os | |||
|
9 | import sys | |||
|
10 | import time | |||
|
11 | import glob | |||
|
12 | import datetime | |||
|
13 | ||||
|
14 | import numpy | |||
|
15 | ||||
|
16 | from schainpy.model.proc.jroproc_base import ProcessingUnit | |||
|
17 | from schainpy.model.data.jrodata import Parameters | |||
|
18 | from schainpy.model.io.jroIO_base import JRODataReader, isNumber | |||
|
19 | ||||
|
20 | FILE_HEADER_STRUCTURE = numpy.dtype([ | |||
|
21 | ('FMN', '<u4'), | |||
|
22 | ('nrec', '<u4'), | |||
|
23 | ('fr_offset', '<u4'), | |||
|
24 | ('id', '<u4'), | |||
|
25 | ('site', 'u1', (32,)) | |||
|
26 | ]) | |||
|
27 | ||||
|
28 | REC_HEADER_STRUCTURE = numpy.dtype([ | |||
|
29 | ('rmn', '<u4'), | |||
|
30 | ('rcounter', '<u4'), | |||
|
31 | ('nr_offset', '<u4'), | |||
|
32 | ('tr_offset', '<u4'), | |||
|
33 | ('time', '<u4'), | |||
|
34 | ('time_msec', '<u4'), | |||
|
35 | ('tag', 'u1', (32,)), | |||
|
36 | ('comments', 'u1', (32,)), | |||
|
37 | ('lat', '<f4'), | |||
|
38 | ('lon', '<f4'), | |||
|
39 | ('gps_status', '<u4'), | |||
|
40 | ('freq', '<u4'), | |||
|
41 | ('freq0', '<u4'), | |||
|
42 | ('nchan', '<u4'), | |||
|
43 | ('delta_r', '<u4'), | |||
|
44 | ('nranges', '<u4'), | |||
|
45 | ('r0', '<u4'), | |||
|
46 | ('prf', '<u4'), | |||
|
47 | ('ncoh', '<u4'), | |||
|
48 | ('npoints', '<u4'), | |||
|
49 | ('polarization', '<i4'), | |||
|
50 | ('rx_filter', '<u4'), | |||
|
51 | ('nmodes', '<u4'), | |||
|
52 | ('dmode_index', '<u4'), | |||
|
53 | ('dmode_rngcorr', '<u4'), | |||
|
54 | ('nrxs', '<u4'), | |||
|
55 | ('acf_length', '<u4'), | |||
|
56 | ('acf_lags', '<u4'), | |||
|
57 | ('sea_to_atmos', '<f4'), | |||
|
58 | ('sea_notch', '<u4'), | |||
|
59 | ('lh_sea', '<u4'), | |||
|
60 | ('hh_sea', '<u4'), | |||
|
61 | ('nbins_sea', '<u4'), | |||
|
62 | ('min_snr', '<f4'), | |||
|
63 | ('min_cc', '<f4'), | |||
|
64 | ('max_time_diff', '<f4') | |||
|
65 | ]) | |||
|
66 | ||||
|
67 | DATA_STRUCTURE = numpy.dtype([ | |||
|
68 | ('range', '<u4'), | |||
|
69 | ('status', '<u4'), | |||
|
70 | ('zonal', '<f4'), | |||
|
71 | ('meridional', '<f4'), | |||
|
72 | ('vertical', '<f4'), | |||
|
73 | ('zonal_a', '<f4'), | |||
|
74 | ('meridional_a', '<f4'), | |||
|
75 | ('corrected_fading', '<f4'), # seconds | |||
|
76 | ('uncorrected_fading', '<f4'), # seconds | |||
|
77 | ('time_diff', '<f4'), | |||
|
78 | ('major_axis', '<f4'), | |||
|
79 | ('axial_ratio', '<f4'), | |||
|
80 | ('orientation', '<f4'), | |||
|
81 | ('sea_power', '<u4'), | |||
|
82 | ('sea_algorithm', '<u4') | |||
|
83 | ]) | |||
|
84 | ||||
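Taken together, these dtypes describe the *.sswma layout that the reader below walks through: one FILE_HEADER_STRUCTURE at the start of the file, then for each record a REC_HEADER_STRUCTURE (extended with per-channel fields in readHeader) followed by nranges entries of the extended DATA_STRUCTURE, once per mode. A minimal stand-alone sketch of peeking at the file header (not part of this changeset; the path and file name are placeholders that follow the YYYYMMDD pattern expected by search_files):

import numpy

with open('/data/bltr/peru2_20161109.sswma', 'rb') as fp:        # hypothetical path and file name
    file_header = numpy.fromfile(fp, FILE_HEADER_STRUCTURE, 1)
    print file_header['nrec'][0]                                  # number of records in the file
    print file_header['site'].tostring()                          # 32-byte site name field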
|
85 | class BLTRParamReader(JRODataReader, ProcessingUnit): | |||
|
86 | ''' | |||
|
87 | Boundary Layer and Tropospheric Radar (BLTR) reader: reads wind velocities and SNR from *.sswma files | |||
|
88 | ''' | |||
|
89 | ||||
|
90 | ext = '.sswma' | |||
|
91 | ||||
|
92 | def __init__(self, **kwargs): | |||
|
93 | ||||
|
94 | ProcessingUnit.__init__(self, **kwargs) | |||
|
95 | ||||
|
96 | self.dataOut = Parameters() | |||
|
97 | self.counter_records = 0 | |||
|
98 | self.flagNoMoreFiles = 0 | |||
|
99 | self.isConfig = False | |||
|
100 | self.filename = None | |||
|
101 | ||||
|
102 | def setup(self, | |||
|
103 | path=None, | |||
|
104 | startDate=None, | |||
|
105 | endDate=None, | |||
|
106 | ext=None, | |||
|
107 | startTime=datetime.time(0, 0, 0), | |||
|
108 | endTime=datetime.time(23, 59, 59), | |||
|
109 | timezone=0, | |||
|
110 | status_value=0, | |||
|
111 | **kwargs): | |||
|
112 | ||||
|
113 | self.path = path | |||
|
114 | self.startTime = startTime | |||
|
115 | self.endTime = endTime | |||
|
116 | self.status_value = status_value | |||
|
117 | ||||
|
118 | if self.path is None: | |||
|
119 | raise ValueError, "The path is not valid" | |||
|
120 | ||||
|
121 | if ext is None: | |||
|
122 | ext = self.ext | |||
|
123 | ||||
|
124 | self.search_files(self.path, startDate, endDate, ext) | |||
|
125 | self.timezone = timezone | |||
|
126 | self.fileIndex = 0 | |||
|
127 | ||||
|
128 | if not self.fileList: | |||
|
129 | raise Warning, "There are no files matching these dates in the folder: %s.\nCheck 'startDate' and 'endDate'" % (path) | |||
|
130 | ||||
|
131 | self.setNextFile() | |||
|
132 | ||||
|
133 | def search_files(self, path, startDate, endDate, ext): | |||
|
134 | ''' | |||
|
135 | Searching for BLTR rawdata files in path | |||
|
136 | Creating a list of files to process, included in [startDate, endDate] | |||
|
137 | ||||
|
138 | Input: | |||
|
139 | path - Path to find BLTR rawdata files | |||
|
140 | startDate - Select files from this date | |||
|
141 | endDate - Select files until this date | |||
|
142 | ext - Extension of the file to read | |||
|
143 | ||||
|
144 | ''' | |||
|
145 | ||||
|
146 | print 'Searching files in %s' % (path) | |||
|
147 | foldercounter = 0 | |||
|
148 | fileList0 = glob.glob1(path, "*%s" % ext) | |||
|
149 | fileList0.sort() | |||
|
150 | ||||
|
151 | self.fileList = [] | |||
|
152 | self.dateFileList = [] | |||
|
153 | ||||
|
154 | for thisFile in fileList0: | |||
|
155 | year = thisFile[-14:-10] | |||
|
156 | if not isNumber(year): | |||
|
157 | continue | |||
|
158 | ||||
|
159 | month = thisFile[-10:-8] | |||
|
160 | if not isNumber(month): | |||
|
161 | continue | |||
|
162 | ||||
|
163 | day = thisFile[-8:-6] | |||
|
164 | if not isNumber(day): | |||
|
165 | continue | |||
|
166 | ||||
|
167 | year, month, day = int(year), int(month), int(day) | |||
|
168 | dateFile = datetime.date(year, month, day) | |||
|
169 | ||||
|
170 | if (startDate > dateFile) or (endDate < dateFile): | |||
|
171 | continue | |||
|
172 | ||||
|
173 | self.fileList.append(thisFile) | |||
|
174 | self.dateFileList.append(dateFile) | |||
|
175 | ||||
|
176 | return | |||
|
177 | ||||
|
178 | def setNextFile(self): | |||
|
179 | ||||
|
180 | file_id = self.fileIndex | |||
|
181 | ||||
|
182 | if file_id == len(self.fileList): | |||
|
183 | print '\nNo more files in the folder' | |||
|
184 | print 'Total number of file(s) read : {}'.format(self.fileIndex) | |||
|
185 | self.flagNoMoreFiles = 1 | |||
|
186 | return 0 | |||
|
187 | ||||
|
188 | print '\n[Setting file] (%s) ...' % self.fileList[file_id] | |||
|
189 | filename = os.path.join(self.path, self.fileList[file_id]) | |||
|
190 | ||||
|
191 | dirname, name = os.path.split(filename) | |||
|
192 | self.siteFile = name.split('.')[0] # 'peru2' ---> Piura - 'peru1' ---> Huancayo or Porcuya | |||
|
193 | if self.filename is not None: | |||
|
194 | self.fp.close() | |||
|
195 | self.filename = filename | |||
|
196 | self.fp = open(self.filename, 'rb') | |||
|
197 | self.header_file = numpy.fromfile(self.fp, FILE_HEADER_STRUCTURE, 1) | |||
|
198 | self.nrecords = self.header_file['nrec'][0] | |||
|
199 | self.sizeOfFile = os.path.getsize(self.filename) | |||
|
200 | self.counter_records = 0 | |||
|
201 | self.flagIsNewFile = 0 | |||
|
202 | self.fileIndex += 1 | |||
|
203 | ||||
|
204 | return 1 | |||
|
205 | ||||
|
206 | def readNextBlock(self): | |||
|
207 | ||||
|
208 | while True: | |||
|
209 | if self.counter_records == self.nrecords: | |||
|
210 | self.flagIsNewFile = 1 | |||
|
211 | if not self.setNextFile(): | |||
|
212 | return 0 | |||
|
213 | ||||
|
214 | self.readBlock() | |||
|
215 | ||||
|
216 | if (self.datatime.time() < self.startTime) or (self.datatime.time() > self.endTime): | |||
|
217 | print "[Reading] Record No. %d/%d -> %s [Skipping]" %( | |||
|
218 | self.counter_records, | |||
|
219 | self.nrecords, | |||
|
220 | self.datatime.ctime()) | |||
|
221 | continue | |||
|
222 | break | |||
|
223 | ||||
|
224 | print "[Reading] Record No. %d/%d -> %s" %( | |||
|
225 | self.counter_records, | |||
|
226 | self.nrecords, | |||
|
227 | self.datatime.ctime()) | |||
|
228 | ||||
|
229 | return 1 | |||
|
230 | ||||
|
231 | def readBlock(self): | |||
|
232 | ||||
|
233 | pointer = self.fp.tell() | |||
|
234 | header_rec = numpy.fromfile(self.fp, REC_HEADER_STRUCTURE, 1) | |||
|
235 | self.nchannels = header_rec['nchan'][0]/2 | |||
|
236 | self.kchan = header_rec['nrxs'][0] | |||
|
237 | self.nmodes = header_rec['nmodes'][0] | |||
|
238 | self.nranges = header_rec['nranges'][0] | |||
|
239 | self.fp.seek(pointer) | |||
|
240 | self.height = numpy.empty((self.nmodes, self.nranges)) | |||
|
241 | self.snr = numpy.empty((self.nmodes, self.nchannels, self.nranges)) | |||
|
242 | self.buffer = numpy.empty((self.nmodes, 3, self.nranges)) | |||
|
243 | ||||
|
244 | for mode in range(self.nmodes): | |||
|
245 | self.readHeader() | |||
|
246 | data = self.readData() | |||
|
247 | self.height[mode] = (data[0] - self.correction) / 1000. | |||
|
248 | self.buffer[mode] = data[1] | |||
|
249 | self.snr[mode] = data[2] | |||
|
250 | ||||
|
251 | self.counter_records = self.counter_records + self.nmodes | |||
|
252 | ||||
|
253 | return | |||
|
254 | ||||
|
255 | def readHeader(self): | |||
|
256 | ''' | |||
|
257 | RecordHeader of BLTR rawdata file | |||
|
258 | ''' | |||
|
259 | ||||
|
260 | header_structure = numpy.dtype( | |||
|
261 | REC_HEADER_STRUCTURE.descr + [ | |||
|
262 | ('antenna_coord', 'f4', (2, self.nchannels)), | |||
|
263 | ('rx_gains', 'u4', (self.nchannels,)), | |||
|
264 | ('rx_analysis', 'u4', (self.nchannels,)) | |||
|
265 | ] | |||
|
266 | ) | |||
|
267 | ||||
|
268 | self.header_rec = numpy.fromfile(self.fp, header_structure, 1) | |||
|
269 | self.lat = self.header_rec['lat'][0] | |||
|
270 | self.lon = self.header_rec['lon'][0] | |||
|
271 | self.delta = self.header_rec['delta_r'][0] | |||
|
272 | self.correction = self.header_rec['dmode_rngcorr'][0] | |||
|
273 | self.imode = self.header_rec['dmode_index'][0] | |||
|
274 | self.antenna = self.header_rec['antenna_coord'] | |||
|
275 | self.rx_gains = self.header_rec['rx_gains'] | |||
|
276 | self.time = self.header_rec['time'][0] | |||
|
277 | tseconds = self.header_rec['time'][0] | |||
|
278 | local_t1 = time.localtime(tseconds) | |||
|
279 | self.year = local_t1.tm_year | |||
|
280 | self.month = local_t1.tm_mon | |||
|
281 | self.day = local_t1.tm_mday | |||
|
282 | self.t = datetime.datetime(self.year, self.month, self.day) | |||
|
283 | self.datatime = datetime.datetime.utcfromtimestamp(self.time) | |||
|
284 | ||||
|
285 | def readData(self): | |||
|
286 | ''' | |||
|
287 | Reading and filtering data block record of BLTR rawdata file, filtering is according to status_value. | |||
|
288 | ||||
|
289 | Input: | |||
|
290 | status_value - Array data is set to NAN for values that are not equal to status_value | |||
|
291 | ||||
|
292 | ''' | |||
|
293 | ||||
|
294 | data_structure = numpy.dtype( | |||
|
295 | DATA_STRUCTURE.descr + [ | |||
|
296 | ('rx_saturation', 'u4', (self.nchannels,)), | |||
|
297 | ('chan_offset', 'u4', (2 * self.nchannels,)), | |||
|
298 | ('rx_amp', 'u4', (self.nchannels,)), | |||
|
299 | ('rx_snr', 'f4', (self.nchannels,)), | |||
|
300 | ('cross_snr', 'f4', (self.kchan,)), | |||
|
301 | ('sea_power_relative', 'f4', (self.kchan,))] | |||
|
302 | ) | |||
|
303 | ||||
|
304 | data = numpy.fromfile(self.fp, data_structure, self.nranges) | |||
|
305 | ||||
|
306 | height = data['range'] | |||
|
307 | winds = numpy.array((data['zonal'], data['meridional'], data['vertical'])) | |||
|
308 | snr = data['rx_snr'].T | |||
|
309 | ||||
|
310 | winds[numpy.where(winds == -9999.)] = numpy.nan | |||
|
311 | winds[:, numpy.where(data['status'] != self.status_value)] = numpy.nan | |||
|
312 | snr[numpy.where(snr == -9999.)] = numpy.nan | |||
|
313 | snr[:, numpy.where(data['status'] != self.status_value)] = numpy.nan | |||
|
314 | snr = numpy.power(10, snr / 10) | |||
|
315 | ||||
|
316 | return height, winds, snr | |||
|
317 | ||||
|
318 | def set_output(self): | |||
|
319 | ''' | |||
|
320 | Storing data from databuffer to dataOut object | |||
|
321 | ''' | |||
|
322 | ||||
|
323 | self.dataOut.data_SNR = self.snr | |||
|
324 | self.dataOut.height = self.height | |||
|
325 | self.dataOut.data_output = self.buffer | |||
|
326 | self.dataOut.utctimeInit = self.time | |||
|
327 | self.dataOut.utctime = self.dataOut.utctimeInit | |||
|
328 | self.dataOut.useLocalTime = False | |||
|
329 | self.dataOut.paramInterval = 157 | |||
|
330 | self.dataOut.timezone = self.timezone | |||
|
331 | self.dataOut.site = self.siteFile | |||
|
332 | self.dataOut.nrecords = self.nrecords/self.nmodes | |||
|
333 | self.dataOut.sizeOfFile = self.sizeOfFile | |||
|
334 | self.dataOut.lat = self.lat | |||
|
335 | self.dataOut.lon = self.lon | |||
|
336 | self.dataOut.channelList = range(self.nchannels) | |||
|
337 | self.dataOut.kchan = self.kchan | |||
|
338 | # self.dataOut.nHeights = self.nranges | |||
|
339 | self.dataOut.delta = self.delta | |||
|
340 | self.dataOut.correction = self.correction | |||
|
341 | self.dataOut.nmodes = self.nmodes | |||
|
342 | self.dataOut.imode = self.imode | |||
|
343 | self.dataOut.antenna = self.antenna | |||
|
344 | self.dataOut.rx_gains = self.rx_gains | |||
|
345 | self.dataOut.flagNoData = False | |||
|
346 | ||||
|
347 | def getData(self): | |||
|
348 | ''' | |||
|
349 | Read the next block and store its data in the dataOut object | |||
|
350 | ''' | |||
|
351 | if self.flagNoMoreFiles: | |||
|
352 | self.dataOut.flagNoData = True | |||
|
353 | print 'No file left to process' | |||
|
354 | return 0 | |||
|
355 | ||||
|
356 | if not self.readNextBlock(): | |||
|
357 | self.dataOut.flagNoData = True | |||
|
358 | return 0 | |||
|
359 | ||||
|
360 | self.set_output() | |||
|
361 | ||||
|
362 | return 1 |
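A minimal usage sketch for the new reader (not part of this changeset): in practice the unit is driven by the schainpy controller, but called directly it looks roughly like this; the path and dates are placeholders.

import datetime
from schainpy.model.io.bltrIO_param import BLTRParamReader

reader = BLTRParamReader()
reader.setup(path='/data/bltr',                          # hypothetical directory containing *.sswma files
             startDate=datetime.date(2016, 11, 1),
             endDate=datetime.date(2016, 11, 30),
             startTime=datetime.time(0, 0, 0),
             endTime=datetime.time(23, 59, 59))

while not reader.flagNoMoreFiles:
    if reader.getData():                                 # reads one record (all modes) into reader.dataOut
        print reader.dataOut.data_output.shape           # (nmodes, 3, nranges): zonal, meridional, vertical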
@@ -0,0 +1,243 | |||||
|
1 | ''' | |||
|
2 | Created on Aug 1, 2017 | |||
|
3 | ||||
|
4 | @author: Juan C. Espinoza | |||
|
5 | ''' | |||
|
6 | ||||
|
7 | import os | |||
|
8 | import sys | |||
|
9 | import time | |||
|
10 | import json | |||
|
11 | import datetime | |||
|
12 | ||||
|
13 | import numpy | |||
|
14 | ||||
|
15 | try: | |||
|
16 | import madrigal | |||
|
17 | import madrigal.cedar | |||
|
18 | except: | |||
|
19 | print 'You should install the "madrigal" library if you want to read/write Madrigal data' | |||
|
20 | ||||
|
21 | from schainpy.model.proc.jroproc_base import Operation | |||
|
22 | from schainpy.model.data.jrodata import Parameters | |||
|
23 | ||||
|
24 | MISSING = -32767 | |||
|
25 | DEF_CATALOG = { | |||
|
26 | 'principleInvestigator': 'Marco Milla', | |||
|
27 | 'expPurpose': None, | |||
|
28 | 'expMode': None, | |||
|
29 | 'cycleTime': None, | |||
|
30 | 'correlativeExp': None, | |||
|
31 | 'sciRemarks': None, | |||
|
32 | 'instRemarks': None | |||
|
33 | } | |||
|
34 | DEF_HEADER = { | |||
|
35 | 'kindatDesc': None, | |||
|
36 | 'analyst': 'Jicamarca User', | |||
|
37 | 'comments': None, | |||
|
38 | 'history': None | |||
|
39 | } | |||
|
40 | MNEMONICS = { | |||
|
41 | 10: 'jro', | |||
|
42 | 11: 'jbr', | |||
|
43 | 840: 'jul', | |||
|
44 | 13: 'jas', | |||
|
45 | 1000: 'pbr', | |||
|
46 | 1001: 'hbr', | |||
|
47 | 1002: 'obr', | |||
|
48 | } | |||
|
49 | ||||
|
50 | def load_json(obj): | |||
|
51 | ''' | |||
|
52 | Parse json as string instead of unicode | |||
|
53 | ''' | |||
|
54 | ||||
|
55 | if isinstance(obj, str): | |||
|
56 | obj = json.loads(obj) | |||
|
57 | ||||
|
58 | return {str(k): load_json(v) if isinstance(v, dict) else str(v) if isinstance(v, unicode) else v | |||
|
59 | for k, v in obj.items()} | |||
|
60 | ||||
|
61 | ||||
|
62 | class MAD2Writer(Operation): | |||
|
63 | ||||
|
64 | def __init__(self, **kwargs): | |||
|
65 | ||||
|
66 | Operation.__init__(self, **kwargs) | |||
|
67 | self.dataOut = Parameters() | |||
|
68 | self.path = None | |||
|
69 | self.dataOut = None | |||
|
70 | self.ext = '.dat' | |||
|
71 | ||||
|
72 | return | |||
|
73 | ||||
|
74 | def run(self, dataOut, path, oneDList, twoDParam='', twoDList='{}', metadata='{}', **kwargs): | |||
|
75 | ''' | |||
|
76 | Inputs: | |||
|
77 | path - path where files will be created | |||
|
78 | oneDList - json of one-dimensional parameters in record where keys | |||
|
79 | are Madrigal codes (integers or mnemonics) and values are the corresponding | |||
|
80 | dataOut attribute e.g: { | |||
|
81 | 'gdlatr': 'lat', | |||
|
82 | 'gdlonr': 'lon', | |||
|
83 | 'gdlat2':'lat', | |||
|
84 | 'glon2':'lon'} | |||
|
85 | twoDParam - independent parameter to get the number of rows e.g: | |||
|
86 | heightList | |||
|
87 | twoDList - json of two-dimensional parameters in record where keys | |||
|
88 | are Madrigal codes (integers or mnemonics) and values the corresponding | |||
|
89 | dataOut attribute; if it is a multidimensional array, specify it as a tuple | |||
|
90 | ('attr', pos) e.g: { | |||
|
91 | 'gdalt': 'heightList', | |||
|
92 | 'vn1p2': ('data_output', 0), | |||
|
93 | 'vn2p2': ('data_output', 1), | |||
|
94 | 'vn3': ('data_output', 2), | |||
|
95 | 'snl': ('data_SNR', 'db') | |||
|
96 | } | |||
|
97 | metadata - json of madrigal metadata (kinst, kindat, catalog and header) | |||
|
98 | ''' | |||
|
99 | if not self.isConfig: | |||
|
100 | self.setup(dataOut, path, oneDList, twoDParam, twoDList, metadata, **kwargs) | |||
|
101 | self.isConfig = True | |||
|
102 | ||||
|
103 | self.putData() | |||
|
104 | return | |||
|
105 | ||||
|
106 | def setup(self, dataOut, path, oneDList, twoDParam, twoDList, metadata, **kwargs): | |||
|
107 | ''' | |||
|
108 | Configure Operation | |||
|
109 | ''' | |||
|
110 | ||||
|
111 | self.dataOut = dataOut | |||
|
112 | self.nmodes = self.dataOut.nmodes | |||
|
113 | self.path = path | |||
|
114 | self.blocks = kwargs.get('blocks', None) | |||
|
115 | self.counter = 0 | |||
|
116 | self.oneDList = load_json(oneDList) | |||
|
117 | self.twoDList = load_json(twoDList) | |||
|
118 | self.twoDParam = twoDParam | |||
|
119 | meta = load_json(metadata) | |||
|
120 | self.kinst = meta.get('kinst') | |||
|
121 | self.kindat = meta.get('kindat') | |||
|
122 | self.catalog = meta.get('catalog', DEF_CATALOG) | |||
|
123 | self.header = meta.get('header', DEF_HEADER) | |||
|
124 | ||||
|
125 | return | |||
|
126 | ||||
|
127 | def setFile(self): | |||
|
128 | ''' | |||
|
129 | Create new cedar file object | |||
|
130 | ''' | |||
|
131 | ||||
|
132 | self.mnemonic = MNEMONICS[self.kinst] #TODO get mnemonic from madrigal | |||
|
133 | date = datetime.datetime.utcfromtimestamp(self.dataOut.utctime) | |||
|
134 | ||||
|
135 | filename = '%s%s_%s%s' % (self.mnemonic, | |||
|
136 | date.strftime('%Y%m%d_%H%M%S'), | |||
|
137 | self.dataOut.mode, | |||
|
138 | self.ext) | |||
|
139 | ||||
|
140 | self.fullname = os.path.join(self.path, filename) | |||
|
141 | ||||
|
142 | if os.path.isfile(self.fullname) : | |||
|
143 | print "Destination path '%s' already exists. Previous file deleted. " %self.fullname | |||
|
144 | os.remove(self.fullname) | |||
|
145 | ||||
|
146 | try: | |||
|
147 | print '[Writing] creating file : %s' % (self.fullname) | |||
|
148 | self.cedarObj = madrigal.cedar.MadrigalCedarFile(self.fullname, True) | |||
|
149 | except ValueError, e: | |||
|
150 | print '[Error]: Impossible to create a cedar object with "madrigal.cedar.MadrigalCedarFile" ' | |||
|
151 | return | |||
|
152 | ||||
|
153 | return 1 | |||
|
154 | ||||
|
155 | def writeBlock(self): | |||
|
156 | ''' | |||
|
157 | Add data records to cedar file taking data from oneDList and twoDList | |||
|
158 | attributes. | |||
|
159 | Allowed parameters in: parcodes.tab | |||
|
160 | ''' | |||
|
161 | ||||
|
162 | startTime = datetime.datetime.utcfromtimestamp(self.dataOut.utctime) | |||
|
163 | endTime = startTime + datetime.timedelta(seconds=self.dataOut.paramInterval) | |||
|
164 | nrows = len(getattr(self.dataOut, self.twoDParam)) | |||
|
165 | ||||
|
166 | rec = madrigal.cedar.MadrigalDataRecord( | |||
|
167 | self.kinst, | |||
|
168 | self.kindat, | |||
|
169 | startTime.year, | |||
|
170 | startTime.month, | |||
|
171 | startTime.day, | |||
|
172 | startTime.hour, | |||
|
173 | startTime.minute, | |||
|
174 | startTime.second, | |||
|
175 | startTime.microsecond/10000, | |||
|
176 | endTime.year, | |||
|
177 | endTime.month, | |||
|
178 | endTime.day, | |||
|
179 | endTime.hour, | |||
|
180 | endTime.minute, | |||
|
181 | endTime.second, | |||
|
182 | endTime.microsecond/10000, | |||
|
183 | self.oneDList.keys(), | |||
|
184 | self.twoDList.keys(), | |||
|
185 | nrows | |||
|
186 | ) | |||
|
187 | ||||
|
188 | # Setting 1d values | |||
|
189 | for key in self.oneDList: | |||
|
190 | rec.set1D(key, getattr(self.dataOut, self.oneDList[key])) | |||
|
191 | ||||
|
192 | # Setting 2d values | |||
|
193 | invalid = numpy.isnan(self.dataOut.data_output) | |||
|
194 | self.dataOut.data_output[invalid] = MISSING | |||
|
195 | out = {} | |||
|
196 | for key, value in self.twoDList.items(): | |||
|
197 | if isinstance(value, str): | |||
|
198 | out[key] = getattr(self.dataOut, value) | |||
|
199 | elif isinstance(value, tuple): | |||
|
200 | attr, x = value | |||
|
201 | if isinstance(x, (int, float)): | |||
|
202 | out[key] = getattr(self.dataOut, attr)[int(x)] | |||
|
203 | elif x.lower()=='db': | |||
|
204 | tmp = getattr(self.dataOut, attr) | |||
|
205 | SNRavg = numpy.average(tmp, axis=0) | |||
|
206 | out[key] = 10*numpy.log10(SNRavg) | |||
|
207 | ||||
|
208 | for n in range(nrows): | |||
|
209 | for key in out: | |||
|
210 | rec.set2D(key, n, out[key][n]) | |||
|
211 | ||||
|
212 | self.cedarObj.append(rec) | |||
|
213 | self.cedarObj.dump() | |||
|
214 | print '[Writing] Record No. {} (mode {}).'.format( | |||
|
215 | self.counter, | |||
|
216 | self.dataOut.mode | |||
|
217 | ) | |||
|
218 | ||||
|
219 | def setHeader(self): | |||
|
220 | ''' | |||
|
221 | Create and add catalog and header to cedar file | |||
|
222 | ''' | |||
|
223 | ||||
|
224 | header = madrigal.cedar.CatalogHeaderCreator(self.fullname) | |||
|
225 | header.createCatalog(**self.catalog) | |||
|
226 | header.createHeader(**self.header) | |||
|
227 | header.write() | |||
|
228 | ||||
|
229 | def putData(self): | |||
|
230 | ||||
|
231 | if self.dataOut.flagNoData: | |||
|
232 | return 0 | |||
|
233 | ||||
|
234 | if self.counter == 0: | |||
|
235 | self.setFile() | |||
|
236 | ||||
|
237 | if self.counter <= self.dataOut.nrecords: | |||
|
238 | self.writeBlock() | |||
|
239 | self.counter += 1 | |||
|
240 | ||||
|
241 | if self.counter == self.dataOut.nrecords or self.counter == self.blocks: | |||
|
242 | self.setHeader() | |||
|
243 | self.counter = 0 |
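For reference, a hedged sketch of the JSON-style arguments that MAD2Writer.run() expects, following the docstring above. kinst should be a key of MNEMONICS so setFile() can build the output file name; kindat, the path and the attribute mapping are illustrative placeholders. Note that json.loads yields lists rather than tuples for the ('attr', pos) entries shown in the docstring, so the isinstance(value, tuple) branch in writeBlock() may require converting the parsed values.

one_d = '{"gdlatr": "lat", "gdlonr": "lon"}'             # 1D Madrigal codes -> dataOut attributes
two_d = '{"gdalt": "heightList", "vn1p2": ["data_output", 0], "snl": ["data_SNR", "db"]}'
meta = '{"kinst": 1000, "kindat": 1600}'                 # 1000 -> 'pbr' in MNEMONICS; kindat is a placeholder

writer = MAD2Writer()
# dataOut is a Parameters object produced upstream (e.g. by BLTRParametersProc)
writer.run(dataOut, path='/tmp/madrigal', oneDList=one_d, twoDParam='heightList',
           twoDList=two_d, metadata=meta)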
@@ -0,0 +1,403 | |||||
|
1 | ''' | |||
|
2 | Created on Oct 24, 2016 | |||
|
3 | ||||
|
4 | @author: roj- LouVD | |||
|
5 | ''' | |||
|
6 | ||||
|
7 | import numpy | |||
|
8 | import copy | |||
|
9 | import datetime | |||
|
10 | import time | |||
|
11 | from time import gmtime | |||
|
12 | ||||
|
13 | from numpy import transpose | |||
|
14 | ||||
|
15 | from jroproc_base import ProcessingUnit, Operation | |||
|
16 | from schainpy.model.data.jrodata import Parameters | |||
|
17 | ||||
|
18 | ||||
|
19 | class BLTRParametersProc(ProcessingUnit): | |||
|
20 | ''' | |||
|
21 | Processing unit for BLTR parameters data (winds) | |||
|
22 | ||||
|
23 | Inputs: | |||
|
24 | self.dataOut.nmodes - Number of operation modes | |||
|
25 | self.dataOut.nchannels - Number of channels | |||
|
26 | self.dataOut.nranges - Number of ranges | |||
|
27 | ||||
|
28 | self.dataOut.data_SNR - SNR array | |||
|
29 | self.dataOut.data_output - Zonal, Meridional and Vertical velocity array | |||
|
30 | self.dataOut.height - Height array (km) | |||
|
31 | self.dataOut.time - Time array (seconds) | |||
|
32 | ||||
|
33 | self.dataOut.fileIndex - Index of the file currently read | |||
|
34 | self.dataOut.lat - Latitude coordinate of BLTR location | |||
|
35 | ||||
|
36 | self.dataOut.doy - Experiment doy (number of the day in the current year) | |||
|
37 | self.dataOut.month - Experiment month | |||
|
38 | self.dataOut.day - Experiment day | |||
|
39 | self.dataOut.year - Experiment year | |||
|
40 | ''' | |||
|
41 | ||||
|
42 | def __init__(self, **kwargs): | |||
|
43 | ''' | |||
|
44 | Inputs: None | |||
|
45 | ''' | |||
|
46 | ProcessingUnit.__init__(self, **kwargs) | |||
|
47 | self.dataOut = Parameters() | |||
|
48 | self.isConfig = False | |||
|
49 | ||||
|
50 | def setup(self, mode): | |||
|
51 | ''' | |||
|
52 | ''' | |||
|
53 | self.dataOut.mode = mode | |||
|
54 | ||||
|
55 | def run(self, mode, snr_threshold=None): | |||
|
56 | ''' | |||
|
57 | Inputs: | |||
|
58 | mode = High resolution (0) or Low resolution (1) data | |||
|
59 | snr_threshold = snr filter value | |||
|
60 | ''' | |||
|
61 | ||||
|
62 | if not self.isConfig: | |||
|
63 | self.setup(mode) | |||
|
64 | self.isConfig = True | |||
|
65 | ||||
|
66 | if self.dataIn.type == 'Parameters': | |||
|
67 | self.dataOut.copy(self.dataIn) | |||
|
68 | ||||
|
69 | self.dataOut.data_output = self.dataOut.data_output[mode] | |||
|
70 | self.dataOut.heightList = self.dataOut.height[0] | |||
|
71 | self.dataOut.data_SNR = self.dataOut.data_SNR[mode] | |||
|
72 | ||||
|
73 | if snr_threshold is not None: | |||
|
74 | SNRavg = numpy.average(self.dataOut.data_SNR, axis=0) | |||
|
75 | SNRavgdB = 10*numpy.log10(SNRavg) | |||
|
76 | for i in range(3): | |||
|
77 | self.dataOut.data_output[i][SNRavgdB <= snr_threshold] = numpy.nan | |||
|
78 | ||||
|
79 | # TODO | |||
|
80 | class OutliersFilter(Operation): | |||
|
81 | ||||
|
82 | def __init__(self, **kwargs): | |||
|
83 | ''' | |||
|
84 | ''' | |||
|
85 | Operation.__init__(self, **kwargs) | |||
|
86 | ||||
|
87 | def run(self, svalue, svalue2, method, factor, filter, npoints=9): | |||
|
88 | ''' | |||
|
89 | Inputs: | |||
|
90 | svalue - string to select array velocity | |||
|
91 | svalue2 - string to choose axis filtering | |||
|
92 | method - 0 for SMOOTH or 1 for MEDIAN | |||
|
93 | factor - number used to set threshold | |||
|
94 | filter - 1 for data filtering using the standard deviation criteria else 0 | |||
|
95 | npoints - number of points for mask filter | |||
|
96 | ''' | |||
|
97 | ||||
|
98 | print ' Outliers Filter {} {} / threshold = {}'.format(svalue, svalue2, factor) | |||
|
99 | ||||
|
100 | ||||
|
101 | yaxis = self.dataOut.heightList | |||
|
102 | xaxis = numpy.array([[self.dataOut.utctime]]) | |||
|
103 | ||||
|
104 | # Select the velocity component to filter according to svalue | |||
|
105 | if svalue == 'zonal': | |||
|
106 | value_temp = self.dataOut.data_output[0] | |||
|
107 | elif svalue == 'meridional': | |||
|
108 | value_temp = self.dataOut.data_output[1] | |||
|
109 | else: | |||
|
110 | # Vertical | |||
|
111 | value_temp = numpy.transpose(self.dataOut.data_output[2]) | |||
|
112 | ||||
|
113 | htemp = yaxis | |||
|
114 | std = value_temp | |||
|
115 | for h in range(len(htemp)): | |||
|
116 | nvalues_valid = len(numpy.where(numpy.isfinite(value_temp[h]))[0]) | |||
|
117 | minvalid = npoints | |||
|
118 | ||||
|
119 | # only filter this height if the number of valid values exceeds the minimum required (npoints) | |||
|
120 | if nvalues_valid > minvalid: | |||
|
121 | ||||
|
122 | if method == 0: | |||
|
123 | #SMOOTH | |||
|
124 | w = value_temp[h] - self.Smooth(input=value_temp[h], width=npoints, edge_truncate=1) | |||
|
125 | ||||
|
126 | ||||
|
127 | if method == 1: | |||
|
128 | #MEDIAN | |||
|
129 | w = value_temp[h] - self.Median(input=value_temp[h], width = npoints) | |||
|
130 | ||||
|
131 | dw = numpy.std(w[numpy.where(numpy.isfinite(w))],ddof = 1) | |||
|
132 | ||||
|
133 | threshold = dw*factor | |||
|
134 | value_temp[h, numpy.where(w > threshold)] = numpy.nan | |||
|
135 | value_temp[h, numpy.where(w < -1*threshold)] = numpy.nan | |||
|
136 | ||||
|
137 | ||||
|
138 | #At the end | |||
|
139 | if svalue2 == 'inHeight': | |||
|
140 | value_temp = numpy.transpose(value_temp) | |||
|
141 | output_array = value_temp | |||
|
142 | ||||
|
143 | if svalue == 'zonal': | |||
|
144 | self.dataOut.data_output[0] = output_array | |||
|
145 | ||||
|
146 | elif svalue == 'meridional': | |||
|
147 | self.dataOut.data_output[1] = output_array | |||
|
148 | ||||
|
149 | elif svalue == 'vertical': | |||
|
150 | self.dataOut.data_output[2] = output_array | |||
|
151 | ||||
|
152 | return self.dataOut.data_output | |||
|
153 | ||||
|
154 | ||||
|
155 | def Median(self,input,width): | |||
|
156 | ''' | |||
|
157 | Inputs: | |||
|
158 | input - Velocity array | |||
|
159 | width - Number of points for mask filter | |||
|
160 | ||||
|
161 | ''' | |||
|
162 | ||||
|
163 | if numpy.mod(width,2) == 1: | |||
|
164 | pc = int((width - 1) / 2) | |||
|
165 | cont = 0 | |||
|
166 | output = [] | |||
|
167 | ||||
|
168 | for i in range(len(input)): | |||
|
169 | if i >= pc and i < len(input) - pc: | |||
|
170 | new2 = input[i-pc:i+pc+1] | |||
|
171 | temp = numpy.where(numpy.isfinite(new2)) | |||
|
172 | new = new2[temp] | |||
|
173 | value = numpy.median(new) | |||
|
174 | output.append(value) | |||
|
175 | ||||
|
176 | output = numpy.array(output) | |||
|
177 | output = numpy.hstack((input[0:pc],output)) | |||
|
178 | output = numpy.hstack((output,input[-pc:len(input)])) | |||
|
179 | ||||
|
180 | return output | |||
|
181 | ||||
|
182 | def Smooth(self,input,width,edge_truncate = None): | |||
|
183 | ''' | |||
|
184 | Inputs: | |||
|
185 | input - Velocity array | |||
|
186 | width - Number of points for mask filter | |||
|
187 | edge_truncate - 1 to truncate the convolution at the edges (same-length output); otherwise the border samples are copied from the input | |||
|
188 | ||||
|
189 | ''' | |||
|
190 | ||||
|
191 | if numpy.mod(width,2) == 0: | |||
|
192 | real_width = width + 1 | |||
|
193 | nzeros = width / 2 | |||
|
194 | else: | |||
|
195 | real_width = width | |||
|
196 | nzeros = (width - 1) / 2 | |||
|
197 | ||||
|
198 | half_width = int(real_width)/2 | |||
|
199 | length = len(input) | |||
|
200 | ||||
|
201 | gate = numpy.ones(real_width,dtype='float') | |||
|
202 | norm_of_gate = numpy.sum(gate) | |||
|
203 | ||||
|
204 | nan_process = 0 | |||
|
205 | nan_id = numpy.where(numpy.isnan(input)) | |||
|
206 | if len(nan_id[0]) > 0: | |||
|
207 | nan_process = 1 | |||
|
208 | pb = numpy.zeros(len(input)) | |||
|
209 | pb[nan_id] = 1. | |||
|
210 | input[nan_id] = 0. | |||
|
211 | ||||
|
212 | if edge_truncate == True: | |||
|
213 | output = numpy.convolve(input/norm_of_gate,gate,mode='same') | |||
|
214 | elif edge_truncate == False or edge_truncate == None: | |||
|
215 | output = numpy.convolve(input/norm_of_gate,gate,mode='valid') | |||
|
216 | output = numpy.hstack((input[0:half_width],output)) | |||
|
217 | output = numpy.hstack((output,input[len(input)-half_width:len(input)])) | |||
|
218 | ||||
|
219 | if nan_process: | |||
|
220 | pb = numpy.convolve(pb/norm_of_gate,gate,mode='valid') | |||
|
221 | pb = numpy.hstack((numpy.zeros(half_width),pb)) | |||
|
222 | pb = numpy.hstack((pb,numpy.zeros(half_width))) | |||
|
223 | output[numpy.where(pb > 0.9999)] = numpy.nan | |||
|
224 | input[nan_id] = numpy.nan | |||
|
225 | return output | |||
|
226 | ||||
|
227 | def Average(self,aver=0,nhaver=1): | |||
|
228 | ''' | |||
|
229 | Inputs: | |||
|
230 | aver - Index into taver selecting the time period (hours) over which data is averaged (consensus data) | |||
|
231 | nhaver - Indicates the decimation factor in heights | |||
|
232 | ||||
|
233 | ''' | |||
|
234 | nhpoints = 48 | |||
|
235 | ||||
|
236 | lat_piura = -5.17 | |||
|
237 | lat_huancayo = -12.04 | |||
|
238 | lat_porcuya = -5.8 | |||
|
239 | ||||
|
240 | if '%2.2f'%self.dataOut.lat == '%2.2f'%lat_piura: | |||
|
241 | hcm = 3. | |||
|
242 | if self.dataOut.year == 2003 : | |||
|
243 | if self.dataOut.doy >= 25 and self.dataOut.doy < 64: | |||
|
244 | nhpoints = 12 | |||
|
245 | ||||
|
246 | elif '%2.2f'%self.dataOut.lat == '%2.2f'%lat_huancayo: | |||
|
247 | hcm = 3. | |||
|
248 | if self.dataOut.year == 2003 : | |||
|
249 | if self.dataOut.doy >= 25 and self.dataOut.doy < 64: | |||
|
250 | nhpoints = 12 | |||
|
251 | ||||
|
252 | ||||
|
253 | elif '%2.2f'%self.dataOut.lat == '%2.2f'%lat_porcuya: | |||
|
254 | hcm = 5.#2 | |||
|
255 | ||||
|
256 | pdata = 0.2 | |||
|
257 | taver = [1,2,3,4,6,8,12,24] | |||
|
258 | t0 = 0 | |||
|
259 | tf = 24 | |||
|
260 | ntime =(tf-t0)/taver[aver] | |||
|
261 | ti = numpy.arange(ntime) | |||
|
262 | tf = numpy.arange(ntime) + taver[aver] | |||
|
263 | ||||
|
264 | ||||
|
265 | old_height = self.dataOut.heightList | |||
|
266 | ||||
|
267 | if nhaver > 1: | |||
|
268 | num_hei = len(self.dataOut.heightList)/nhaver/self.dataOut.nmodes | |||
|
269 | deltha = 0.05*nhaver | |||
|
270 | minhvalid = pdata*nhaver | |||
|
271 | for im in range(self.dataOut.nmodes): | |||
|
272 | new_height = numpy.arange(num_hei)*deltha + self.dataOut.height[im,0] + deltha/2. | |||
|
273 | ||||
|
274 | ||||
|
275 | data_fHeigths_List = [] | |||
|
276 | data_fZonal_List = [] | |||
|
277 | data_fMeridional_List = [] | |||
|
278 | data_fVertical_List = [] | |||
|
279 | startDTList = [] | |||
|
280 | ||||
|
281 | ||||
|
282 | for i in range(ntime): | |||
|
283 | height = old_height | |||
|
284 | ||||
|
285 | start = datetime.datetime(self.dataOut.year,self.dataOut.month,self.dataOut.day) + datetime.timedelta(hours = int(ti[i])) - datetime.timedelta(hours = 5) | |||
|
286 | stop = datetime.datetime(self.dataOut.year,self.dataOut.month,self.dataOut.day) + datetime.timedelta(hours = int(tf[i])) - datetime.timedelta(hours = 5) | |||
|
287 | ||||
|
288 | ||||
|
289 | limit_sec1 = time.mktime(start.timetuple()) | |||
|
290 | limit_sec2 = time.mktime(stop.timetuple()) | |||
|
291 | ||||
|
292 | t1 = numpy.where(self.f_timesec >= limit_sec1) | |||
|
293 | t2 = numpy.where(self.f_timesec < limit_sec2) | |||
|
294 | time_select = [] | |||
|
295 | for val_sec in t1[0]: | |||
|
296 | if val_sec in t2[0]: | |||
|
297 | time_select.append(val_sec) | |||
|
298 | ||||
|
299 | ||||
|
300 | time_select = numpy.array(time_select,dtype = 'int') | |||
|
301 | minvalid = numpy.ceil(pdata*nhpoints) | |||
|
302 | ||||
|
303 | zon_aver = numpy.zeros([self.dataOut.nranges,self.dataOut.nmodes],dtype='f4') + numpy.nan | |||
|
304 | mer_aver = numpy.zeros([self.dataOut.nranges,self.dataOut.nmodes],dtype='f4') + numpy.nan | |||
|
305 | ver_aver = numpy.zeros([self.dataOut.nranges,self.dataOut.nmodes],dtype='f4') + numpy.nan | |||
|
306 | ||||
|
307 | if nhaver > 1: | |||
|
308 | new_zon_aver = numpy.zeros([num_hei,self.dataOut.nmodes],dtype='f4') + numpy.nan | |||
|
309 | new_mer_aver = numpy.zeros([num_hei,self.dataOut.nmodes],dtype='f4') + numpy.nan | |||
|
310 | new_ver_aver = numpy.zeros([num_hei,self.dataOut.nmodes],dtype='f4') + numpy.nan | |||
|
311 | ||||
|
312 | if len(time_select) > minvalid: | |||
|
313 | time_average = self.f_timesec[time_select] | |||
|
314 | ||||
|
315 | for im in range(self.dataOut.nmodes): | |||
|
316 | ||||
|
317 | for ih in range(self.dataOut.nranges): | |||
|
318 | if numpy.sum(numpy.isfinite(self.f_zon[time_select,ih,im])) >= minvalid: | |||
|
319 | zon_aver[ih,im] = numpy.nansum(self.f_zon[time_select,ih,im]) / numpy.sum(numpy.isfinite(self.f_zon[time_select,ih,im])) | |||
|
320 | ||||
|
321 | if numpy.sum(numpy.isfinite(self.f_mer[time_select,ih,im])) >= minvalid: | |||
|
322 | mer_aver[ih,im] = numpy.nansum(self.f_mer[time_select,ih,im]) / numpy.sum(numpy.isfinite(self.f_mer[time_select,ih,im])) | |||
|
323 | ||||
|
324 | if numpy.sum(numpy.isfinite(self.f_ver[time_select,ih,im])) >= minvalid: | |||
|
325 | ver_aver[ih,im] = numpy.nansum(self.f_ver[time_select,ih,im]) / numpy.sum(numpy.isfinite(self.f_ver[time_select,ih,im])) | |||
|
326 | ||||
|
327 | if nhaver > 1: | |||
|
328 | for ih in range(num_hei): | |||
|
329 | hvalid = numpy.arange(nhaver) + nhaver*ih | |||
|
330 | ||||
|
331 | if numpy.sum(numpy.isfinite(zon_aver[hvalid,im])) >= minvalid: | |||
|
332 | new_zon_aver[ih,im] = numpy.nansum(zon_aver[hvalid,im]) / numpy.sum(numpy.isfinite(zon_aver[hvalid,im])) | |||
|
333 | ||||
|
334 | if numpy.sum(numpy.isfinite(mer_aver[hvalid,im])) >= minvalid: | |||
|
335 | new_mer_aver[ih,im] = numpy.nansum(mer_aver[hvalid,im]) / numpy.sum(numpy.isfinite(mer_aver[hvalid,im])) | |||
|
336 | ||||
|
337 | if numpy.sum(numpy.isfinite(ver_aver[hvalid,im])) >= minvalid: | |||
|
338 | new_ver_aver[ih,im] = numpy.nansum(ver_aver[hvalid,im]) / numpy.sum(numpy.isfinite(ver_aver[hvalid,im])) | |||
|
339 | if nhaver > 1: | |||
|
340 | zon_aver = new_zon_aver | |||
|
341 | mer_aver = new_mer_aver | |||
|
342 | ver_aver = new_ver_aver | |||
|
343 | height = new_height | |||
|
344 | ||||
|
345 | ||||
|
346 | tstart = time_average[0] | |||
|
347 | tend = time_average[-1] | |||
|
348 | startTime = time.gmtime(tstart) | |||
|
349 | ||||
|
350 | year = startTime.tm_year | |||
|
351 | month = startTime.tm_mon | |||
|
352 | day = startTime.tm_mday | |||
|
353 | hour = startTime.tm_hour | |||
|
354 | minute = startTime.tm_min | |||
|
355 | second = startTime.tm_sec | |||
|
356 | ||||
|
357 | startDTList.append(datetime.datetime(year,month,day,hour,minute,second)) | |||
|
358 | ||||
|
359 | ||||
|
360 | o_height = numpy.array([]) | |||
|
361 | o_zon_aver = numpy.array([]) | |||
|
362 | o_mer_aver = numpy.array([]) | |||
|
363 | o_ver_aver = numpy.array([]) | |||
|
364 | if self.dataOut.nmodes > 1: | |||
|
365 | for im in range(self.dataOut.nmodes): | |||
|
366 | ||||
|
367 | if im == 0: | |||
|
368 | h_select = numpy.where(numpy.bitwise_and(height[0,:] >=0,height[0,:] <= hcm,numpy.isfinite(height[0,:]))) | |||
|
369 | else: | |||
|
370 | h_select = numpy.where(numpy.bitwise_and(height[1,:] > hcm,height[1,:] < 20,numpy.isfinite(height[1,:]))) | |||
|
371 | ||||
|
372 | ||||
|
373 | ht = h_select[0] | |||
|
374 | ||||
|
375 | o_height = numpy.hstack((o_height,height[im,ht])) | |||
|
376 | o_zon_aver = numpy.hstack((o_zon_aver,zon_aver[ht,im])) | |||
|
377 | o_mer_aver = numpy.hstack((o_mer_aver,mer_aver[ht,im])) | |||
|
378 | o_ver_aver = numpy.hstack((o_ver_aver,ver_aver[ht,im])) | |||
|
379 | ||||
|
380 | data_fHeigths_List.append(o_height) | |||
|
381 | data_fZonal_List.append(o_zon_aver) | |||
|
382 | data_fMeridional_List.append(o_mer_aver) | |||
|
383 | data_fVertical_List.append(o_ver_aver) | |||
|
384 | ||||
|
385 | ||||
|
386 | else: | |||
|
387 | h_select = numpy.where(numpy.bitwise_and(height[0,:] <= hcm,numpy.isfinite(height[0,:]))) | |||
|
388 | ht = h_select[0] | |||
|
389 | o_height = numpy.hstack((o_height,height[im,ht])) | |||
|
390 | o_zon_aver = numpy.hstack((o_zon_aver,zon_aver[ht,im])) | |||
|
391 | o_mer_aver = numpy.hstack((o_mer_aver,mer_aver[ht,im])) | |||
|
392 | o_ver_aver = numpy.hstack((o_ver_aver,ver_aver[ht,im])) | |||
|
393 | ||||
|
394 | data_fHeigths_List.append(o_height) | |||
|
395 | data_fZonal_List.append(o_zon_aver) | |||
|
396 | data_fMeridional_List.append(o_mer_aver) | |||
|
397 | data_fVertical_List.append(o_ver_aver) | |||
|
398 | ||||
|
399 | ||||
|
400 | return startDTList, data_fHeigths_List, data_fZonal_List, data_fMeridional_List, data_fVertical_List | |||
|
401 | ||||
|
402 | ||||
|
403 | No newline at end of file |
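A small numeric sketch (made-up values) of the snr_threshold filter in BLTRParametersProc.run() above: the per-channel linear SNR is averaged, converted to dB, and the wind components at heights whose average SNR falls at or below the threshold are set to NaN.

import numpy

data_SNR = numpy.array([[2.0, 0.5],
                        [4.0, 0.25]])          # hypothetical linear SNR: 2 channels x 2 heights
SNRavg = numpy.average(data_SNR, axis=0)       # -> [3.0, 0.375]
SNRavgdB = 10 * numpy.log10(SNRavg)            # -> [4.77, -4.26] dB
snr_threshold = 0                              # dB
mask = SNRavgdB <= snr_threshold               # -> [False, True]: second height is discarded

data_output = numpy.array([[1.0, 2.0],
                           [3.0, 4.0],
                           [0.1, 0.2]])        # zonal, meridional, vertical rows per height
for i in range(3):
    data_output[i][mask] = numpy.nan           # same masking as in run()
print data_output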
@@ -104,3 +104,5 ENV/ | |||||
104 | *.png |
|
104 | *.png | |
105 | *.pyc |
|
105 | *.pyc | |
106 | schainpy/scripts |
|
106 | schainpy/scripts | |
|
107 | ||||
|
108 | .vscode No newline at end of file |
@@ -1201,7 +1201,7 class Parameters(Spectra): | |||||
1201 | time1 = self.utctimeInit - self.timeZone*60 |
|
1201 | time1 = self.utctimeInit - self.timeZone*60 | |
1202 | else: |
|
1202 | else: | |
1203 | time1 = self.utctimeInit |
|
1203 | time1 = self.utctimeInit | |
1204 | print 'interval',interval |
|
1204 | ||
1205 | datatime.append(time1) |
|
1205 | datatime.append(time1) | |
1206 | datatime.append(time1 + interval) |
|
1206 | datatime.append(time1 + interval) | |
1207 | datatime = numpy.array(datatime) |
|
1207 | datatime = numpy.array(datatime) |
@@ -658,13 +658,8 class WindProfilerPlot(Figure): | |||||
658 | # tmax = None |
|
658 | # tmax = None | |
659 |
|
659 | |||
660 | x = dataOut.getTimeRange1(dataOut.paramInterval) |
|
660 | x = dataOut.getTimeRange1(dataOut.paramInterval) | |
661 | y = dataOut.heightList |
|
661 | y = dataOut.heightList | |
662 | z = dataOut.data_output.copy() |
|
662 | z = dataOut.data_output.copy() | |
663 | print ' ' |
|
|||
664 | print 'Xvel',z[0] |
|
|||
665 | print ' ' |
|
|||
666 | print 'Yvel',z[1] |
|
|||
667 | print ' ' |
|
|||
668 | nplots = z.shape[0] #Number of wind dimensions estimated |
|
663 | nplots = z.shape[0] #Number of wind dimensions estimated | |
669 | nplotsw = nplots |
|
664 | nplotsw = nplots | |
670 |
|
665 |
@@ -13,7 +13,9 from jroIO_kamisr import * | |||||
13 | from jroIO_param import * |
|
13 | from jroIO_param import * | |
14 | from jroIO_hf import * |
|
14 | from jroIO_hf import * | |
15 |
|
15 | |||
|
16 | from jroIO_madrigal import * | |||
|
17 | ||||
|
18 | from bltrIO_param import * | |||
16 | from jroIO_bltr import * |
|
19 | from jroIO_bltr import * | |
17 | from jroIO_mira35c import * |
|
20 | from jroIO_mira35c import * | |
18 | from io_bltr_block import * |
|
|||
19 |
|
21 |
@@ -11,7 +11,6 import numpy | |||||
11 | import fnmatch |
|
11 | import fnmatch | |
12 | import inspect |
|
12 | import inspect | |
13 | import time, datetime |
|
13 | import time, datetime | |
14 | #import h5py |
|
|||
15 | import traceback |
|
14 | import traceback | |
16 |
|
15 | |||
17 | try: |
|
16 | try: | |
@@ -1058,10 +1057,10 class JRODataReader(JRODataIO): | |||||
1058 |
|
1057 | |||
1059 | break |
|
1058 | break | |
1060 |
|
1059 | |||
1061 |
|
|
1060 | if self.verbose: | |
1062 |
|
|
1061 | print "[Reading] Block No. %d/%d -> %s" %(self.nReadBlocks, | |
1063 |
|
|
1062 | self.processingHeaderObj.dataBlocksPerFile, | |
1064 |
|
|
1063 | self.dataOut.datatime.ctime()) | |
1065 | return 1 |
|
1064 | return 1 | |
1066 |
|
1065 | |||
1067 | def __readFirstHeader(self): |
|
1066 | def __readFirstHeader(self): |
@@ -10,10 +10,9 import matplotlib.pyplot as plt | |||||
10 |
|
10 | |||
11 | import pylab as plb |
|
11 | import pylab as plb | |
12 | from scipy.optimize import curve_fit |
|
12 | from scipy.optimize import curve_fit | |
13 | from scipy import asarray as ar,exp |
|
13 | from scipy import asarray as ar, exp | |
14 | from scipy import stats |
|
14 | from scipy import stats | |
15 |
|
15 | |||
16 | from duplicity.path import Path |
|
|||
17 | from numpy.ma.core import getdata |
|
16 | from numpy.ma.core import getdata | |
18 |
|
17 | |||
19 | SPEED_OF_LIGHT = 299792458 |
|
18 | SPEED_OF_LIGHT = 299792458 | |
@@ -427,7 +426,7 class RecordHeaderBLTR(Header): | |||||
427 | return 1 |
|
426 | return 1 | |
428 |
|
427 | |||
429 |
|
428 | |||
430 | class BLTRReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODataReader): |
|
429 | class BLTRSpectraReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODataReader): | |
431 |
|
430 | |||
432 | path = None |
|
431 | path = None | |
433 | startDate = None |
|
432 | startDate = None | |
@@ -456,7 +455,7 class BLTRReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODataReade | |||||
456 | #Eliminar de la base la herencia |
|
455 | #Eliminar de la base la herencia | |
457 | ProcessingUnit.__init__(self, **kwargs) |
|
456 | ProcessingUnit.__init__(self, **kwargs) | |
458 |
|
457 | |||
459 |
|
|
458 | #self.isConfig = False | |
460 |
|
459 | |||
461 | #self.pts2read_SelfSpectra = 0 |
|
460 | #self.pts2read_SelfSpectra = 0 | |
462 | #self.pts2read_CrossSpectra = 0 |
|
461 | #self.pts2read_CrossSpectra = 0 | |
@@ -1152,43 +1151,5 class BLTRReader (ProcessingUnit, FileHeaderBLTR, RecordHeaderBLTR, JRODataReade | |||||
1152 |
|
1151 | |||
1153 |
|
1152 | |||
1154 |
|
1153 | |||
1155 | class BLTRWriter(ProcessingUnit): |
|
1154 | ||
1156 | ''' |
|
|||
1157 | classdocs |
|
|||
1158 | ''' |
|
|||
1159 |
|
||||
1160 | def __init__(self): |
|
|||
1161 | ''' |
|
|||
1162 | Constructor |
|
|||
1163 | ''' |
|
|||
1164 | self.dataOut = None |
|
|||
1165 |
|
||||
1166 | self.isConfig = False |
|
|||
1167 |
|
||||
1168 | def setup(self, dataIn, path, blocksPerFile, set=0, ext=None): |
|
|||
1169 | ''' |
|
|||
1170 | In this method we should set all initial parameters. |
|
|||
1171 |
|
||||
1172 | Input: |
|
|||
1173 | dataIn : Input data will also be outputa data |
|
|||
1174 |
|
||||
1175 | ''' |
|
|||
1176 | self.dataOut = dataIn |
|
|||
1177 |
|
||||
1178 | self.isConfig = True |
|
|||
1179 |
|
||||
1180 | return |
|
|||
1181 |
|
||||
1182 | def run(self, dataIn, **kwargs): |
|
|||
1183 | ''' |
|
|||
1184 | This method will be called many times so here you should put all your code |
|
|||
1185 |
|
||||
1186 | Inputs: |
|
|||
1187 |
|
||||
1188 | dataIn : object with the data |
|
|||
1189 |
|
||||
1190 | ''' |
|
|||
1191 |
|
||||
1192 | if not self.isConfig: |
|
|||
1193 | self.setup(dataIn, **kwargs) |
|
|||
1194 |
|
1155 |
@@ -13,7 +13,6 from scipy.optimize import curve_fit | |||||
13 | from scipy import asarray as ar,exp |
|
13 | from scipy import asarray as ar,exp | |
14 | from scipy import stats |
|
14 | from scipy import stats | |
15 |
|
15 | |||
16 | from duplicity.path import Path |
|
|||
17 | from numpy.ma.core import getdata |
|
16 | from numpy.ma.core import getdata | |
18 |
|
17 | |||
19 | SPEED_OF_LIGHT = 299792458 |
|
18 | SPEED_OF_LIGHT = 299792458 |
@@ -12,5 +12,4 from jroproc_correlation import * | |||||
12 | from jroproc_parameters import * |
|
12 | from jroproc_parameters import * | |
13 | from jroproc_spectra_lags import * |
|
13 | from jroproc_spectra_lags import * | |
14 | from jroproc_spectra_acf import * |
|
14 | from jroproc_spectra_acf import * | |
15 | from
|
15 | from bltrproc_parameters import * | |
16 |
|
1 | NO CONTENT: file was removed |
|
NO CONTENT: file was removed |
1 | NO CONTENT: file was removed |
|
NO CONTENT: file was removed |
1 | NO CONTENT: file was removed |
|
NO CONTENT: file was removed |
1 | NO CONTENT: file was removed |
|
NO CONTENT: file was removed |
1 | NO CONTENT: file was removed |
|
NO CONTENT: file was removed |
1 | NO CONTENT: file was removed |
|
NO CONTENT: file was removed |
1 | NO CONTENT: file was removed |
|
NO CONTENT: file was removed |
1 | NO CONTENT: file was removed |
|
NO CONTENT: file was removed |
1 | NO CONTENT: file was removed |
|
NO CONTENT: file was removed |
1 | NO CONTENT: file was removed |
|
NO CONTENT: file was removed |
1 | NO CONTENT: file was removed |
|
NO CONTENT: file was removed |
1 | NO CONTENT: file was removed |
|
NO CONTENT: file was removed |
1 | NO CONTENT: file was removed |
|
NO CONTENT: file was removed |
1 | NO CONTENT: file was removed |
|
NO CONTENT: file was removed |
1 | NO CONTENT: file was removed |
|
NO CONTENT: file was removed |
1 | NO CONTENT: file was removed |
|
NO CONTENT: file was removed |
1 | NO CONTENT: file was removed |
|
NO CONTENT: file was removed |
1 | NO CONTENT: file was removed |
|
NO CONTENT: file was removed |
1 | NO CONTENT: file was removed |
|
NO CONTENT: file was removed |
1 | NO CONTENT: file was removed |
|
NO CONTENT: file was removed |
1 | NO CONTENT: file was removed |
|
NO CONTENT: file was removed |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed |
|
NO CONTENT: file was removed |
1 | NO CONTENT: file was removed |
|
NO CONTENT: file was removed |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed |
|
NO CONTENT: file was removed |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed |
|
NO CONTENT: file was removed | ||
This diff has been collapsed as it changes many lines, (950 lines changed)
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed |
|
NO CONTENT: file was removed | ||
This diff has been collapsed as it changes many lines, (576 lines changed)
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed |
|
NO CONTENT: file was removed |
1 | NO CONTENT: file was removed |
|
NO CONTENT: file was removed |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed, binary diff hidden |
|
NO CONTENT: file was removed, binary diff hidden |
1 | NO CONTENT: file was removed |
|
NO CONTENT: file was removed |
1 | NO CONTENT: file was removed |
|
NO CONTENT: file was removed |