Fix UTC times and bugs in BLTR modules
Juan C. Espinoza
r1085:0d7a50b6a171
@@ -1,362 +1,366
1 1 '''
2 2 Created on Nov 9, 2016
3 3
4 4 @author: roj- LouVD
5 5 '''
6 6
7 7
8 8 import os
9 9 import sys
10 10 import time
11 11 import glob
12 12 import datetime
13 13
14 14 import numpy
15 15
16 16 from schainpy.model.proc.jroproc_base import ProcessingUnit
17 17 from schainpy.model.data.jrodata import Parameters
18 18 from schainpy.model.io.jroIO_base import JRODataReader, isNumber
19 from schainpy.utils import log
19 20
20 21 FILE_HEADER_STRUCTURE = numpy.dtype([
21 22 ('FMN', '<u4'),
22 23 ('nrec', '<u4'),
23 24 ('fr_offset', '<u4'),
24 25 ('id', '<u4'),
25 26 ('site', 'u1', (32,))
26 27 ])
27 28
28 29 REC_HEADER_STRUCTURE = numpy.dtype([
29 30 ('rmn', '<u4'),
30 31 ('rcounter', '<u4'),
31 32 ('nr_offset', '<u4'),
32 33 ('tr_offset', '<u4'),
33 34 ('time', '<u4'),
34 35 ('time_msec', '<u4'),
35 36 ('tag', 'u1', (32,)),
36 37 ('comments', 'u1', (32,)),
37 38 ('lat', '<f4'),
38 39 ('lon', '<f4'),
39 40 ('gps_status', '<u4'),
40 41 ('freq', '<u4'),
41 42 ('freq0', '<u4'),
42 43 ('nchan', '<u4'),
43 44 ('delta_r', '<u4'),
44 45 ('nranges', '<u4'),
45 46 ('r0', '<u4'),
46 47 ('prf', '<u4'),
47 48 ('ncoh', '<u4'),
48 49 ('npoints', '<u4'),
49 50 ('polarization', '<i4'),
50 51 ('rx_filter', '<u4'),
51 52 ('nmodes', '<u4'),
52 53 ('dmode_index', '<u4'),
53 54 ('dmode_rngcorr', '<u4'),
54 55 ('nrxs', '<u4'),
55 56 ('acf_length', '<u4'),
56 57 ('acf_lags', '<u4'),
57 58 ('sea_to_atmos', '<f4'),
58 59 ('sea_notch', '<u4'),
59 60 ('lh_sea', '<u4'),
60 61 ('hh_sea', '<u4'),
61 62 ('nbins_sea', '<u4'),
62 63 ('min_snr', '<f4'),
63 64 ('min_cc', '<f4'),
64 65 ('max_time_diff', '<f4')
65 66 ])
66 67
67 68 DATA_STRUCTURE = numpy.dtype([
68 69 ('range', '<u4'),
69 70 ('status', '<u4'),
70 71 ('zonal', '<f4'),
71 72 ('meridional', '<f4'),
72 73 ('vertical', '<f4'),
73 74 ('zonal_a', '<f4'),
74 75 ('meridional_a', '<f4'),
75 76 ('corrected_fading', '<f4'), # seconds
76 77 ('uncorrected_fading', '<f4'), # seconds
77 78 ('time_diff', '<f4'),
78 79 ('major_axis', '<f4'),
79 80 ('axial_ratio', '<f4'),
80 81 ('orientation', '<f4'),
81 82 ('sea_power', '<u4'),
82 83 ('sea_algorithm', '<u4')
83 84 ])
84 85
85 86 class BLTRParamReader(JRODataReader, ProcessingUnit):
86 87 '''
87 88 Boundary Layer and Tropospheric Radar (BLTR) reader: wind velocities and SNR from *.sswma files
88 89 '''
89 90
90 91 ext = '.sswma'
91 92
92 93 def __init__(self, **kwargs):
93 94
94 95 ProcessingUnit.__init__(self , **kwargs)
95 96
96 97 self.dataOut = Parameters()
97 98 self.counter_records = 0
98 99 self.flagNoMoreFiles = 0
99 100 self.isConfig = False
100 101 self.filename = None
101 102
102 103 def setup(self,
103 104 path=None,
104 105 startDate=None,
105 106 endDate=None,
106 107 ext=None,
107 108 startTime=datetime.time(0, 0, 0),
108 109 endTime=datetime.time(23, 59, 59),
109 110 timezone=0,
110 111 status_value=0,
111 112 **kwargs):
112
113
113 114 self.path = path
115 self.startDate = startDate
116 self.endDate = endDate
114 117 self.startTime = startTime
115 self.endTime = endTime
118 self.endTime = endTime
116 119 self.status_value = status_value
120 self.datatime = datetime.datetime(1900,1,1)
117 121
118 122 if self.path is None:
119 123 raise ValueError, "The path is not valid"
120 124
121 125 if ext is None:
122 126 ext = self.ext
123 127
124 128 self.search_files(self.path, startDate, endDate, ext)
125 129 self.timezone = timezone
126 130 self.fileIndex = 0
127 131
128 132 if not self.fileList:
129 133 raise Warning, "There are no files matching these dates in the folder: %s. \n Check 'startDate' and 'endDate' "%(path)
130 134
131 135 self.setNextFile()
132 136
133 137 def search_files(self, path, startDate, endDate, ext):
134 138 '''
135 139 Searching for BLTR rawdata files in path
136 140 Creating a list of files to process included in [startDate, endDate]
137 141
138 142 Input:
139 143 path - Path to find BLTR rawdata files
140 144 startDate - Select file from this date
141 145 endDate - Select file until this date
142 146 ext - Extension of the file to read
143 147
144 148 '''
145 149
146 print 'Searching file in %s ' % (path)
150 log.success('Searching files in {} '.format(path), 'BLTRParamReader')
147 151 foldercounter = 0
148 152 fileList0 = glob.glob1(path, "*%s" % ext)
149 153 fileList0.sort()
150 154
151 155 self.fileList = []
152 156 self.dateFileList = []
153 157
154 158 for thisFile in fileList0:
155 159 year = thisFile[-14:-10]
156 160 if not isNumber(year):
157 161 continue
158 162
159 163 month = thisFile[-10:-8]
160 164 if not isNumber(month):
161 165 continue
162 166
163 167 day = thisFile[-8:-6]
164 168 if not isNumber(day):
165 169 continue
166 170
167 171 year, month, day = int(year), int(month), int(day)
168 172 dateFile = datetime.date(year, month, day)
169 173
170 174 if (startDate > dateFile) or (endDate < dateFile):
171 175 continue
172 176
173 177 self.fileList.append(thisFile)
174 178 self.dateFileList.append(dateFile)
175 179
176 180 return
177 181
178 182 def setNextFile(self):
179 183
180 184 file_id = self.fileIndex
181 185
182 186 if file_id == len(self.fileList):
183 print '\nNo more files in the folder'
184 print 'Total number of file(s) read : {}'.format(self.fileIndex + 1)
187 log.success('No more files in the folder', 'BLTRParamReader')
185 188 self.flagNoMoreFiles = 1
186 189 return 0
187 190
188 print '\n[Setting file] (%s) ...' % self.fileList[file_id]
191 log.success('Opening {}'.format(self.fileList[file_id]), 'BLTRParamReader')
189 192 filename = os.path.join(self.path, self.fileList[file_id])
190 193
191 194 dirname, name = os.path.split(filename)
192 195 self.siteFile = name.split('.')[0] # 'peru2' ---> Piura - 'peru1' ---> Huancayo or Porcuya
193 196 if self.filename is not None:
194 197 self.fp.close()
195 198 self.filename = filename
196 199 self.fp = open(self.filename, 'rb')
197 200 self.header_file = numpy.fromfile(self.fp, FILE_HEADER_STRUCTURE, 1)
198 201 self.nrecords = self.header_file['nrec'][0]
199 202 self.sizeOfFile = os.path.getsize(self.filename)
200 203 self.counter_records = 0
201 204 self.flagIsNewFile = 0
202 205 self.fileIndex += 1
203 206
204 207 return 1
205 208
206 209 def readNextBlock(self):
207 210
208 while True:
211 while True:
209 212 if self.counter_records == self.nrecords:
210 213 self.flagIsNewFile = 1
211 214 if not self.setNextFile():
212 215 return 0
213
216
214 217 self.readBlock()
215
216 if (self.datatime.time() < self.startTime) or (self.datatime.time() > self.endTime):
217 print "[Reading] Record No. %d/%d -> %s [Skipping]" %(
218 self.counter_records,
219 self.nrecords,
220 self.datatime.ctime())
218
219 if (self.datatime < datetime.datetime.combine(self.startDate, self.startTime)) or \
220 (self.datatime > datetime.datetime.combine(self.endDate, self.endTime)):
221 log.warning(
222 'Reading Record No. {}/{} -> {} [Skipping]'.format(
223 self.counter_records,
224 self.nrecords,
225 self.datatime.ctime()),
226 'BLTRParamReader')
221 227 continue
222 228 break
223 229
224 print "[Reading] Record No. %d/%d -> %s" %(
230 log.log('Reading Record No. {}/{} -> {}'.format(
225 231 self.counter_records,
226 232 self.nrecords,
227 self.datatime.ctime())
233 self.datatime.ctime()), 'BLTRParamReader')
228 234
229 235 return 1
230 236
231 237 def readBlock(self):
232 238
233 239 pointer = self.fp.tell()
234 240 header_rec = numpy.fromfile(self.fp, REC_HEADER_STRUCTURE, 1)
235 241 self.nchannels = header_rec['nchan'][0]/2
236 242 self.kchan = header_rec['nrxs'][0]
237 243 self.nmodes = header_rec['nmodes'][0]
238 244 self.nranges = header_rec['nranges'][0]
239 245 self.fp.seek(pointer)
240 246 self.height = numpy.empty((self.nmodes, self.nranges))
241 247 self.snr = numpy.empty((self.nmodes, self.nchannels, self.nranges))
242 248 self.buffer = numpy.empty((self.nmodes, 3, self.nranges))
249 self.flagDiscontinuousBlock = 0
243 250
244 251 for mode in range(self.nmodes):
245 252 self.readHeader()
246 253 data = self.readData()
247 254 self.height[mode] = (data[0] - self.correction) / 1000.
248 255 self.buffer[mode] = data[1]
249 256 self.snr[mode] = data[2]
250 257
251 258 self.counter_records = self.counter_records + self.nmodes
252 259
253 260 return
254 261
255 262 def readHeader(self):
256 263 '''
257 264 RecordHeader of BLTR rawdata file
258 265 '''
259 266
260 267 header_structure = numpy.dtype(
261 268 REC_HEADER_STRUCTURE.descr + [
262 269 ('antenna_coord', 'f4', (2, self.nchannels)),
263 270 ('rx_gains', 'u4', (self.nchannels,)),
264 271 ('rx_analysis', 'u4', (self.nchannels,))
265 272 ]
266 273 )
267 274
268 275 self.header_rec = numpy.fromfile(self.fp, header_structure, 1)
269 276 self.lat = self.header_rec['lat'][0]
270 277 self.lon = self.header_rec['lon'][0]
271 278 self.delta = self.header_rec['delta_r'][0]
272 279 self.correction = self.header_rec['dmode_rngcorr'][0]
273 280 self.imode = self.header_rec['dmode_index'][0]
274 281 self.antenna = self.header_rec['antenna_coord']
275 282 self.rx_gains = self.header_rec['rx_gains']
276 self.time = self.header_rec['time'][0]
277 tseconds = self.header_rec['time'][0]
278 local_t1 = time.localtime(tseconds)
279 self.year = local_t1.tm_year
280 self.month = local_t1.tm_mon
281 self.day = local_t1.tm_mday
282 self.t = datetime.datetime(self.year, self.month, self.day)
283 self.datatime = datetime.datetime.utcfromtimestamp(self.time)
283 self.time = self.header_rec['time'][0]
284 dt = datetime.datetime.utcfromtimestamp(self.time)
285 if dt.date()>self.datatime.date():
286 self.flagDiscontinuousBlock = 1
287 self.datatime = dt
284 288
285 289 def readData(self):
286 290 '''
287 291 Reading and filtering a data block record of a BLTR rawdata file; filtering is done according to status_value.
288 292
289 293 Input:
290 294 status_value - Array data is set to NaN where the record status differs from status_value
291 295
292 296 '''
293 297
294 298 data_structure = numpy.dtype(
295 299 DATA_STRUCTURE.descr + [
296 300 ('rx_saturation', 'u4', (self.nchannels,)),
297 301 ('chan_offset', 'u4', (2 * self.nchannels,)),
298 302 ('rx_amp', 'u4', (self.nchannels,)),
299 303 ('rx_snr', 'f4', (self.nchannels,)),
300 304 ('cross_snr', 'f4', (self.kchan,)),
301 305 ('sea_power_relative', 'f4', (self.kchan,))]
302 306 )
303 307
304 308 data = numpy.fromfile(self.fp, data_structure, self.nranges)
305 309
306 310 height = data['range']
307 311 winds = numpy.array((data['zonal'], data['meridional'], data['vertical']))
308 312 snr = data['rx_snr'].T
309 313
310 314 winds[numpy.where(winds == -9999.)] = numpy.nan
311 315 winds[:, numpy.where(data['status'] != self.status_value)] = numpy.nan
312 316 snr[numpy.where(snr == -9999.)] = numpy.nan
313 317 snr[:, numpy.where(data['status'] != self.status_value)] = numpy.nan
314 318 snr = numpy.power(10, snr / 10)
315 319
316 320 return height, winds, snr
317 321
318 322 def set_output(self):
319 323 '''
320 324 Storing data from databuffer to dataOut object
321 325 '''
322 326
323 327 self.dataOut.data_SNR = self.snr
324 328 self.dataOut.height = self.height
325 self.dataOut.data_output = self.buffer
329 self.dataOut.data = self.buffer
326 330 self.dataOut.utctimeInit = self.time
327 331 self.dataOut.utctime = self.dataOut.utctimeInit
328 332 self.dataOut.useLocalTime = False
329 333 self.dataOut.paramInterval = 157
330 334 self.dataOut.timezone = self.timezone
331 335 self.dataOut.site = self.siteFile
332 336 self.dataOut.nrecords = self.nrecords/self.nmodes
333 337 self.dataOut.sizeOfFile = self.sizeOfFile
334 338 self.dataOut.lat = self.lat
335 339 self.dataOut.lon = self.lon
336 340 self.dataOut.channelList = range(self.nchannels)
337 self.dataOut.kchan = self.kchan
338 # self.dataOut.nHeights = self.nranges
341 self.dataOut.kchan = self.kchan
339 342 self.dataOut.delta = self.delta
340 343 self.dataOut.correction = self.correction
341 344 self.dataOut.nmodes = self.nmodes
342 345 self.dataOut.imode = self.imode
343 346 self.dataOut.antenna = self.antenna
344 347 self.dataOut.rx_gains = self.rx_gains
345 348 self.dataOut.flagNoData = False
349 self.dataOut.flagDiscontinuousBlock = self.flagDiscontinuousBlock
346 350
347 351 def getData(self):
348 352 '''
349 353 Reading the next block and storing data in the dataOut object
350 354 '''
351 355 if self.flagNoMoreFiles:
352 356 self.dataOut.flagNoData = True
353 print 'No file left to process'
357 log.success('No file left to process', 'BLTRParamReader')
354 358 return 0
355 359
356 360 if not self.readNextBlock():
357 361 self.dataOut.flagNoData = True
358 362 return 0
359 363
360 364 self.set_output()
361 365
362 366 return 1
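
The core of the UTC fix in the hunk above is in readHeader(): the record datetime is now derived only from datetime.datetime.utcfromtimestamp(), the time.localtime()-based year/month/day fields were dropped, and a date rollover between consecutive records sets flagDiscontinuousBlock. The snippet below is a minimal standalone sketch (not part of the commit) of the distinction the change relies on; tseconds is a hypothetical epoch value standing in for header_rec['time'][0].

import time
import datetime

# hypothetical epoch seconds, standing in for the 'time' field of REC_HEADER_STRUCTURE
tseconds = 1478716200

# old approach: local wall-clock time, shifts with the machine's timezone settings
local_dt = datetime.datetime(*time.localtime(tseconds)[:6])

# new approach: UTC wall-clock time, independent of where the reader runs
utc_dt = datetime.datetime.utcfromtimestamp(tseconds)

# nonzero on any machine whose local timezone is not UTC
print(utc_dt - local_dt)
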
@@ -1,403 +1,403
1 1 '''
2 2 Created on Oct 24, 2016
3 3
4 4 @author: roj- LouVD
5 5 '''
6 6
7 7 import numpy
8 8 import copy
9 9 import datetime
10 10 import time
11 11 from time import gmtime
12 12
13 13 from numpy import transpose
14 14
15 15 from jroproc_base import ProcessingUnit, Operation
16 16 from schainpy.model.data.jrodata import Parameters
17 17
18 18
19 19 class BLTRParametersProc(ProcessingUnit):
20 20 '''
21 21 Processing unit for BLTR parameters data (winds)
22 22
23 23 Inputs:
24 24 self.dataOut.nmodes - Number of operation modes
25 25 self.dataOut.nchannels - Number of channels
26 26 self.dataOut.nranges - Number of ranges
27 27
28 28 self.dataOut.data_SNR - SNR array
29 29 self.dataOut.data_output - Zonal, Meridional and Vertical velocity array
30 30 self.dataOut.height - Height array (km)
31 31 self.dataOut.time - Time array (seconds)
32 32
33 33 self.dataOut.fileIndex - Index of the file currently read
34 34 self.dataOut.lat - Latitude coordinate of BLTR location
35 35
36 36 self.dataOut.doy - Experiment doy (number of the day in the current year)
37 37 self.dataOut.month - Experiment month
38 38 self.dataOut.day - Experiment day
39 39 self.dataOut.year - Experiment year
40 40 '''
41 41
42 42 def __init__(self, **kwargs):
43 43 '''
44 44 Inputs: None
45 45 '''
46 46 ProcessingUnit.__init__(self, **kwargs)
47 47 self.dataOut = Parameters()
48 48 self.isConfig = False
49 49
50 50 def setup(self, mode):
51 51 '''
52 52 '''
53 53 self.dataOut.mode = mode
54 54
55 55 def run(self, mode, snr_threshold=None):
56 56 '''
57 57 Inputs:
58 58 mode = High resolution (0) or Low resolution (1) data
59 59 snr_threshold = SNR filter threshold in dB
60 60 '''
61 61
62 62 if not self.isConfig:
63 63 self.setup(mode)
64 64 self.isConfig = True
65 65
66 66 if self.dataIn.type == 'Parameters':
67 67 self.dataOut.copy(self.dataIn)
68
69 self.dataOut.data_output = self.dataOut.data_output[mode]
68
69 self.dataOut.data_param = self.dataOut.data[mode]
70 70 self.dataOut.heightList = self.dataOut.height[0]
71 71 self.dataOut.data_SNR = self.dataOut.data_SNR[mode]
72 72
73 73 if snr_threshold is not None:
74 74 SNRavg = numpy.average(self.dataOut.data_SNR, axis=0)
75 75 SNRavgdB = 10*numpy.log10(SNRavg)
76 76 for i in range(3):
77 self.dataOut.data_output[i][SNRavgdB <= snr_threshold] = numpy.nan
77 self.dataOut.data_param[i][SNRavgdB <= snr_threshold] = numpy.nan
78 78
79 79 # TODO
80 80 class OutliersFilter(Operation):
81 81
82 82 def __init__(self, **kwargs):
83 83 '''
84 84 '''
85 85 Operation.__init__(self, **kwargs)
86 86
87 87 def run(self, svalue2, method, factor, filter, npoints=9):
88 88 '''
89 89 Inputs:
90 90 svalue - string to select array velocity
91 91 svalue2 - string to choose axis filtering
92 92 method - 0 for SMOOTH or 1 for MEDIAN
93 93 factor - number used to set threshold
94 94 filter - 1 to filter data using the standard deviation criterion, else 0
95 95 npoints - number of points for mask filter
96 96 '''
97 97
98 98 print ' Outliers Filter {} {} / threshold = {}'.format(svalue, svalue, factor)
99 99
100 100
101 101 yaxis = self.dataOut.heightList
102 102 xaxis = numpy.array([[self.dataOut.utctime]])
103 103
104 104 # Zonal
105 105 value_temp = self.dataOut.data_output[0]
106 106
107 107 # Zonal
108 108 value_temp = self.dataOut.data_output[1]
109 109
110 110 # Vertical
111 111 value_temp = numpy.transpose(self.dataOut.data_output[2])
112 112
113 113 htemp = yaxis
114 114 std = value_temp
115 115 for h in range(len(htemp)):
116 116 nvalues_valid = len(numpy.where(numpy.isfinite(value_temp[h]))[0])
117 117 minvalid = npoints
118 118
119 119 # only if the number of valid values is greater than the minimum required
120 120 if nvalues_valid > minvalid:
121 121
122 122 if method == 0:
123 123 #SMOOTH
124 124 w = value_temp[h] - self.Smooth(input=value_temp[h], width=npoints, edge_truncate=1)
125 125
126 126
127 127 if method == 1:
128 128 #MEDIAN
129 129 w = value_temp[h] - self.Median(input=value_temp[h], width = npoints)
130 130
131 131 dw = numpy.std(w[numpy.where(numpy.isfinite(w))],ddof = 1)
132 132
133 133 threshold = dw*factor
134 134 value_temp[numpy.where(w > threshold),h] = numpy.nan
135 135 value_temp[numpy.where(w < -1*threshold),h] = numpy.nan
136 136
137 137
138 138 #At the end
139 139 if svalue2 == 'inHeight':
140 140 value_temp = numpy.transpose(value_temp)
141 141 output_array[:,m] = value_temp
142 142
143 143 if svalue == 'zonal':
144 144 self.dataOut.data_output[0] = output_array
145 145
146 146 elif svalue == 'meridional':
147 147 self.dataOut.data_output[1] = output_array
148 148
149 149 elif svalue == 'vertical':
150 150 self.dataOut.data_output[2] = output_array
151 151
152 152 return self.dataOut.data_output
153 153
154 154
155 155 def Median(self,input,width):
156 156 '''
157 157 Inputs:
158 158 input - Velocity array
159 159 width - Number of points for mask filter
160 160
161 161 '''
162 162
163 163 if numpy.mod(width,2) == 1:
164 164 pc = int((width - 1) / 2)
165 165 cont = 0
166 166 output = []
167 167
168 168 for i in range(len(input)):
169 169 if i >= pc and i < len(input) - pc:
170 170 new2 = input[i-pc:i+pc+1]
171 171 temp = numpy.where(numpy.isfinite(new2))
172 172 new = new2[temp]
173 173 value = numpy.median(new)
174 174 output.append(value)
175 175
176 176 output = numpy.array(output)
177 177 output = numpy.hstack((input[0:pc],output))
178 178 output = numpy.hstack((output,input[-pc:len(input)]))
179 179
180 180 return output
181 181
182 182 def Smooth(self,input,width,edge_truncate = None):
183 183 '''
184 184 Inputs:
185 185 input - Velocity array
186 186 width - Number of points for mask filter
187 187 edge_truncate - 1 to truncate the convolution product at the edges, else 0 or None
188 188
189 189 '''
190 190
191 191 if numpy.mod(width,2) == 0:
192 192 real_width = width + 1
193 193 nzeros = width / 2
194 194 else:
195 195 real_width = width
196 196 nzeros = (width - 1) / 2
197 197
198 198 half_width = int(real_width)/2
199 199 length = len(input)
200 200
201 201 gate = numpy.ones(real_width,dtype='float')
202 202 norm_of_gate = numpy.sum(gate)
203 203
204 204 nan_process = 0
205 205 nan_id = numpy.where(numpy.isnan(input))
206 206 if len(nan_id[0]) > 0:
207 207 nan_process = 1
208 208 pb = numpy.zeros(len(input))
209 209 pb[nan_id] = 1.
210 210 input[nan_id] = 0.
211 211
212 212 if edge_truncate == True:
213 213 output = numpy.convolve(input/norm_of_gate,gate,mode='same')
214 214 elif edge_truncate == False or edge_truncate == None:
215 215 output = numpy.convolve(input/norm_of_gate,gate,mode='valid')
216 216 output = numpy.hstack((input[0:half_width],output))
217 217 output = numpy.hstack((output,input[len(input)-half_width:len(input)]))
218 218
219 219 if nan_process:
220 220 pb = numpy.convolve(pb/norm_of_gate,gate,mode='valid')
221 221 pb = numpy.hstack((numpy.zeros(half_width),pb))
222 222 pb = numpy.hstack((pb,numpy.zeros(half_width)))
223 223 output[numpy.where(pb > 0.9999)] = numpy.nan
224 224 input[nan_id] = numpy.nan
225 225 return output
226 226
227 227 def Average(self,aver=0,nhaver=1):
228 228 '''
229 229 Inputs:
230 230 aver - Index selecting the time period over which data is averaged (consensus)
231 231 nhaver - Indicates the decimation factor in heights
232 232
233 233 '''
234 234 nhpoints = 48
235 235
236 236 lat_piura = -5.17
237 237 lat_huancayo = -12.04
238 238 lat_porcuya = -5.8
239 239
240 240 if '%2.2f'%self.dataOut.lat == '%2.2f'%lat_piura:
241 241 hcm = 3.
242 242 if self.dataOut.year == 2003 :
243 243 if self.dataOut.doy >= 25 and self.dataOut.doy < 64:
244 244 nhpoints = 12
245 245
246 246 elif '%2.2f'%self.dataOut.lat == '%2.2f'%lat_huancayo:
247 247 hcm = 3.
248 248 if self.dataOut.year == 2003 :
249 249 if self.dataOut.doy >= 25 and self.dataOut.doy < 64:
250 250 nhpoints = 12
251 251
252 252
253 253 elif '%2.2f'%self.dataOut.lat == '%2.2f'%lat_porcuya:
254 254 hcm = 5.#2
255 255
256 256 pdata = 0.2
257 257 taver = [1,2,3,4,6,8,12,24]
258 258 t0 = 0
259 259 tf = 24
260 260 ntime =(tf-t0)/taver[aver]
261 261 ti = numpy.arange(ntime)
262 262 tf = numpy.arange(ntime) + taver[aver]
263 263
264 264
265 265 old_height = self.dataOut.heightList
266 266
267 267 if nhaver > 1:
268 268 num_hei = len(self.dataOut.heightList)/nhaver/self.dataOut.nmodes
269 269 deltha = 0.05*nhaver
270 270 minhvalid = pdata*nhaver
271 271 for im in range(self.dataOut.nmodes):
272 272 new_height = numpy.arange(num_hei)*deltha + self.dataOut.height[im,0] + deltha/2.
273 273
274 274
275 275 data_fHeigths_List = []
276 276 data_fZonal_List = []
277 277 data_fMeridional_List = []
278 278 data_fVertical_List = []
279 279 startDTList = []
280 280
281 281
282 282 for i in range(ntime):
283 283 height = old_height
284 284
285 285 start = datetime.datetime(self.dataOut.year,self.dataOut.month,self.dataOut.day) + datetime.timedelta(hours = int(ti[i])) - datetime.timedelta(hours = 5)
286 286 stop = datetime.datetime(self.dataOut.year,self.dataOut.month,self.dataOut.day) + datetime.timedelta(hours = int(tf[i])) - datetime.timedelta(hours = 5)
287 287
288 288
289 289 limit_sec1 = time.mktime(start.timetuple())
290 290 limit_sec2 = time.mktime(stop.timetuple())
291 291
292 292 t1 = numpy.where(self.f_timesec >= limit_sec1)
293 293 t2 = numpy.where(self.f_timesec < limit_sec2)
294 294 time_select = []
295 295 for val_sec in t1[0]:
296 296 if val_sec in t2[0]:
297 297 time_select.append(val_sec)
298 298
299 299
300 300 time_select = numpy.array(time_select,dtype = 'int')
301 301 minvalid = numpy.ceil(pdata*nhpoints)
302 302
303 303 zon_aver = numpy.zeros([self.dataOut.nranges,self.dataOut.nmodes],dtype='f4') + numpy.nan
304 304 mer_aver = numpy.zeros([self.dataOut.nranges,self.dataOut.nmodes],dtype='f4') + numpy.nan
305 305 ver_aver = numpy.zeros([self.dataOut.nranges,self.dataOut.nmodes],dtype='f4') + numpy.nan
306 306
307 307 if nhaver > 1:
308 308 new_zon_aver = numpy.zeros([num_hei,self.dataOut.nmodes],dtype='f4') + numpy.nan
309 309 new_mer_aver = numpy.zeros([num_hei,self.dataOut.nmodes],dtype='f4') + numpy.nan
310 310 new_ver_aver = numpy.zeros([num_hei,self.dataOut.nmodes],dtype='f4') + numpy.nan
311 311
312 312 if len(time_select) > minvalid:
313 313 time_average = self.f_timesec[time_select]
314 314
315 315 for im in range(self.dataOut.nmodes):
316 316
317 317 for ih in range(self.dataOut.nranges):
318 318 if numpy.sum(numpy.isfinite(self.f_zon[time_select,ih,im])) >= minvalid:
319 319 zon_aver[ih,im] = numpy.nansum(self.f_zon[time_select,ih,im]) / numpy.sum(numpy.isfinite(self.f_zon[time_select,ih,im]))
320 320
321 321 if numpy.sum(numpy.isfinite(self.f_mer[time_select,ih,im])) >= minvalid:
322 322 mer_aver[ih,im] = numpy.nansum(self.f_mer[time_select,ih,im]) / numpy.sum(numpy.isfinite(self.f_mer[time_select,ih,im]))
323 323
324 324 if numpy.sum(numpy.isfinite(self.f_ver[time_select,ih,im])) >= minvalid:
325 325 ver_aver[ih,im] = numpy.nansum(self.f_ver[time_select,ih,im]) / numpy.sum(numpy.isfinite(self.f_ver[time_select,ih,im]))
326 326
327 327 if nhaver > 1:
328 328 for ih in range(num_hei):
329 329 hvalid = numpy.arange(nhaver) + nhaver*ih
330 330
331 331 if numpy.sum(numpy.isfinite(zon_aver[hvalid,im])) >= minvalid:
332 332 new_zon_aver[ih,im] = numpy.nansum(zon_aver[hvalid,im]) / numpy.sum(numpy.isfinite(zon_aver[hvalid,im]))
333 333
334 334 if numpy.sum(numpy.isfinite(mer_aver[hvalid,im])) >= minvalid:
335 335 new_mer_aver[ih,im] = numpy.nansum(mer_aver[hvalid,im]) / numpy.sum(numpy.isfinite(mer_aver[hvalid,im]))
336 336
337 337 if numpy.sum(numpy.isfinite(ver_aver[hvalid,im])) >= minvalid:
338 338 new_ver_aver[ih,im] = numpy.nansum(ver_aver[hvalid,im]) / numpy.sum(numpy.isfinite(ver_aver[hvalid,im]))
339 339 if nhaver > 1:
340 340 zon_aver = new_zon_aver
341 341 mer_aver = new_mer_aver
342 342 ver_aver = new_ver_aver
343 343 height = new_height
344 344
345 345
346 346 tstart = time_average[0]
347 347 tend = time_average[-1]
348 348 startTime = time.gmtime(tstart)
349 349
350 350 year = startTime.tm_year
351 351 month = startTime.tm_mon
352 352 day = startTime.tm_mday
353 353 hour = startTime.tm_hour
354 354 minute = startTime.tm_min
355 355 second = startTime.tm_sec
356 356
357 357 startDTList.append(datetime.datetime(year,month,day,hour,minute,second))
358 358
359 359
360 360 o_height = numpy.array([])
361 361 o_zon_aver = numpy.array([])
362 362 o_mer_aver = numpy.array([])
363 363 o_ver_aver = numpy.array([])
364 364 if self.dataOut.nmodes > 1:
365 365 for im in range(self.dataOut.nmodes):
366 366
367 367 if im == 0:
368 368 h_select = numpy.where(numpy.bitwise_and(height[0,:] >=0,height[0,:] <= hcm,numpy.isfinite(height[0,:])))
369 369 else:
370 370 h_select = numpy.where(numpy.bitwise_and(height[1,:] > hcm,height[1,:] < 20,numpy.isfinite(height[1,:])))
371 371
372 372
373 373 ht = h_select[0]
374 374
375 375 o_height = numpy.hstack((o_height,height[im,ht]))
376 376 o_zon_aver = numpy.hstack((o_zon_aver,zon_aver[ht,im]))
377 377 o_mer_aver = numpy.hstack((o_mer_aver,mer_aver[ht,im]))
378 378 o_ver_aver = numpy.hstack((o_ver_aver,ver_aver[ht,im]))
379 379
380 380 data_fHeigths_List.append(o_height)
381 381 data_fZonal_List.append(o_zon_aver)
382 382 data_fMeridional_List.append(o_mer_aver)
383 383 data_fVertical_List.append(o_ver_aver)
384 384
385 385
386 386 else:
387 387 h_select = numpy.where(numpy.bitwise_and(height[0,:] <= hcm,numpy.isfinite(height[0,:])))
388 388 ht = h_select[0]
389 389 o_height = numpy.hstack((o_height,height[im,ht]))
390 390 o_zon_aver = numpy.hstack((o_zon_aver,zon_aver[ht,im]))
391 391 o_mer_aver = numpy.hstack((o_mer_aver,mer_aver[ht,im]))
392 392 o_ver_aver = numpy.hstack((o_ver_aver,ver_aver[ht,im]))
393 393
394 394 data_fHeigths_List.append(o_height)
395 395 data_fZonal_List.append(o_zon_aver)
396 396 data_fMeridional_List.append(o_mer_aver)
397 397 data_fVertical_List.append(o_ver_aver)
398 398
399 399
400 400 return startDTList, data_fHeigths_List, data_fZonal_List, data_fMeridional_List, data_fVertical_List
401 401
402 402
403 403 No newline at end of file
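
For reference, the rename from data_output to data/data_param in BLTRParametersProc.run() does not change the SNR thresholding itself. The following is a minimal standalone sketch of that masking step (array shapes and variable names are illustrative, not the schainpy API): SNR is averaged over channels, converted to dB, and each wind component is set to NaN wherever the averaged SNR falls at or below the threshold.

import numpy

nchannels, nranges = 2, 50

# hypothetical inputs standing in for dataOut.data[mode] and dataOut.data_SNR[mode]
winds = numpy.random.randn(3, nranges)                    # zonal, meridional, vertical
snr = numpy.abs(numpy.random.randn(nchannels, nranges))   # linear (non-dB) SNR per channel

snr_threshold = 5.0                                       # dB, as passed to run()

SNRavg = numpy.average(snr, axis=0)                       # average over channels
SNRavgdB = 10 * numpy.log10(SNRavg)                       # convert to dB

for i in range(3):                                        # mask each wind component
    winds[i][SNRavgdB <= snr_threshold] = numpy.nan
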