##// END OF EJS Templates
Operation MAD2Writer done Task #343
Juan C. Espinoza -
r1021:2aa9b10a9a70
parent child
Show More
@@ -1,19 +1,21
1 1 '''
2 2
3 3 $Author: murco $
4 4 $Id: JRODataIO.py 169 2012-11-19 21:57:03Z murco $
5 5 '''
6 6
7 7 from jroIO_voltage import *
8 8 from jroIO_spectra import *
9 9 from jroIO_heispectra import *
10 10 from jroIO_usrp import *
11 11
12 12 from jroIO_kamisr import *
13 13 from jroIO_param import *
14 14 from jroIO_hf import *
15 15
16 from jroIO_madrigal import *
17
16 18 from bltrIO_param import *
17 19 from jroIO_bltr import *
18 20 from jroIO_mira35c import *
19 21
@@ -1,364 +1,362
1 1 '''
2 2 Created on Nov 9, 2016
3 3
4 4 @author: roj- LouVD
5 5 '''
6 6
7 7
8 8 import os
9 9 import sys
10 10 import time
11 11 import glob
12 12 import datetime
13 13
14 14 import numpy
15 15
16 16 from schainpy.model.proc.jroproc_base import ProcessingUnit
17 17 from schainpy.model.data.jrodata import Parameters
18 18 from schainpy.model.io.jroIO_base import JRODataReader, isNumber
19 19
20 20 FILE_HEADER_STRUCTURE = numpy.dtype([
21 21 ('FMN', '<u4'),
22 22 ('nrec', '<u4'),
23 23 ('fr_offset', '<u4'),
24 24 ('id', '<u4'),
25 25 ('site', 'u1', (32,))
26 26 ])
27 27
28 28 REC_HEADER_STRUCTURE = numpy.dtype([
29 29 ('rmn', '<u4'),
30 30 ('rcounter', '<u4'),
31 31 ('nr_offset', '<u4'),
32 32 ('tr_offset', '<u4'),
33 33 ('time', '<u4'),
34 34 ('time_msec', '<u4'),
35 35 ('tag', 'u1', (32,)),
36 36 ('comments', 'u1', (32,)),
37 37 ('lat', '<f4'),
38 38 ('lon', '<f4'),
39 39 ('gps_status', '<u4'),
40 40 ('freq', '<u4'),
41 41 ('freq0', '<u4'),
42 42 ('nchan', '<u4'),
43 43 ('delta_r', '<u4'),
44 44 ('nranges', '<u4'),
45 45 ('r0', '<u4'),
46 46 ('prf', '<u4'),
47 47 ('ncoh', '<u4'),
48 48 ('npoints', '<u4'),
49 49 ('polarization', '<i4'),
50 50 ('rx_filter', '<u4'),
51 51 ('nmodes', '<u4'),
52 52 ('dmode_index', '<u4'),
53 53 ('dmode_rngcorr', '<u4'),
54 54 ('nrxs', '<u4'),
55 55 ('acf_length', '<u4'),
56 56 ('acf_lags', '<u4'),
57 57 ('sea_to_atmos', '<f4'),
58 58 ('sea_notch', '<u4'),
59 59 ('lh_sea', '<u4'),
60 60 ('hh_sea', '<u4'),
61 61 ('nbins_sea', '<u4'),
62 62 ('min_snr', '<f4'),
63 63 ('min_cc', '<f4'),
64 64 ('max_time_diff', '<f4')
65 65 ])
66 66
67 67 DATA_STRUCTURE = numpy.dtype([
68 68 ('range', '<u4'),
69 69 ('status', '<u4'),
70 70 ('zonal', '<f4'),
71 71 ('meridional', '<f4'),
72 72 ('vertical', '<f4'),
73 73 ('zonal_a', '<f4'),
74 74 ('meridional_a', '<f4'),
75 75 ('corrected_fading', '<f4'), # seconds
76 76 ('uncorrected_fading', '<f4'), # seconds
77 77 ('time_diff', '<f4'),
78 78 ('major_axis', '<f4'),
79 79 ('axial_ratio', '<f4'),
80 80 ('orientation', '<f4'),
81 81 ('sea_power', '<u4'),
82 82 ('sea_algorithm', '<u4')
83 83 ])
84 84
85 85 class BLTRParamReader(JRODataReader, ProcessingUnit):
86 86 '''
87 87 Boundary Layer and Tropospheric Radar (BLTR) reader, Wind velocities and SNR from *.sswma files
88 88 '''
89 89
90 90 ext = '.sswma'
91 91
92 92 def __init__(self, **kwargs):
93 93
94 94 ProcessingUnit.__init__(self , **kwargs)
95 95
96 96 self.dataOut = Parameters()
97 97 self.counter_records = 0
98 98 self.flagNoMoreFiles = 0
99 99 self.isConfig = False
100 100 self.filename = None
101 101
102 102 def setup(self,
103 103 path=None,
104 104 startDate=None,
105 105 endDate=None,
106 106 ext=None,
107 107 startTime=datetime.time(0, 0, 0),
108 108 endTime=datetime.time(23, 59, 59),
109 109 timezone=0,
110 110 status_value=0,
111 111 **kwargs):
112 112
113 113 self.path = path
114 114 self.startTime = startTime
115 115 self.endTime = endTime
116 116 self.status_value = status_value
117 117
118 118 if self.path is None:
119 119 raise ValueError, "The path is not valid"
120 120
121 121 if ext is None:
122 122 ext = self.ext
123 123
124 124 self.search_files(self.path, startDate, endDate, ext)
125 125 self.timezone = timezone
126 126 self.fileIndex = 0
127 127
128 128 if not self.fileList:
129 129 raise Warning, "There is no files matching these date in the folder: %s. \n Check 'startDate' and 'endDate' "%(path)
130 130
131 131 self.setNextFile()
132 132
133 133 def search_files(self, path, startDate, endDate, ext):
134 134 '''
135 135 Searching for BLTR rawdata file in path
136 136 Creating a list of file to proces included in [startDate,endDate]
137 137
138 138 Input:
139 139 path - Path to find BLTR rawdata files
140 140 startDate - Select file from this date
141 141 enDate - Select file until this date
142 142 ext - Extension of the file to read
143 143
144 144 '''
145 145
146 146 print 'Searching file in %s ' % (path)
147 147 foldercounter = 0
148 148 fileList0 = glob.glob1(path, "*%s" % ext)
149 149 fileList0.sort()
150 150
151 151 self.fileList = []
152 152 self.dateFileList = []
153 153
154 154 for thisFile in fileList0:
155 155 year = thisFile[-14:-10]
156 156 if not isNumber(year):
157 157 continue
158 158
159 159 month = thisFile[-10:-8]
160 160 if not isNumber(month):
161 161 continue
162 162
163 163 day = thisFile[-8:-6]
164 164 if not isNumber(day):
165 165 continue
166 166
167 167 year, month, day = int(year), int(month), int(day)
168 168 dateFile = datetime.date(year, month, day)
169 169
170 170 if (startDate > dateFile) or (endDate < dateFile):
171 171 continue
172 172
173 173 self.fileList.append(thisFile)
174 174 self.dateFileList.append(dateFile)
175 175
176 176 return
177 177
178 178 def setNextFile(self):
179 179
180 180 file_id = self.fileIndex
181 181
182 182 if file_id == len(self.fileList):
183 183 print '\nNo more files in the folder'
184 184 print 'Total number of file(s) read : {}'.format(self.fileIndex + 1)
185 185 self.flagNoMoreFiles = 1
186 186 return 0
187 187
188 188 print '\n[Setting file] (%s) ...' % self.fileList[file_id]
189 189 filename = os.path.join(self.path, self.fileList[file_id])
190 190
191 191 dirname, name = os.path.split(filename)
192 192 self.siteFile = name.split('.')[0] # 'peru2' ---> Piura - 'peru1' ---> Huancayo or Porcuya
193 193 if self.filename is not None:
194 194 self.fp.close()
195 195 self.filename = filename
196 196 self.fp = open(self.filename, 'rb')
197 197 self.header_file = numpy.fromfile(self.fp, FILE_HEADER_STRUCTURE, 1)
198 198 self.nrecords = self.header_file['nrec'][0]
199 199 self.sizeOfFile = os.path.getsize(self.filename)
200 200 self.counter_records = 0
201 201 self.flagIsNewFile = 0
202 202 self.fileIndex += 1
203 203
204 204 return 1
205 205
206 206 def readNextBlock(self):
207 207
208 208 while True:
209 209 if self.counter_records == self.nrecords:
210 210 self.flagIsNewFile = 1
211 211 if not self.setNextFile():
212 212 return 0
213 213
214 214 self.readBlock()
215 215
216 216 if (self.datatime.time() < self.startTime) or (self.datatime.time() > self.endTime):
217 217 print "[Reading] Record No. %d/%d -> %s [Skipping]" %(
218 218 self.counter_records,
219 219 self.nrecords,
220 220 self.datatime.ctime())
221 221 continue
222 222 break
223 223
224 224 print "[Reading] Record No. %d/%d -> %s" %(
225 225 self.counter_records,
226 226 self.nrecords,
227 227 self.datatime.ctime())
228 228
229 229 return 1
230 230
231 231 def readBlock(self):
232 232
233 233 pointer = self.fp.tell()
234 234 header_rec = numpy.fromfile(self.fp, REC_HEADER_STRUCTURE, 1)
235 235 self.nchannels = header_rec['nchan'][0]/2
236 236 self.kchan = header_rec['nrxs'][0]
237 237 self.nmodes = header_rec['nmodes'][0]
238 238 self.nranges = header_rec['nranges'][0]
239 239 self.fp.seek(pointer)
240 240 self.height = numpy.empty((self.nmodes, self.nranges))
241 241 self.snr = numpy.empty((self.nmodes, self.nchannels, self.nranges))
242 242 self.buffer = numpy.empty((self.nmodes, 3, self.nranges))
243 243
244 244 for mode in range(self.nmodes):
245 245 self.readHeader()
246 246 data = self.readData()
247 247 self.height[mode] = (data[0] - self.correction) / 1000.
248 248 self.buffer[mode] = data[1]
249 249 self.snr[mode] = data[2]
250 250
251 251 self.counter_records = self.counter_records + self.nmodes
252 252
253 253 return
254 254
255 255 def readHeader(self):
256 256 '''
257 257 RecordHeader of BLTR rawdata file
258 258 '''
259 259
260 260 header_structure = numpy.dtype(
261 261 REC_HEADER_STRUCTURE.descr + [
262 262 ('antenna_coord', 'f4', (2, self.nchannels)),
263 263 ('rx_gains', 'u4', (self.nchannels,)),
264 264 ('rx_analysis', 'u4', (self.nchannels,))
265 265 ]
266 266 )
267 267
268 268 self.header_rec = numpy.fromfile(self.fp, header_structure, 1)
269 269 self.lat = self.header_rec['lat'][0]
270 270 self.lon = self.header_rec['lon'][0]
271 271 self.delta = self.header_rec['delta_r'][0]
272 272 self.correction = self.header_rec['dmode_rngcorr'][0]
273 273 self.imode = self.header_rec['dmode_index'][0]
274 274 self.antenna = self.header_rec['antenna_coord']
275 275 self.rx_gains = self.header_rec['rx_gains']
276 276 self.time = self.header_rec['time'][0]
277 277 tseconds = self.header_rec['time'][0]
278 278 local_t1 = time.localtime(tseconds)
279 279 self.year = local_t1.tm_year
280 280 self.month = local_t1.tm_mon
281 281 self.day = local_t1.tm_mday
282 282 self.t = datetime.datetime(self.year, self.month, self.day)
283 283 self.datatime = datetime.datetime.utcfromtimestamp(self.time)
284 284
285 285 def readData(self):
286 286 '''
287 287 Reading and filtering data block record of BLTR rawdata file, filtering is according to status_value.
288 288
289 289 Input:
290 290 status_value - Array data is set to NAN for values that are not equal to status_value
291 291
292 292 '''
293 293
294 294 data_structure = numpy.dtype(
295 295 DATA_STRUCTURE.descr + [
296 296 ('rx_saturation', 'u4', (self.nchannels,)),
297 297 ('chan_offset', 'u4', (2 * self.nchannels,)),
298 298 ('rx_amp', 'u4', (self.nchannels,)),
299 299 ('rx_snr', 'f4', (self.nchannels,)),
300 300 ('cross_snr', 'f4', (self.kchan,)),
301 301 ('sea_power_relative', 'f4', (self.kchan,))]
302 302 )
303 303
304 304 data = numpy.fromfile(self.fp, data_structure, self.nranges)
305 305
306 306 height = data['range']
307 307 winds = numpy.array((data['zonal'], data['meridional'], data['vertical']))
308 308 snr = data['rx_snr'].T
309 309
310 310 winds[numpy.where(winds == -9999.)] = numpy.nan
311 311 winds[:, numpy.where(data['status'] != self.status_value)] = numpy.nan
312 312 snr[numpy.where(snr == -9999.)] = numpy.nan
313 313 snr[:, numpy.where(data['status'] != self.status_value)] = numpy.nan
314 314 snr = numpy.power(10, snr / 10)
315 315
316 316 return height, winds, snr
317 317
318 318 def set_output(self):
319 319 '''
320 320 Storing data from databuffer to dataOut object
321 321 '''
322 322
323 323 self.dataOut.data_SNR = self.snr
324 324 self.dataOut.height = self.height
325 325 self.dataOut.data_output = self.buffer
326 326 self.dataOut.utctimeInit = self.time
327 327 self.dataOut.utctime = self.dataOut.utctimeInit
328 self.dataOut.counter_records = self.counter_records
329 self.dataOut.nrecords = self.nrecords
330 328 self.dataOut.useLocalTime = False
331 329 self.dataOut.paramInterval = 157
332 330 self.dataOut.timezone = self.timezone
333 331 self.dataOut.site = self.siteFile
334 self.dataOut.nrecords = self.nrecords
332 self.dataOut.nrecords = self.nrecords/self.nmodes
335 333 self.dataOut.sizeOfFile = self.sizeOfFile
336 334 self.dataOut.lat = self.lat
337 335 self.dataOut.lon = self.lon
338 336 self.dataOut.channelList = range(self.nchannels)
339 337 self.dataOut.kchan = self.kchan
340 338 # self.dataOut.nHeights = self.nranges
341 339 self.dataOut.delta = self.delta
342 340 self.dataOut.correction = self.correction
343 341 self.dataOut.nmodes = self.nmodes
344 342 self.dataOut.imode = self.imode
345 343 self.dataOut.antenna = self.antenna
346 344 self.dataOut.rx_gains = self.rx_gains
347 345 self.dataOut.flagNoData = False
348 346
349 347 def getData(self):
350 348 '''
351 349 Storing data from databuffer to dataOut object
352 350 '''
353 351 if self.flagNoMoreFiles:
354 352 self.dataOut.flagNoData = True
355 353 print 'No file left to process'
356 354 return 0
357 355
358 356 if not self.readNextBlock():
359 357 self.dataOut.flagNoData = True
360 358 return 0
361 359
362 360 self.set_output()
363 361
364 362 return 1
@@ -1,375 +1,243
1 1 '''
2 2 Created on Aug 1, 2017
3 3
4 4 @author: Juan C. Espinoza
5 5 '''
6 6
7 7 import os
8 8 import sys
9 9 import time
10 import json
10 11 import datetime
11 12
12 13 import numpy
13 14
14 from schainpy.model.proc.jroproc_base import ProcessingUnit, Operation
15 from schainpy.model.data.jrodata import Parameters
16 from schainpy.model.data.jroheaderIO import RadarControllerHeader, SystemHeader
17 from schainpy.model.graphics.jroplot_parameters import WindProfilerPlot
18 from schainpy.model.io.jroIO_base import *
19
20 15 try:
21 16 import madrigal
22 17 import madrigal.cedar
23 from madrigal.cedar import MadrigalCatalogRecord
24 18 except:
25 19 print 'You should install "madrigal library" module if you want to read/write Madrigal data'
26 20
21 from schainpy.model.proc.jroproc_base import Operation
22 from schainpy.model.data.jrodata import Parameters
23
24 MISSING = -32767
25 DEF_CATALOG = {
26 'principleInvestigator': 'Marco Milla',
27 'expPurpose': None,
28 'expMode': None,
29 'cycleTime': None,
30 'correlativeExp': None,
31 'sciRemarks': None,
32 'instRemarks': None
33 }
34 DEF_HEADER = {
35 'kindatDesc': None,
36 'analyst': 'Jicamarca User',
37 'comments': None,
38 'history': None
39 }
40 MNEMONICS = {
41 10: 'jro',
42 11: 'jbr',
43 840: 'jul',
44 13: 'jas',
45 1000: 'pbr',
46 1001: 'hbr',
47 1002: 'obr',
48 }
49
50 def load_json(obj):
51 '''
52 Parse json as string instead of unicode
53 '''
54
55 if isinstance(obj, str):
56 obj = json.loads(obj)
57
58 return {str(k): load_json(v) if isinstance(v, dict) else str(v) if isinstance(v, unicode) else v
59 for k, v in obj.items()}
27 60
28 class MADWriter(Operation):
29 61
30 def __init__(self):
62 class MAD2Writer(Operation):
31 63
32 Operation.__init__(self)
64 def __init__(self, **kwargs):
65
66 Operation.__init__(self, **kwargs)
33 67 self.dataOut = Parameters()
34 68 self.path = None
35 69 self.dataOut = None
36 self.flagIsNewFile=1
37 self.ext = ".hdf5"
70 self.ext = '.dat'
38 71
39 72 return
40 73
41 def run(self, dataOut, path , modetowrite,**kwargs):
42
43 if self.flagIsNewFile:
44 flagdata = self.setup(dataOut, path, modetowrite)
74 def run(self, dataOut, path, oneDList, twoDParam='', twoDList='{}', metadata='{}', **kwargs):
75 '''
76 Inputs:
77 path - path where files will be created
78 oneDList - json of one-dimensional parameters in record where keys
79 are Madrigal codes (integers or mnemonics) and values the corresponding
80 dataOut attribute e.g: {
81 'gdlatr': 'lat',
82 'gdlonr': 'lon',
83 'gdlat2':'lat',
84 'glon2':'lon'}
85 twoDParam - independent parameter to get the number of rows e.g:
86 heighList
87 twoDList - json of two-dimensional parameters in record where keys
88 are Madrigal codes (integers or mnemonics) and values the corresponding
89 dataOut attribute if multidimensional array specify as tupple
90 ('attr', pos) e.g: {
91 'gdalt': 'heightList',
92 'vn1p2': ('data_output', 0),
93 'vn2p2': ('data_output', 1),
94 'vn3': ('data_output', 2),
95 'snl': ('data_SNR', 'db')
96 }
97 metadata - json of madrigal metadata (kinst, kindat, catalog and header)
98 '''
99 if not self.isConfig:
100 self.setup(dataOut, path, oneDList, twoDParam, twoDList, metadata, **kwargs)
101 self.isConfig = True
45 102
46 103 self.putData()
47 104 return
48 105
49 def setup(self, dataOut, path, modetowrite):
106 def setup(self, dataOut, path, oneDList, twoDParam, twoDList, metadata, **kwargs):
50 107 '''
51 Recovering data to write in new *.hdf5 file
52 Inputs:
53 modew -- mode to write (1 or 2)
54 path -- destination path
55
108 Configure Operation
56 109 '''
57 110
58 self.im = modetowrite-1
59 if self.im!=0 and self.im!=1:
60 raise ValueError, 'Check "modetowrite" value. Must be egual to 1 or 2, "{}" is not valid. '.format(modetowrite)
61
62 111 self.dataOut = dataOut
63 112 self.nmodes = self.dataOut.nmodes
64 self.nchannels = self.dataOut.nchannels
65 self.lat = self.dataOut.lat
66 self.lon = self.dataOut.lon
67 self.hcm = 3
68 self.thisDate = self.dataOut.utctimeInit
69 self.year = self.dataOut.year
70 self.month = self.dataOut.month
71 self.day = self.dataOut.day
72 113 self.path = path
114 self.blocks = kwargs.get('blocks', None)
115 self.counter = 0
116 self.oneDList = load_json(oneDList)
117 self.twoDList = load_json(twoDList)
118 self.twoDParam = twoDParam
119 meta = load_json(metadata)
120 self.kinst = meta.get('kinst')
121 self.kindat = meta.get('kindat')
122 self.catalog = meta.get('catalog', DEF_CATALOG)
123 self.header = meta.get('header', DEF_HEADER)
73 124
74 self.flagIsNewFile = 0
75
76 return 1
125 return
77 126
78 127 def setFile(self):
79 128 '''
80 - Determining the file name for each mode of operation
81 kinst - Kind of Instrument (mnemotic)
82 kindat - Kind of Data (mnemotic)
83
84 - Creating a cedarObject
85
129 Create new cedar file object
86 130 '''
87 lat_piura = -5.17
88 lat_huancayo = -12.04
89 lat_porcuya = -5.8
90
91 if '%2.2f' % self.lat == '%2.2f' % lat_piura:
92 self.instMnemonic = 'pbr'
93
94 elif '%2.2f' % self.lat == '%2.2f' % lat_huancayo:
95 self.instMnemonic = 'hbr'
96 131
97 elif '%2.2f' % self.lat == '%2.2f' % lat_porcuya:
98 self.instMnemonic = 'obr'
99 else: raise Warning, "The site of file read doesn't match any site known. Only file from Huancayo, Piura and Porcuya can be processed.\n Check the file "
132 self.mnemonic = MNEMONICS[self.kinst] #TODO get mnemonic from madrigal
133 date = datetime.datetime.utcfromtimestamp(self.dataOut.utctime)
100 134
101 mode = ['_mode1','_mode2']
102
103 self.hdf5filename = '%s%4.4d%2.2d%2.2d%s%s' % (self.instMnemonic,
104 self.year,
105 self.month,
106 self.day,
107 mode[self.im],
135 filename = '%s%s_%s%s' % (self.mnemonic,
136 date.strftime('%Y%m%d_%H%M%S'),
137 self.dataOut.mode,
108 138 self.ext)
109 139
110 self.fullname=os.path.join(self.path,self.hdf5filename)
140 self.fullname = os.path.join(self.path, filename)
111 141
112 142 if os.path.isfile(self.fullname) :
113 143 print "Destination path '%s' already exists. Previous file deleted. " %self.fullname
114 144 os.remove(self.fullname)
115 145
116 # Identify kinst and kindat
117 InstName = self.hdf5filename[0:3]
118 KinstList = [1000, 1001, 1002]
119 KinstId = {'pbr':0, 'hbr':1, 'obr':2} # pbr:piura, hbr:huancayo, obr:porcuya
120 KindatList = [1600, 1601] # mode 1, mode 2
121 self.type = KinstId[InstName]
122 self.kinst = KinstList[self.type]
123 self.kindat = KindatList[self.im]
124
125 146 try:
147 print '[Writing] creating file : %s' % (self.fullname)
126 148 self.cedarObj = madrigal.cedar.MadrigalCedarFile(self.fullname, True)
127 except ValueError, message:
149 except ValueError, e:
128 150 print '[Error]: Impossible to create a cedar object with "madrigal.cedar.MadrigalCedarFile" '
129 151 return
130 152
131 153 return 1
132 154
133 155 def writeBlock(self):
134 156 '''
135 - Selecting mode of operation:
136
137 bltr high resolution mode 1 - Low Atmosphere (0 - 3km) // bltr high resolution mode 2 - High Atmosphere (0 - 10km)
138 msnr - Average Signal Noise Ratio in dB
139 hcm - 3 km
140
141 - Filling the cedarObject by a block: each array data entry is assigned a code that defines the parameter to write to the file
142
143 GDLATR - Reference geod latitude (deg)
144 GDLONR - Reference geographic longitude (deg)
145 GDLAT2 - Geodetic latitude of second inst (deg)
146 GLON2 - Geographic longitude of second inst (deg)
147
148 GDALT - Geodetic altitude (height) (km)
149 SNL - Log10 (signal to noise ratio)
150 VN1P2 - Neutral wind in direction 1 (eastward) (m/s), ie zonal wind
151 VN2P2 - Neutral wind in direction 2 (northward) (m/s), ie meridional wind
152 EL2 - Ending elevation angle (deg), ie vertical wind
153
154 Other parameters: /madrigal3/metadata/parcodes.tab
155
157 Add data records to cedar file taking data from oneDList and twoDList
158 attributes.
159 Allowed parameters in: parcodes.tab
156 160 '''
157 161
158 self.z_zon = self.dataOut.data_output[0,:,:]
159 self.z_mer =self.dataOut.data_output[1,:,:]
160 self.z_ver = self.dataOut.data_output[2,:,:]
161
162 if self.im == 0:
163 h_select = numpy.where(numpy.bitwise_and(self.dataOut.height[0, :] >= 0., self.dataOut.height[0, :] <= self.hcm, numpy.isfinite(self.dataOut.height[0, :])))
164 else:
165 h_select = numpy.where(numpy.bitwise_and(self.dataOut.height[0, :] >= 0., self.dataOut.height[0, :] < 20, numpy.isfinite(self.dataOut.height[0, :])))
166
167 ht = h_select[0]
168
169 self.o_height = self.dataOut.height[self.im, ht]
170 self.o_zon = self.z_zon[ht, self.im]
171 self.o_mer = self.z_mer[ht, self.im]
172 self.o_ver = self.z_ver[ht, self.im]
173 o_snr = self.dataOut.data_SNR[ :, :, self.im]
174
175 o_snr = o_snr[ht, :]
162 startTime = datetime.datetime.utcfromtimestamp(self.dataOut.utctime)
163 endTime = startTime + datetime.timedelta(seconds=self.dataOut.paramInterval)
164 nrows = len(getattr(self.dataOut, self.twoDParam))
176 165
177 ndiv = numpy.nansum((numpy.isfinite(o_snr)), 1)
178 ndiv = ndiv.astype(float)
179
180 sel_div = numpy.where(ndiv == 0.)
181 ndiv[sel_div] = numpy.nan
182
183 if self.nchannels > 1:
184 msnr = numpy.nansum(o_snr, axis=1)
185 else:
186 msnr = o_snr
187
188 try:
189 self.msnr = 10 * numpy.log10(msnr / ndiv)
190 except ZeroDivisionError:
191 self.msnr = 10 * numpy.log10(msnr /1)
192 print 'Number of division (ndiv) egal to 1 by default. Check SNR'
193
194 time_t = time.gmtime(self.dataOut.time1)
195 year = time_t.tm_year
196 month = time_t.tm_mon
197 day = time_t.tm_mday
198 hour = time_t.tm_hour
199 minute = time_t.tm_min
200 second = time_t.tm_sec
201 timedate_0 = datetime.datetime(year, month, day, hour, minute, second)
202
203 # 1d parameters
204 GDLATR = self.lat
205 GDLONR = self.lon
206 GDLAT2 = self.lat
207 GLON2 = self.lon
208
209 # 2d parameters
210 GDALT = self.o_height
211
212 SNL = self.msnr
213 VN1P2 = self.o_zon
214 VN2P2 = self.o_mer
215 EL2 = self.o_ver
216 NROW = len(self.o_height)
217
218 startTime = timedate_0
219 endTime = startTime
220 self.dataRec = madrigal.cedar.MadrigalDataRecord(self.kinst,
166 rec = madrigal.cedar.MadrigalDataRecord(
167 self.kinst,
221 168 self.kindat,
222 169 startTime.year,
223 170 startTime.month,
224 171 startTime.day,
225 172 startTime.hour,
226 173 startTime.minute,
227 174 startTime.second,
228 0,
175 startTime.microsecond/10000,
229 176 endTime.year,
230 177 endTime.month,
231 178 endTime.day,
232 179 endTime.hour,
233 180 endTime.minute,
234 181 endTime.second,
235 0,
236 ('gdlatr', 'gdlonr', 'gdlat2', 'glon2'),
237 ('gdalt', 'snl', 'vn1p2', 'vn2p2', 'el2'),
238 NROW, ind2DList=['gdalt'])
182 endTime.microsecond/10000,
183 self.oneDList.keys(),
184 self.twoDList.keys(),
185 nrows
186 )
239 187
240 188 # Setting 1d values
241 self.dataRec.set1D('gdlatr', GDLATR)
242 self.dataRec.set1D('gdlonr', GDLONR)
243 self.dataRec.set1D('gdlat2', GDLAT2)
244 self.dataRec.set1D('glon2', GLON2)
189 for key in self.oneDList:
190 rec.set1D(key, getattr(self.dataOut, self.oneDList[key]))
245 191
246 192 # Setting 2d values
247 for n in range(self.o_height.shape[0]):
248 self.dataRec.set2D('gdalt', n, GDALT[n])
249 self.dataRec.set2D('snl', n, SNL[n])
250 self.dataRec.set2D('vn1p2', n, VN1P2[n])
251 self.dataRec.set2D('vn2p2', n, VN2P2[n])
252 self.dataRec.set2D('el2', n, EL2[n])
253
254 # Appending new data record
255 '''
256 [MADRIGAL3]There are two ways to write to a MadrigalCedarFile. Either this method (write) is called after all the
257 records have been appended to the MadrigalCedarFile, or dump is called after a certain number of records are appended,
258 and then at the end dump is called a final time if there were any records not yet dumped, followed by addArray.
259 '''
260
261 self.cedarObj.append(self.dataRec)
262 print ' [Writing] records {} (mode {}).'.format(self.dataOut.counter_records,self.im+1)
193 invalid = numpy.isnan(self.dataOut.data_output)
194 self.dataOut.data_output[invalid] = MISSING
195 out = {}
196 for key, value in self.twoDList.items():
197 if isinstance(value, str):
198 out[key] = getattr(self.dataOut, value)
199 elif isinstance(value, tuple):
200 attr, x = value
201 if isinstance(x, (int, float)):
202 out[key] = getattr(self.dataOut, attr)[int(x)]
203 elif x.lower()=='db':
204 tmp = getattr(self.dataOut, attr)
205 SNRavg = numpy.average(tmp, axis=0)
206 out[key] = 10*numpy.log10(SNRavg)
207
208 for n in range(nrows):
209 for key in out:
210 rec.set2D(key, n, out[key][n])
211
212 self.cedarObj.append(rec)
263 213 self.cedarObj.dump()
264
265
266
214 print '[Writing] Record No. {} (mode {}).'.format(
215 self.counter,
216 self.dataOut.mode
217 )
267 218
268 219 def setHeader(self):
269 220 '''
270 - Creating self.catHeadObj
271 - Adding information catalog
272 - Writing file header
273
221 Create an add catalog and header to cedar file
274 222 '''
275 self.catHeadObj = madrigal.cedar.CatalogHeaderCreator(self.fullname)
276 kindatDesc, comments, analyst, history, principleInvestigator = self._info_BLTR()
277
278 self.catHeadObj.createCatalog(principleInvestigator="Jarjar",
279 expPurpose='characterize the atmospheric dynamics in this region where frequently it happens the El Nino',
280 sciRemarks="http://madrigal3.haystack.mit.edu/static/CEDARMadrigalHdf5Format.pdf")
281
282 self.catHeadObj.createHeader(kindatDesc, analyst, comments, history)
283 223
284 self.catHeadObj.write()
285
286 print '[File created] path: %s' % (self.fullname)
224 header = madrigal.cedar.CatalogHeaderCreator(self.fullname)
225 header.createCatalog(**self.catalog)
226 header.createHeader(**self.header)
227 header.write()
287 228
288 229 def putData(self):
289 230
290 231 if self.dataOut.flagNoData:
291 232 return 0
292 233
293 if self.dataOut.counter_records == 1:
234 if self.counter == 0:
294 235 self.setFile()
295 print '[Writing] Setting new hdf5 file for the mode {}'.format(self.im+1)
296 236
297 if self.dataOut.counter_records <= self.dataOut.nrecords:
237 if self.counter <= self.dataOut.nrecords:
298 238 self.writeBlock()
239 self.counter += 1
299 240
300
301 if self.dataOut.counter_records == self.dataOut.nrecords:
302 self.cedarObj.addArray()
303
241 if self.counter == self.dataOut.nrecords or self.counter == self.blocks:
304 242 self.setHeader()
305 self.flagIsNewFile = 1
306
307 def _info_BLTR(self):
308
309 kindatDesc = '''--This header is for KINDAT = %d''' % self.kindat
310 history = None
311 analyst = '''Jarjar'''
312 principleInvestigator = '''
313 Jarjar
314 Radio Observatorio de Jicamarca
315 Instituto Geofisico del Peru
316
317 '''
318 if self.type == 1:
319 comments = '''
320
321 --These data are provided by two Boundary Layer and Tropospheric Radar (BLTR) deployed at two different locations at Peru(GMT-5), one of them at Piura(5.17 S, 80.64W) and another located at Huancayo (12.04 S, 75.32 W).
322
323 --The purpose of conducting these observations is to measure wind in the differents levels of height, this radar makes measurements the Zonal(U), Meridional(V) and Vertical(W) wind velocities component in northcoast from Peru. And the main purpose of these mensurations is to characterize the atmospheric dynamics in this region where frequently it happens the 'El Nino Phenomenon'
324
325 --In Kindat = 1600, contains information of wind velocities component since 0 Km to 3 Km.
326
327 --In Kindat = 1601, contains information of wind velocities component since 0 Km to 10 Km.
328
329 --The Huancayo-BLTR is a VHF Profiler Radar System is a 3 channel coherent receiver pulsed radar utilising state-of-the-art software and computing techniques to acquire, decode, and translate signals obtained from partial reflection echoes in the troposphere, lower stratosphere and mesosphere. It uses an array of three horizontal spaced and vertically directed receiving antennas. The data is recorded thirty seconds, averaged to one minute mean values of Height, Zonal, Meridional and Vertical wind.
330
331 --The Huancayo-BLTR was installed in January 2010. This instrument was designed and constructed by Genesis Soft Pty. Ltd. Is constituted by three groups of spaced antennas (distributed) forming an isosceles triangle.
332
333
334 Station _______ Geographic Coord ______ Geomagnetic Coord
335
336 _______________ Latitude _ Longitude __ Latitude _ Longitude
337
338 Huancayo (HUA) __12.04 S ___ 75.32 W _____ -12.05 ____ 352.85
339 Piura (PIU) _____ 5.17 S ___ 80.64 W ______ 5.18 ____ 350.93
340
341 WIND OBSERVATIONS
342
343 --To obtain wind the BLTR uses Spaced Antenna technique (e.g., Briggs 1984). The scatter and reflection it still provided by variations in the refractive index as in the Doppler method(Gage and Basley,1978; Balsley and Gage 1982; Larsen and Rottger 1982), but instead of using the Doppler shift to derive the velocity components, the cross-correlation between signals in an array of three horizontally spaced and vertically directed receiving antennas is used.
344
345 ......................................................................
346 For more information, consult the following references:
347 - Balsley, B. B., and K. S. Gage., On the use of radars for operational wind profiling, Bull. Amer. Meteor.Soc.,63, 1009-1018, 1982.
348
349 - Briggs, B. H., The analysis of spaced sensor data by correations techniques, Handbook for MAP, Vol. 13, SCOTEP Secretariat, University of Illinois, Urbana, 166-186, 1984.
350
351 - Gage, K. S., and B.B. Balsley., Doppler radar probing of the clear atmosphere, Bull. Amer. Meteor.Soc., 59, 1074-1093, 1978.
352
353 - Larsen, M. F., The Spaced Antenna Technique for Radar Wind Profiling, Journal of Atm. and Ocean. Technology. , Vol.6, 920-937, 1989.
354
355 - Larsen, M. F., A method for single radar voracity measurements?, Handbook for MAP,SCOSTEP Secretariat, University of the Illinois, Urban, in press, 1989.
356 ......................................................................
357
358 ACKNOWLEDGEMENTS:
359
360 --The Piura and Huancayo BLTR are part of the network of instruments operated by the Jicamarca Radio Observatory.
361
362 --The Jicamarca Radio Observatory is a facility of the Instituto Geofisico del Peru operated with support from the NSF Cooperative Agreement ATM-0432565 through Cornell University
363
364 ......................................................................
365
366 Further questions and comments should be addressed to:
367 Radio Observatorio de Jicamarca
368 Instituto Geofisico del Peru
369 Lima, Peru
370 Web URL: http://jro.igp.gob.pe
371 ......................................................................
372 '''
373
374 return kindatDesc, comments, analyst, history, principleInvestigator
375
243 self.counter = 0
@@ -1,393 +1,403
1 1 '''
2 2 Created on Oct 24, 2016
3 3
4 4 @author: roj- LouVD
5 5 '''
6 6
7 7 import numpy
8 8 import copy
9 9 import datetime
10 10 import time
11 11 from time import gmtime
12 12
13 13 from numpy import transpose
14 14
15 15 from jroproc_base import ProcessingUnit, Operation
16 16 from schainpy.model.data.jrodata import Parameters
17 17
18 18
class BLTRParametersProc(ProcessingUnit):
    '''
    Processing unit for BLTR parameters data (winds)

    Inputs:
        self.dataOut.nmodes - Number of operation modes
        self.dataOut.nchannels - Number of channels
        self.dataOut.nranges - Number of ranges

        self.dataOut.data_SNR - SNR array
        self.dataOut.data_output - Zonal, Vertical and Meridional velocity array
        self.dataOut.height - Height array (km)
        self.dataOut.time - Time array (seconds)

        self.dataOut.fileIndex - Index of the file currently read
        self.dataOut.lat - Latitude coordinate of BLTR location

        self.dataOut.doy - Experiment doy (number of the day in the current year)
        self.dataOut.month - Experiment month
        self.dataOut.day - Experiment day
        self.dataOut.year - Experiment year
    '''

    def __init__(self, **kwargs):
        '''
        Inputs: None
        '''
        ProcessingUnit.__init__(self, **kwargs)
        self.dataOut = Parameters()
        # one-shot configuration flag consumed by run()
        self.isConfig = False

    def setup(self, mode):
        '''
        One-time configuration: record the selected resolution mode
        on the output container.
        '''
        self.dataOut.mode = mode

    def run(self, mode, snr_threshold=None):
        '''
        Inputs:
            mode = High resolution (0) or Low resolution (1) data
            snr_threshold = snr filter value
        '''
        if not self.isConfig:
            self.setup(mode)
            self.isConfig = True

        if self.dataIn.type == 'Parameters':
            self.dataOut.copy(self.dataIn)

        # Keep only the requested mode for winds and SNR;
        # heights come from the first entry of the height table.
        self.dataOut.data_output = self.dataOut.data_output[mode]
        self.dataOut.heightList = self.dataOut.height[0]
        self.dataOut.data_SNR = self.dataOut.data_SNR[mode]

        if snr_threshold is None:
            return

        # Channel-averaged SNR in dB; blank every wind component
        # (zonal, meridional, vertical) where it falls at/below threshold.
        avg_snr = numpy.average(self.dataOut.data_SNR, axis=0)
        avg_snr_db = 10 * numpy.log10(avg_snr)
        low_snr = avg_snr_db <= snr_threshold
        for component in range(3):
            self.dataOut.data_output[component][low_snr] = numpy.nan
# TODO: unfinished operation -- see NOTE(review) markers below before enabling.
class OutliersFilter(Operation):
    '''
    Outlier removal and averaging helpers for BLTR wind components.

    NOTE(review): `run` references names (`svalue`, `output_array`, `m`)
    that are never defined in this scope, so it will raise NameError as
    written -- confirm the intended signature/loop structure upstream.
    `Average` reads `self.f_timesec`, `self.f_zon`, `self.f_mer`,
    `self.f_ver`, which are not set anywhere in this class; presumably a
    caller attaches them -- verify before use.
    '''

    def __init__(self, **kwargs):
        '''
        Inputs: None (keyword arguments are forwarded to the Operation base).
        '''
        Operation.__init__(self, **kwargs)

    def run(self, svalue2, method, factor, filter, npoints=9):
        '''
        Inputs:
            svalue - string to select array velocity
                     (NOTE(review): documented but NOT a parameter of this method)
            svalue2 - string to choose axis filtering
            method - 0 for SMOOTH or 1 for MEDIAN
            factor - number used to set threshold
            filter - 1 for data filtering using the standard deviation criteria else 0
                     (NOTE(review): currently unused in the body)
            npoints - number of points for mask filter
        '''

        print ' Outliers Filter {} {} / threshold = {}'.format(svalue, svalue, factor)


        yaxis = self.dataOut.heightList
        # xaxis is built but never used below -- NOTE(review): dead code?
        xaxis = numpy.array([[self.dataOut.utctime]])

        # Zonal
        # NOTE(review): the three assignments below overwrite each other;
        # only the last (vertical, transposed) survives.
        value_temp = self.dataOut.data_output[0]

        # Meridional
        value_temp = self.dataOut.data_output[1]

        # Vertical
        value_temp = numpy.transpose(self.dataOut.data_output[2])

        htemp = yaxis
        # NOTE(review): `std` is an alias of value_temp, never used afterwards.
        std = value_temp
        for h in range(len(htemp)):
            # count of finite samples at this height
            nvalues_valid = len(numpy.where(numpy.isfinite(value_temp[h]))[0])
            minvalid = npoints

            #only if valid values greater than the minimum required (10%)
            if nvalues_valid > minvalid:

                if method == 0:
                    #SMOOTH
                    # residual = data minus its running-mean reference
                    w = value_temp[h] - self.Smooth(input=value_temp[h], width=npoints, edge_truncate=1)


                if method == 1:
                    #MEDIAN
                    # residual = data minus its running-median reference
                    w = value_temp[h] - self.Median(input=value_temp[h], width = npoints)

                # sample standard deviation of the finite residuals
                dw = numpy.std(w[numpy.where(numpy.isfinite(w))],ddof = 1)

                # blank samples whose residual exceeds factor * std
                threshold = dw*factor
                value_temp[numpy.where(w > threshold),h] = numpy.nan
                value_temp[numpy.where(w < -1*threshold),h] = numpy.nan


        #At the end
        if svalue2 == 'inHeight':
            value_temp = numpy.transpose(value_temp)
            # NOTE(review): `output_array` and `m` are undefined here.
            output_array[:,m] = value_temp

        if svalue == 'zonal':
            self.dataOut.data_output[0] = output_array

        elif svalue == 'meridional':
            self.dataOut.data_output[1] = output_array

        elif svalue == 'vertical':
            self.dataOut.data_output[2] = output_array

        return self.dataOut.data_output


    def Median(self,input,width):
        '''
        Running-median filter; edge samples (first/last `pc`) are copied
        through unfiltered.

        Inputs:
            input - Velocity array
            width - Number of points for mask filter (must be odd;
                    even widths return None -- NOTE(review): confirm intended)
        '''

        if numpy.mod(width,2) == 1:
            pc = int((width - 1) / 2)
            cont = 0
            output = []

            for i in range(len(input)):
                # only interior points get a full window
                if i >= pc and i < len(input) - pc:
                    new2 = input[i-pc:i+pc+1]
                    # median over finite values only
                    temp = numpy.where(numpy.isfinite(new2))
                    new = new2[temp]
                    value = numpy.median(new)
                    output.append(value)

            output = numpy.array(output)
            # pad edges with the raw input values
            output = numpy.hstack((input[0:pc],output))
            output = numpy.hstack((output,input[-pc:len(input)]))

            return output

    def Smooth(self,input,width,edge_truncate = None):
        '''
        Boxcar (running-mean) filter via convolution, with optional edge
        truncation and NaN propagation.

        Inputs:
            input - Velocity array (NOTE(review): NaNs are zeroed in place
                    during processing and restored at the end -- the caller's
                    array is temporarily mutated)
            width - Number of points for mask filter
            edge_truncate - 1 for truncate the convolution product else
                            edges are copied through from the raw input
        '''

        # force an odd effective window width
        if numpy.mod(width,2) == 0:
            real_width = width + 1
            nzeros = width / 2
        else:
            real_width = width
            nzeros = (width - 1) / 2

        # Python 2 integer division -- half window size
        half_width = int(real_width)/2
        length = len(input)

        # normalized boxcar kernel
        gate = numpy.ones(real_width,dtype='float')
        norm_of_gate = numpy.sum(gate)

        # remember NaN positions; replace them with 0 so convolution works
        nan_process = 0
        nan_id = numpy.where(numpy.isnan(input))
        if len(nan_id[0]) > 0:
            nan_process = 1
            pb = numpy.zeros(len(input))
            pb[nan_id] = 1.
            input[nan_id] = 0.

        if edge_truncate == True:
            output = numpy.convolve(input/norm_of_gate,gate,mode='same')
        elif edge_truncate == False or edge_truncate == None:
            output = numpy.convolve(input/norm_of_gate,gate,mode='valid')
            # pad edges with the raw input values
            output = numpy.hstack((input[0:half_width],output))
            output = numpy.hstack((output,input[len(input)-half_width:len(input)]))

        if nan_process:
            # re-blank any window that was dominated by NaNs, restore input
            pb = numpy.convolve(pb/norm_of_gate,gate,mode='valid')
            pb = numpy.hstack((numpy.zeros(half_width),pb))
            pb = numpy.hstack((pb,numpy.zeros(half_width)))
            output[numpy.where(pb > 0.9999)] = numpy.nan
            input[nan_id] = numpy.nan
        return output

    def Average(self,aver=0,nhaver=1):
        '''
        Consensus time (and optional height) averaging of the wind fields.

        Inputs:
            aver - Index into the averaging-period table [1,2,3,4,6,8,12,24] hours
            nhaver - Decimation factor in heights (1 = no height averaging)

        Returns:
            (startDTList, data_fHeigths_List, data_fZonal_List,
             data_fMeridional_List, data_fVertical_List)
        '''
        nhpoints = 48

        # site latitudes used to pick per-site parameters
        lat_piura = -5.17
        lat_huancayo = -12.04
        lat_porcuya = -5.8

        # hcm = height cut (km) separating mode-0 / mode-1 coverage
        if '%2.2f'%self.dataOut.lat == '%2.2f'%lat_piura:
            hcm = 3.
            if self.dataOut.year == 2003 :
                if self.dataOut.doy >= 25 and self.dataOut.doy < 64:
                    nhpoints = 12

        elif '%2.2f'%self.dataOut.lat == '%2.2f'%lat_huancayo:
            hcm = 3.
            if self.dataOut.year == 2003 :
                if self.dataOut.doy >= 25 and self.dataOut.doy < 64:
                    nhpoints = 12


        elif '%2.2f'%self.dataOut.lat == '%2.2f'%lat_porcuya:
            hcm = 5.#2

        # minimum fraction of valid data required in a bin
        pdata = 0.2
        taver = [1,2,3,4,6,8,12,24]
        t0 = 0
        tf = 24
        # number of averaging windows in a day (Python 2 integer division)
        ntime =(tf-t0)/taver[aver]
        ti = numpy.arange(ntime)
        tf = numpy.arange(ntime) + taver[aver]


        old_height = self.dataOut.heightList

        if nhaver > 1:
            num_hei = len(self.dataOut.heightList)/nhaver/self.dataOut.nmodes
            deltha = 0.05*nhaver
            # NOTE(review): minhvalid is computed but never used below
            minhvalid = pdata*nhaver
            for im in range(self.dataOut.nmodes):
                new_height = numpy.arange(num_hei)*deltha + self.dataOut.height[im,0] + deltha/2.


        data_fHeigths_List = []
        data_fZonal_List = []
        data_fMeridional_List = []
        data_fVertical_List = []
        startDTList = []


        for i in range(ntime):
            height = old_height

            # window limits; the -5 h shift presumably converts to Peru
            # local time (UTC-5) -- TODO confirm
            start = datetime.datetime(self.dataOut.year,self.dataOut.month,self.dataOut.day) + datetime.timedelta(hours = int(ti[i])) - datetime.timedelta(hours = 5)
            stop = datetime.datetime(self.dataOut.year,self.dataOut.month,self.dataOut.day) + datetime.timedelta(hours = int(tf[i])) - datetime.timedelta(hours = 5)


            limit_sec1 = time.mktime(start.timetuple())
            limit_sec2 = time.mktime(stop.timetuple())

            # indices of samples falling inside [limit_sec1, limit_sec2)
            t1 = numpy.where(self.f_timesec >= limit_sec1)
            t2 = numpy.where(self.f_timesec < limit_sec2)
            time_select = []
            for val_sec in t1[0]:
                if val_sec in t2[0]:
                    time_select.append(val_sec)


            time_select = numpy.array(time_select,dtype = 'int')
            minvalid = numpy.ceil(pdata*nhpoints)

            # per-window accumulators, initialized to NaN
            zon_aver = numpy.zeros([self.dataOut.nranges,self.dataOut.nmodes],dtype='f4') + numpy.nan
            mer_aver = numpy.zeros([self.dataOut.nranges,self.dataOut.nmodes],dtype='f4') + numpy.nan
            ver_aver = numpy.zeros([self.dataOut.nranges,self.dataOut.nmodes],dtype='f4') + numpy.nan

            if nhaver > 1:
                new_zon_aver = numpy.zeros([num_hei,self.dataOut.nmodes],dtype='f4') + numpy.nan
                new_mer_aver = numpy.zeros([num_hei,self.dataOut.nmodes],dtype='f4') + numpy.nan
                new_ver_aver = numpy.zeros([num_hei,self.dataOut.nmodes],dtype='f4') + numpy.nan

            # only average windows with enough samples
            if len(time_select) > minvalid:
                time_average = self.f_timesec[time_select]

                for im in range(self.dataOut.nmodes):

                    # NaN-aware mean per range gate, gated on a minimum
                    # number of finite samples
                    for ih in range(self.dataOut.nranges):
                        if numpy.sum(numpy.isfinite(self.f_zon[time_select,ih,im])) >= minvalid:
                            zon_aver[ih,im] = numpy.nansum(self.f_zon[time_select,ih,im]) / numpy.sum(numpy.isfinite(self.f_zon[time_select,ih,im]))

                        if numpy.sum(numpy.isfinite(self.f_mer[time_select,ih,im])) >= minvalid:
                            mer_aver[ih,im] = numpy.nansum(self.f_mer[time_select,ih,im]) / numpy.sum(numpy.isfinite(self.f_mer[time_select,ih,im]))

                        if numpy.sum(numpy.isfinite(self.f_ver[time_select,ih,im])) >= minvalid:
                            ver_aver[ih,im] = numpy.nansum(self.f_ver[time_select,ih,im]) / numpy.sum(numpy.isfinite(self.f_ver[time_select,ih,im]))

                    if nhaver > 1:
                        # decimate in height: average nhaver adjacent gates
                        for ih in range(num_hei):
                            hvalid = numpy.arange(nhaver) + nhaver*ih

                            if numpy.sum(numpy.isfinite(zon_aver[hvalid,im])) >= minvalid:
                                new_zon_aver[ih,im] = numpy.nansum(zon_aver[hvalid,im]) / numpy.sum(numpy.isfinite(zon_aver[hvalid,im]))

                            if numpy.sum(numpy.isfinite(mer_aver[hvalid,im])) >= minvalid:
                                new_mer_aver[ih,im] = numpy.nansum(mer_aver[hvalid,im]) / numpy.sum(numpy.isfinite(mer_aver[hvalid,im]))

                            if numpy.sum(numpy.isfinite(ver_aver[hvalid,im])) >= minvalid:
                                new_ver_aver[ih,im] = numpy.nansum(ver_aver[hvalid,im]) / numpy.sum(numpy.isfinite(ver_aver[hvalid,im]))
                if nhaver > 1:
                    zon_aver = new_zon_aver
                    mer_aver = new_mer_aver
                    ver_aver = new_ver_aver
                    height = new_height


                # window start time (UTC) for the output record
                tstart = time_average[0]
                tend = time_average[-1]
                startTime = time.gmtime(tstart)

                year = startTime.tm_year
                month = startTime.tm_mon
                day = startTime.tm_mday
                hour = startTime.tm_hour
                minute = startTime.tm_min
                second = startTime.tm_sec

                startDTList.append(datetime.datetime(year,month,day,hour,minute,second))


                o_height = numpy.array([])
                o_zon_aver = numpy.array([])
                o_mer_aver = numpy.array([])
                o_ver_aver = numpy.array([])
                if self.dataOut.nmodes > 1:
                    # stitch modes: mode 0 covers [0, hcm] km, higher
                    # modes cover (hcm, 20) km
                    for im in range(self.dataOut.nmodes):

                        if im == 0:
                            h_select = numpy.where(numpy.bitwise_and(height[0,:] >=0,height[0,:] <= hcm,numpy.isfinite(height[0,:])))
                        else:
                            h_select = numpy.where(numpy.bitwise_and(height[1,:] > hcm,height[1,:] < 20,numpy.isfinite(height[1,:])))


                        ht = h_select[0]

                        o_height = numpy.hstack((o_height,height[im,ht]))
                        o_zon_aver = numpy.hstack((o_zon_aver,zon_aver[ht,im]))
                        o_mer_aver = numpy.hstack((o_mer_aver,mer_aver[ht,im]))
                        o_ver_aver = numpy.hstack((o_ver_aver,ver_aver[ht,im]))

                    data_fHeigths_List.append(o_height)
                    data_fZonal_List.append(o_zon_aver)
                    data_fMeridional_List.append(o_mer_aver)
                    data_fVertical_List.append(o_ver_aver)


                else:
                    # single-mode case
                    # NOTE(review): `im` here is left over from loops above;
                    # with nmodes == 1 it may be undefined on this path if
                    # no prior loop ran -- verify.
                    h_select = numpy.where(numpy.bitwise_and(height[0,:] <= hcm,numpy.isfinite(height[0,:])))
                    ht = h_select[0]
                    o_height = numpy.hstack((o_height,height[im,ht]))
                    o_zon_aver = numpy.hstack((o_zon_aver,zon_aver[ht,im]))
                    o_mer_aver = numpy.hstack((o_mer_aver,mer_aver[ht,im]))
                    o_ver_aver = numpy.hstack((o_ver_aver,ver_aver[ht,im]))

                    data_fHeigths_List.append(o_height)
                    data_fZonal_List.append(o_zon_aver)
                    data_fMeridional_List.append(o_mer_aver)
                    data_fVertical_List.append(o_ver_aver)


        return startDTList, data_fHeigths_List, data_fZonal_List, data_fMeridional_List, data_fVertical_List
391 401
392 402
393 403 No newline at end of file
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
1 NO CONTENT: file was removed, binary diff hidden
General Comments 0
You need to be logged in to leave comments. Login now